Allow disabling debug statements

This commit is contained in:
Peter Boyer 2015-12-31 13:42:59 -05:00
parent efa8676211
commit 89427b7f45
32 changed files with 399 additions and 159 deletions

View File

@ -1 +1 @@
1 + 2 = 3 + 5
1 + 2 + 32 + (1 + 2) = 3 + 5

View File

@ -1,6 +1,10 @@
package antlr4
import "fmt"
const (
PortDebug = false
)
var ATNINVALID_ALT_NUMBER = 0
type ATN struct {
@ -65,11 +69,15 @@ func (this *ATN) nextTokensInContext(s IATNState, ctx IRuleContext) *IntervalSet
// rule.
func (this *ATN) nextTokensNoContext(s IATNState) *IntervalSet {
if s.GetNextTokenWithinRule() != nil {
if PortDebug {
fmt.Println("DEBUG 1")
}
return s.GetNextTokenWithinRule()
}
if PortDebug {
fmt.Println("DEBUG 2")
fmt.Println(this.nextTokensInContext(s, nil))
}
s.SetNextTokenWithinRule(this.nextTokensInContext(s, nil))
s.GetNextTokenWithinRule().readOnly = true
return s.GetNextTokenWithinRule()

View File

@ -48,7 +48,9 @@ var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) IToken {
if PortDebug {
fmt.Println("Token factory creating: " + text)
}
var t = NewCommonToken(source, ttype, channel, start, stop)
t.line = line
@ -65,7 +67,9 @@ func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype
func (this *CommonTokenFactory) createThin(ttype int, text string) IToken {
if PortDebug {
fmt.Println("Token factory creating: " + text)
}
var t = NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
t.SetText(text)

View File

@ -55,12 +55,16 @@ func (bt *CommonTokenStream) Consume() {
skipEofCheck = false
}
if PortDebug {
fmt.Println("Consume 1")
}
if !skipEofCheck && bt.LA(1) == TokenEOF {
panic("cannot consume EOF")
}
if bt.Sync(bt.index + 1) {
if PortDebug {
fmt.Println("Consume 2")
}
bt.index = bt.adjustSeekIndex(bt.index + 1)
}
}
@ -75,7 +79,9 @@ func (bt *CommonTokenStream) Sync(i int) bool {
var n = i - len(bt.tokens) + 1 // how many more elements we need?
if n > 0 {
var fetched = bt.fetch(n)
if PortDebug {
fmt.Println("Sync done")
}
return fetched >= n
}
return true
@ -92,7 +98,9 @@ func (bt *CommonTokenStream) fetch(n int) int {
for i := 0; i < n; i++ {
var t IToken = bt.tokenSource.nextToken()
if PortDebug {
fmt.Println("fetch loop")
}
t.SetTokenIndex( len(bt.tokens) )
bt.tokens = append(bt.tokens, t)
if t.GetTokenType() == TokenEOF {
@ -101,7 +109,9 @@ func (bt *CommonTokenStream) fetch(n int) int {
}
}
if PortDebug {
fmt.Println("fetch done")
}
return n
}

View File

@ -24,20 +24,28 @@ func NewDefaultErrorListener() *DefaultErrorListener {
}
// SyntaxError is the default listener reaction to a syntax error: it does
// nothing except emit a trace line when port debugging is enabled.
// Subclasses (e.g. ConsoleErrorListener) override this to report errors.
func (this *DefaultErrorListener) SyntaxError(recognizer IRecognizer, offendingSymbol interface{}, line, column int, msg string, e IRecognitionException) {
	if !PortDebug {
		return
	}
	fmt.Println("SyntaxError!")
}
// ReportAmbiguity is a no-op in the default listener; when port debugging
// is enabled it prints a marker so ambiguity reports can be traced.
func (this *DefaultErrorListener) ReportAmbiguity(recognizer IParser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
	if !PortDebug {
		return
	}
	fmt.Println("ReportAmbiguity!")
}
// ReportAttemptingFullContext is a no-op in the default listener; when port
// debugging is enabled it prints a marker so full-context attempts can be
// traced.
func (this *DefaultErrorListener) ReportAttemptingFullContext(recognizer IParser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet) {
	if !PortDebug {
		return
	}
	fmt.Println("ReportAttemptingFullContext!")
}
// ReportContextSensitivity is a no-op in the default listener; when port
// debugging is enabled it prints a marker so context-sensitivity reports
// can be traced.
func (this *DefaultErrorListener) ReportContextSensitivity(recognizer IParser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet) {
	if !PortDebug {
		return
	}
	fmt.Println("ReportContextSensitivity!")
}
type ConsoleErrorListener struct {
*DefaultErrorListener

View File

@ -209,26 +209,36 @@ func (this *DefaultErrorStrategy) Sync(recognizer IParser) {
return
}
if PortDebug {
fmt.Println("STATE" + strconv.Itoa(recognizer.GetState()))
}
var s = recognizer.GetInterpreter().atn.states[recognizer.GetState()]
var la = recognizer.GetTokenStream().LA(1)
if PortDebug {
fmt.Println("LA" + strconv.Itoa(la))
}
// try cheaper subset first might get lucky. seems to shave a wee bit off
if la == TokenEOF || recognizer.GetATN().nextTokens(s, nil).contains(la) {
if PortDebug {
fmt.Println("OK1")
}
return
}
// Return but don't end recovery. only do that upon valid token Match
if recognizer.isExpectedToken(la) {
if PortDebug {
fmt.Println("OK2")
}
return
}
if PortDebug {
fmt.Println("LA" + strconv.Itoa(la))
fmt.Println(recognizer.GetATN().nextTokens(s, nil))
}
switch s.GetStateType() {
case ATNStateBLOCK_START:
@ -545,7 +555,9 @@ func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) IToken {
tf := recognizer.GetTokenFactory()
if PortDebug {
fmt.Println("Missing symbol error")
}
return tf.Create( current.GetSource(), expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.GetLine(), current.GetColumn())
}

View File

@ -31,7 +31,9 @@ func NewFileStream(fileName string) *FileStream {
fs.filename = fileName
s := string(buf.Bytes())
if PortDebug {
fmt.Println(s)
}
fs.InputStream = NewInputStream(s)
return fs

View File

@ -67,8 +67,10 @@ func (is *InputStream) Mark() int {
}
// Release discards a previously obtained mark. For an in-memory InputStream
// this requires no cleanup, so it only emits a trace line when port
// debugging is enabled.
func (is *InputStream) Release(marker int) {
	if !PortDebug {
		return
	}
	fmt.Println("RELEASING")
}
func (is *InputStream) Seek(index int) {
if index <= is.index {

View File

@ -68,7 +68,9 @@ func (i *IntervalSet) addRange(l, h int) {
}
func (is *IntervalSet) addInterval(v *Interval) {
if PortDebug {
fmt.Println("addInterval" + v.String())
}
if is.intervals == nil {
is.intervals = make([]*Interval, 0)
is.intervals = append(is.intervals, v)
@ -96,9 +98,13 @@ func (is *IntervalSet) addInterval(v *Interval) {
}
func (i *IntervalSet) addSet(other *IntervalSet) *IntervalSet {
if PortDebug {
fmt.Println("addSet")
}
if other.intervals != nil {
if PortDebug {
fmt.Println(len(other.intervals))
}
for k := 0; k < len(other.intervals); k++ {
var i2 = other.intervals[k]
i.addInterval(NewInterval(i2.start, i2.stop))

View File

@ -75,6 +75,7 @@ func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx IRuleContext) *IntervalS
if ctx != nil {
lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)
}
if PortDebug {
fmt.Println("DEBUG 5")
// fmt.Println("DEBUG" + lookContext.String())
fmt.Println(s)
@ -83,8 +84,11 @@ func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx IRuleContext) *IntervalS
fmt.Println(r)
fmt.Println(seeThruPreds)
fmt.Println("=====")
}
la._LOOK(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
if PortDebug {
fmt.Println(r)
}
return r
}
@ -147,7 +151,9 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
lookBusy.add(c)
if s == stopState {
if PortDebug {
fmt.Println("DEBUG 6")
}
if ctx == nil {
look.addOne(TokenEpsilon)
return
@ -169,7 +175,9 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
}
if ctx != PredictionContextEMPTY {
if PortDebug {
fmt.Println("DEBUG 7")
}
// run thru all possible stack tops in ctx
for i := 0; i < ctx.length(); i++ {
@ -188,7 +196,9 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
t := s.GetTransitions()[i]
if t1, ok := t.(*RuleTransition); ok {
if PortDebug {
fmt.Println("DEBUG 8")
}
if calledRuleStack.contains(t1.getTarget().GetRuleIndex()) {
continue
@ -198,32 +208,37 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
la.___LOOK(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
if PortDebug {
fmt.Println(look)
//
// defer func() {
// calledRuleStack.remove(t1.getTarget().GetRuleIndex())
// }()
//
// calledRuleStack.add(t1.getTarget().GetRuleIndex())
// la._LOOK(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
}
} else if t2, ok := t.(IAbstractPredicateTransition); ok {
if PortDebug {
fmt.Println("DEBUG 9")
}
if seeThruPreds {
la._LOOK(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
look.addOne(LL1AnalyzerHIT_PRED)
}
} else if t.getIsEpsilon() {
if PortDebug {
fmt.Println("DEBUG 10")
}
la._LOOK(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else if _, ok := t.(*WildcardTransition); ok {
if PortDebug {
fmt.Println("DEBUG 11")
}
look.addRange(TokenMinUserTokenType, la.atn.maxTokenType)
} else {
if PortDebug {
fmt.Println("DEBUG 12")
}
set := t.getLabel()
if PortDebug {
fmt.Println(set)
}
if set != nil {
if _, ok := t.(*NotSetTransition); ok {
set = set.complement(TokenMinUserTokenType, la.atn.maxTokenType)

View File

@ -211,10 +211,14 @@ func (l *Lexer) nextToken() IToken {
if l._type != LexerMore {
break
}
if PortDebug {
fmt.Println("lex inner loop")
}
}
if PortDebug {
fmt.Println("lex loop")
}
if continueOuter {
continue
}
@ -293,7 +297,9 @@ func (l *Lexer) emitToken(token IToken) {
// custom Token objects or provide a Newfactory.
// /
func (l *Lexer) emit() IToken {
if PortDebug {
fmt.Println("emit")
}
var t = l._factory.Create(l._tokenFactorySourcePair, l._type, l._text, l._channel, l._tokenStartCharIndex, l.getCharIndex()-1, l._tokenStartLine, l._tokenStartColumn)
l.emitToken(t)
return t
@ -302,7 +308,9 @@ func (l *Lexer) emit() IToken {
func (l *Lexer) emitEOF() IToken {
cpos := l.getCharPositionInLine()
lpos := l.getLine()
if PortDebug {
fmt.Println("emitEOF")
}
var eof = l._factory.Create(l._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, l._input.Index(), l._input.Index()-1, lpos, cpos)
l.emitToken(eof)
return eof
@ -351,12 +359,16 @@ func (this *Lexer) GetATN() *ATN {
// Forces load of all tokens. Does not include EOF token.
// /
func (l *Lexer) getAllTokens() []IToken {
if PortDebug {
fmt.Println("getAllTokens")
}
var tokens = make([]IToken, 0)
var t = l.nextToken()
for t.GetTokenType() != TokenEOF {
tokens = append(tokens, t)
if PortDebug {
fmt.Println("getAllTokens")
}
t = l.nextToken()
}
return tokens

View File

@ -88,7 +88,7 @@ func NewLexerATNSimulator(recog ILexer, atn *ATN, decisionToDFA []*DFA, sharedCo
return this
}
var LexerATNSimulatorDebug = true
var LexerATNSimulatorDebug = false
var LexerATNSimulatorDFADebug = false
var LexerATNSimulatorMIN_DFA_EDGE = 0
@ -105,14 +105,18 @@ func (this *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
if PortDebug {
fmt.Println("Match")
}
this.Match_calls += 1
this.mode = mode
var mark = input.Mark()
defer func() {
if PortDebug {
fmt.Println("FINALLY")
}
input.Release(mark)
}()
@ -122,10 +126,14 @@ func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
var dfa = this.decisionToDFA[mode]
if dfa.s0 == nil {
if PortDebug {
fmt.Println("MatchATN")
}
return this.MatchATN(input)
} else {
if PortDebug {
fmt.Println("execATN")
}
return this.execATN(input, dfa.s0)
}
}
@ -224,7 +232,9 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
s = target // flip current DFA target becomes Newsrc/from state
}
if PortDebug {
fmt.Println("DONE WITH execATN loop")
}
return this.failOrAccept(this.prevAccept, input, s.configs, t)
}
@ -286,7 +296,9 @@ func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStre
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor
this.accept(input, lexerActionExecutor, this.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
if PortDebug {
fmt.Println(prevAccept.dfaState.prediction)
}
return prevAccept.dfaState.prediction
} else {
// if no accept and EOF is first char, return EOF
@ -357,7 +369,9 @@ func (this *LexerATNSimulator) getReachableTarget(trans ITransition, t int) IATN
func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState) *OrderedATNConfigSet {
if PortDebug {
fmt.Println("DEBUG" + strconv.Itoa(len(p.GetTransitions())))
}
var configs = NewOrderedATNConfigSet()
for i := 0; i < len(p.GetTransitions()); i++ {
@ -366,7 +380,9 @@ func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState)
this.closure(input, cfg, configs.ATNConfigSet, false, false, false)
}
if PortDebug {
fmt.Println("DEBUG" + configs.String())
}
return configs
}
@ -661,7 +677,9 @@ func (this *LexerATNSimulator) consume(input CharStream) {
}
func (this *LexerATNSimulator) GetTokenName(tt int) string {
if PortDebug {
fmt.Println(tt)
}
if tt == -1 {
return "EOF"
} else {

View File

@ -126,10 +126,14 @@ func (p *Parser) GetParseListeners() []ParseTreeListener {
func (p *Parser) Match(ttype int) IToken {
if PortDebug {
fmt.Println("get current token")
}
var t = p.getCurrentToken()
if PortDebug {
fmt.Println("TOKEN IS " + t.GetText())
}
if t.GetTokenType() == ttype {
p._errHandler.ReportMatch(p)
@ -144,7 +148,9 @@ func (p *Parser) Match(ttype int) IToken {
}
}
if PortDebug {
fmt.Println("match done")
}
return t
}
@ -411,10 +417,14 @@ func (p *Parser) NotifyErrorListeners(msg string, offendingToken IToken, err IRe
func (p *Parser) Consume() IToken {
var o = p.getCurrentToken()
if o.GetTokenType() != TokenEOF {
if PortDebug {
fmt.Println("Consuming")
}
p.GetInputStream().Consume()
if PortDebug {
fmt.Println("Done consuming")
}
}
var hasListener = p._parseListeners != nil && len(p._parseListeners) > 0
if p.BuildParseTrees || hasListener {
if p._errHandler.inErrorRecoveryMode(p) {

View File

@ -47,7 +47,7 @@ func NewParserATNSimulator(parser IParser, atn *ATN, decisionToDFA []*DFA, share
return this
}
var ParserATNSimulatorDebug = true
var ParserATNSimulatorDebug = false
var ParserATNSimulatorListATNDecisions = false
var ParserATNSimulatorDFADebug = false
var ParserATNSimulatorRetryDebug = false
@ -57,7 +57,9 @@ func (this *ParserATNSimulator) reset() {
func (this *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, outerContext IParserRuleContext) int {
fmt.Println("Adaptive preduct")
if PortDebug {
fmt.Println("Adaptive predict")
}
if ParserATNSimulatorDebug || ParserATNSimulatorListATNDecisions {
@ -998,7 +1000,9 @@ func (this *ParserATNSimulator) closureCheckingStopState(config IATNConfig, conf
} else {
// we have no context info, just chase follow links (if greedy)
if ParserATNSimulatorDebug {
if PortDebug {
fmt.Println("DEBUG 1")
}
fmt.Println("FALLING off rule " + this.getRuleName(config.GetState().GetRuleIndex()))
}
this.closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon)
@ -1023,7 +1027,9 @@ func (this *ParserATNSimulator) closureCheckingStopState(config IATNConfig, conf
} else {
// else if we have no context info, just chase follow links (if greedy)
if ParserATNSimulatorDebug {
if PortDebug {
fmt.Println("DEBUG 2")
}
fmt.Println("FALLING off rule " + this.getRuleName(config.GetState().GetRuleIndex()))
}
}
@ -1033,7 +1039,9 @@ func (this *ParserATNSimulator) closureCheckingStopState(config IATNConfig, conf
// Do the actual work of walking epsilon edges//
func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx bool, depth int, treatEofAsEpsilon bool) {
if PortDebug {
fmt.Println("closure_")
}
var p = config.GetState()
// optimization
if !p.GetEpsilonOnlyTransitions() {
@ -1047,7 +1055,9 @@ func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSe
var continueCollecting = collectPredicates && !ok
var c = this.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEofAsEpsilon)
if c != nil {
if PortDebug {
fmt.Println("DEBUG 1")
}
if !t.getIsEpsilon() && closureBusy.add(c) != c {
// avoid infinite recursion for EOF* and EOF+
continue
@ -1056,7 +1066,9 @@ func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSe
if _, ok := config.GetState().(*RuleStopState); ok {
if PortDebug {
fmt.Println("DEBUG 2")
}
// target fell off end of rule mark resulting c as having dipped into outer context
// We can't get here if incoming config was rule stop and we had context
// track how far we dip into outer context. Might
@ -1064,16 +1076,22 @@ func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSe
// preds if this is > 0.
if closureBusy.add(c) != c {
if PortDebug {
fmt.Println("DEBUG 3")
}
// avoid infinite recursion for right-recursive rules
continue
} else {
if PortDebug {
fmt.Println(c)
fmt.Println(closureBusy)
}
}
if this._dfa != nil && this._dfa.precedenceDfa {
if PortDebug {
fmt.Println("DEBUG 4")
}
if t.(*EpsilonTransition).outermostPrecedenceReturn == this._dfa.atnStartState.GetRuleIndex() {
c.precedenceFilterSuppressed = true
}
@ -1292,7 +1310,9 @@ func (this *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs *ATNConfig
func (this *ParserATNSimulator) GetTokenName(t int) string {
if PortDebug {
fmt.Println("Get token name")
}
if t == TokenEOF {
return "EOF"

View File

@ -83,7 +83,9 @@ func (this *Recognizer) GetState() int {
}
// SetState records v as the recognizer's current ATN state number,
// optionally tracing the transition when port debugging is enabled.
func (this *Recognizer) SetState(v int) {
	if PortDebug {
		// Capture the outgoing state before it is overwritten below.
		previous := strconv.Itoa(this.state)
		fmt.Println("SETTING STATE " + strconv.Itoa(v) + " from " + previous)
	}
	this.state = v
}

View File

@ -1462,7 +1462,6 @@ public class ParserATNSimulator extends ATNSimulator {
boolean fullCtx,
boolean treatEofAsEpsilon)
{
console.log("closure");
final int initialDepth = 0;
closureCheckingStopState(config, configs, closureBusy, collectPredicates,
fullCtx,

View File

@ -726,11 +726,9 @@ ArithmeticParser.prototype.relop = function() {
this.state = 56;
_la = this._input.LA(1);
if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << ArithmeticParser.GT) | (1 << ArithmeticParser.LT) | (1 << ArithmeticParser.EQ))) !== 0))) {
console.log("DEBUG1")
this._errHandler.recoverInline(this);
}
else {
console.log("DEBUG2")
this.consume();
}
} catch (re) {
@ -820,7 +818,6 @@ ArithmeticParser.prototype.number = function() {
do {
this.state = 61;
this.match(ArithmeticParser.DIGIT);
console.log("Done with match")
this.state = 64;
this._errHandler.sync(this);
_la = this._input.LA(1);

View File

@ -129,12 +129,16 @@ BufferedTokenStream.prototype.consume = function() {
// not yet initialized
skipEofCheck = false;
}
if (PORT_DEBUG) {
console.log("consume 1")
}
if (!skipEofCheck && this.LA(1) === Token.EOF) {
throw "cannot consume EOF";
}
if (this.sync(this.index + 1)) {
if (PORT_DEBUG) {
console.log("consume 2")
}
this.index = this.adjustSeekIndex(this.index + 1);
}
};
@ -149,7 +153,9 @@ BufferedTokenStream.prototype.sync = function(i) {
var n = i - this.tokens.length + 1; // how many more elements we need?
if (n > 0) {
var fetched = this.fetch(n);
if (PORT_DEBUG) {
console.log("sync done")
}
return fetched >= n;
}
@ -166,7 +172,9 @@ BufferedTokenStream.prototype.fetch = function(n) {
}
for (var i = 0; i < n; i++) {
var t = this.tokenSource.nextToken();
if (PORT_DEBUG) {
console.log("fetch loop")
}
t.tokenIndex = this.tokens.length;
this.tokens.push(t);
if (t.type === Token.EOF) {
@ -175,7 +183,9 @@ BufferedTokenStream.prototype.fetch = function(n) {
}
}
if (PORT_DEBUG) {
console.log("fetch done")
}
return n;
};

View File

@ -74,7 +74,9 @@ CommonTokenFactory.DEFAULT = new CommonTokenFactory();
CommonTokenFactory.prototype.create = function(source, type, text, channel, start, stop, line, column) {
if (PORT_DEBUG) {
console.log("Token factory creating: " + text)
}
var t = new CommonToken(source, type, channel, start, stop);
t.line = line;
@ -89,7 +91,9 @@ CommonTokenFactory.prototype.create = function(source, type, text, channel, star
CommonTokenFactory.prototype.createThin = function(type, text) {
if (PORT_DEBUG) {
console.log("Token factory creating: " + text)
}
var t = new CommonToken(null, type);
t.text = text;

View File

@ -42,7 +42,9 @@ function FileStream(fileName) {
InputStream.call(this, data);
this.fileName = fileName;
if (PORT_DEBUG) {
console.log(data);
}
return this;
}

View File

@ -101,7 +101,9 @@ InputStream.prototype.mark = function() {
};
// Releasing a mark is a no-op for an in-memory stream; only emit a trace
// line when port debugging is enabled.
InputStream.prototype.release = function(marker) {
	if (!PORT_DEBUG) {
		return;
	}
	console.log("RELEASING")
};
// consume() ahead until p==_index; can't just set p=_index as we must

View File

@ -50,7 +50,9 @@ IntervalSet.prototype.addRange = function(l, h) {
};
IntervalSet.prototype.addInterval = function(v) {
if (PORT_DEBUG) {
console.log("addInterval" + v.toString())
}
if (this.intervals === null) {
this.intervals = [];
this.intervals.push(v);
@ -81,9 +83,13 @@ IntervalSet.prototype.addInterval = function(v) {
};
IntervalSet.prototype.addSet = function(other) {
if (PORT_DEBUG) {
console.log("addSet")
}
if (other.intervals !== null) {
if (PORT_DEBUG) {
console.log(other.intervals.length)
}
for (var k = 0; k < other.intervals.length; k++) {
var i = other.intervals[k];
this.addInterval(new Interval(i.start, i.stop));

View File

@ -110,6 +110,7 @@ LL1Analyzer.prototype.LOOK = function(s, stopState, ctx) {
var seeThruPreds = true; // ignore preds; get all lookahead
ctx = ctx || null;
var lookContext = ctx!==null ? predictionContextFromRuleContext(s.atn, ctx) : null;
if (PORT_DEBUG) {
console.log("DEBUG 5")
console.log(s.toString())
console.log(stopState)
@ -117,8 +118,11 @@ LL1Analyzer.prototype.LOOK = function(s, stopState, ctx) {
console.log(r.toString())
console.log(seeThruPreds)
console.log("=====")
}
this._LOOK(s, stopState, lookContext, r, new Set(), new BitSet(), seeThruPreds, true);
if (PORT_DEBUG) {
console.log(r.toString())
}
return r;
};
@ -159,7 +163,9 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
}
lookBusy.add(c);
if (s === stopState) {
if (PORT_DEBUG) {
console.log("DEBUG 6")
}
if (ctx ===null) {
look.addOne(Token.EPSILON);
return;
@ -177,7 +183,9 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
return;
}
if (ctx !== PredictionContext.EMPTY) {
if (PORT_DEBUG) {
console.log("DEBUG 7")
}
// run thru all possible stack tops in ctx
for(var i=0; i<ctx.length; i++) {
var returnState = this.atn.states[ctx.getReturnState(i)];
@ -198,7 +206,9 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
var t = s.transitions[j];
if (t.constructor === RuleTransition) {
if (PORT_DEBUG) {
console.log("DEBUG 8")
}
if (calledRuleStack.contains(t.target.ruleIndex)) {
continue;
@ -211,25 +221,37 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
calledRuleStack.remove(t.target.ruleIndex);
}
if (PORT_DEBUG) {
console.log(look.toString())
}
} else if (t instanceof AbstractPredicateTransition ) {
if (PORT_DEBUG) {
console.log("DEBUG 9")
}
if (seeThruPreds) {
this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
} else {
look.addOne(LL1Analyzer.HIT_PRED);
}
} else if( t.isEpsilon) {
if (PORT_DEBUG) {
console.log("DEBUG 10")
}
this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
} else if (t.constructor === WildcardTransition) {
if (PORT_DEBUG) {
console.log("DEBUG 11")
}
look.addRange( Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType );
} else {
if (PORT_DEBUG) {
console.log("DEBUG 12")
}
var set = t.label;
if (PORT_DEBUG) {
console.log(set.toString())
}
if (set !== null) {
if (t instanceof NotSetTransition) {
set = set.complement(Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType);

View File

@ -169,10 +169,14 @@ Lexer.prototype.nextToken = function() {
break;
}
if (PORT_DEBUG) {
console.log("lex inner loop")
}
}
if (PORT_DEBUG) {
console.log("lex loop")
}
if (continueOuter) {
continue;
@ -262,7 +266,9 @@ Lexer.prototype.emitToken = function(token) {
// custom Token objects or provide a new factory.
// /
Lexer.prototype.emit = function() {
if (PORT_DEBUG) {
console.log("emit")
}
var t = this._factory.create(this._tokenFactorySourcePair, this._type,
this._text, this._channel, this._tokenStartCharIndex, this
.getCharIndex() - 1, this._tokenStartLine,
@ -272,7 +278,9 @@ Lexer.prototype.emit = function() {
};
Lexer.prototype.emitEOF = function() {
if (PORT_DEBUG) {
console.log("emitEOF")
}
var cpos = this.column;
var lpos = this.line;
var eof = this._factory.create(this._tokenFactorySourcePair, Token.EOF,

View File

@ -134,10 +134,14 @@ Parser.prototype.reset = function() {
Parser.prototype.match = function(ttype) {
if (PORT_DEBUG) {
console.log("get current token")
}
var t = this.getCurrentToken();
if (PORT_DEBUG) {
console.log("TOKEN IS " + t.text)
}
if (t.type === ttype) {
this._errHandler.reportMatch(this);
this.consume();
@ -151,7 +155,9 @@ Parser.prototype.match = function(ttype) {
}
}
if (PORT_DEBUG) {
console.log("Match done")
}
return t;
};
@ -405,10 +411,14 @@ Parser.prototype.notifyErrorListeners = function(msg, offendingToken, err) {
Parser.prototype.consume = function() {
var o = this.getCurrentToken();
if (o.type !== Token.EOF) {
if (PORT_DEBUG) {
console.log("Consuming")
}
this.getInputStream().consume();
if (PORT_DEBUG) {
console.log("done consuming")
}
}
var hasListener = this._parseListeners !== null && this._parseListeners.length > 0;
if (this.buildParseTrees || hasListener) {

View File

@ -164,7 +164,9 @@ Object.defineProperty(Recognizer.prototype, "state", {
return this._stateNumber;
},
set : function(state) {
if (PORT_DEBUG) {
console.log("SETTING STATE" + state + " from " + this._stateNumber )
}
this._stateNumber = state;
}
});

View File

@ -75,11 +75,15 @@ ATN.prototype.nextTokensInContext = function(s, ctx) {
// rule.
ATN.prototype.nextTokensNoContext = function(s) {
if (s.nextTokenWithinRule !== null ) {
if (PORT_DEBUG) {
console.log("DEBUG 1")
}
return s.nextTokenWithinRule;
}
if (PORT_DEBUG) {
console.log("DEBUG 2")
console.log(this.nextTokensInContext(s, null).toString())
}
s.nextTokenWithinRule = this.nextTokensInContext(s, null);
s.nextTokenWithinRule.readOnly = true;
return s.nextTokenWithinRule;

View File

@ -101,7 +101,7 @@ function LexerATNSimulator(recog, atn, decisionToDFA, sharedContextCache) {
LexerATNSimulator.prototype = Object.create(ATNSimulator.prototype);
LexerATNSimulator.prototype.constructor = LexerATNSimulator;
LexerATNSimulator.prototype.debug = true;
LexerATNSimulator.prototype.debug = false;
LexerATNSimulator.prototype.dfa_debug = false;
LexerATNSimulator.MIN_DFA_EDGE = 0;
@ -118,7 +118,9 @@ LexerATNSimulator.prototype.copyState = function(simulator) {
LexerATNSimulator.prototype.match = function(input, mode) {
if (PORT_DEBUG) {
console.log("MATCH")
}
this.match_calls += 1;
this.mode = mode;
@ -128,15 +130,21 @@ LexerATNSimulator.prototype.match = function(input, mode) {
this.prevAccept.reset();
var dfa = this.decisionToDFA[mode];
if (dfa.s0 === null) {
if (PORT_DEBUG) {
console.log("matchATN")
}
return this.matchATN(input);
} else {
if (PORT_DEBUG) {
console.log("execATN")
}
var res = this.execATN(input, dfa.s0);
return res;
}
} finally {
if (PORT_DEBUG) {
console.log("FINALLY")
}
input.release(mark);
}
};
@ -233,7 +241,9 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
s = target; // flip; current DFA target becomes new src/from state
}
if (PORT_DEBUG) {
console.log("Done with execATN loop")
}
return this.failOrAccept(this.prevAccept, input, s.configs, t);
};
@ -298,7 +308,9 @@ LexerATNSimulator.prototype.failOrAccept = function(prevAccept, input, reach, t)
this.accept(input, lexerActionExecutor, this.startIndex,
prevAccept.index, prevAccept.line, prevAccept.column);
if (PORT_DEBUG) {
console.log("Prevaccept", prevAccept.dfaState.prediction)
}
return prevAccept.dfaState.prediction;
} else {
@ -669,7 +681,9 @@ LexerATNSimulator.prototype.consume = function(input) {
};
LexerATNSimulator.prototype.getTokenName = function(tt) {
if (PORT_DEBUG) {
console.log(tt);
}
if (tt === -1) {
return "EOF";
} else {

View File

@ -313,7 +313,7 @@ function ParserATNSimulator(parser, atn, decisionToDFA, sharedContextCache) {
ParserATNSimulator.prototype = Object.create(ATNSimulator.prototype);
ParserATNSimulator.prototype.constructor = ParserATNSimulator;
ParserATNSimulator.prototype.debug = true;
ParserATNSimulator.prototype.debug = false;
ParserATNSimulator.prototype.debug_list_atn_decisions = false;
ParserATNSimulator.prototype.dfa_debug = false;
ParserATNSimulator.prototype.retry_debug = false;
@ -324,7 +324,9 @@ ParserATNSimulator.prototype.reset = function() {
ParserATNSimulator.prototype.adaptivePredict = function(input, decision, outerContext) {
if (PORT_DEBUG) {
console.log("adaptive predict")
}
if (this.debug || this.debug_list_atn_decisions) {
console.log("adaptivePredict decision " + decision +
@ -1254,7 +1256,9 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
} else {
// we have no context info, just chase follow links (if greedy)
if (this.debug) {
if (PORT_DEBUG) {
console.log("DEBUG 1")
}
console.log("FALLING off rule " + this.getRuleName(config.state.ruleIndex));
}
this.closure_(config, configs, closureBusy, collectPredicates,
@ -1280,7 +1284,9 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
} else {
// else if we have no context info, just chase follow links (if greedy)
if (this.debug) {
if (PORT_DEBUG) {
console.log("DEBUG 2")
}
console.log("FALLING off rule " + this.getRuleName(config.state.ruleIndex));
}
}
@ -1290,7 +1296,9 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
// Do the actual work of walking epsilon edges//
ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon) {
if (PORT_DEBUG) {
console.log("closure_")
}
var p = config.state;
// optimization
if (! p.epsilonOnlyTransitions) {
@ -1303,7 +1311,9 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
var continueCollecting = collectPredicates && !(t instanceof ActionTransition);
var c = this.getEpsilonTarget(config, t, continueCollecting, depth === 0, fullCtx, treatEofAsEpsilon);
if (c!==null) {
if (PORT_DEBUG) {
console.log("DEBUG 1")
}
if (!t.isEpsilon && closureBusy.add(c)!==c){
// avoid infinite recursion for EOF* and EOF+
continue;
@ -1311,7 +1321,9 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
var newDepth = depth;
if ( config.state instanceof RuleStopState) {
if (PORT_DEBUG) {
console.log("DEBUG 2")
}
// target fell off end of rule; mark resulting c as having dipped into outer context
// We can't get here if incoming config was rule stop and we had context
// track how far we dip into outer context. Might
@ -1319,16 +1331,22 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
// preds if this is > 0.
if (closureBusy.add(c)!==c) {
if (PORT_DEBUG) {
console.log("DEBUG 3")
}
// avoid infinite recursion for right-recursive rules
continue;
} else {
if (PORT_DEBUG) {
console.log(c.toString())
console.log(closureBusy.toString())
}
}
if (this._dfa !== null && this._dfa.precedenceDfa) {
if (PORT_DEBUG) {
console.log("DEBUG 4")
}
if (t.outermostPrecedenceReturn === this._dfa.atnStartState.ruleIndex) {
c.precedenceFilterSuppressed = true;
}
@ -1528,7 +1546,9 @@ ParserATNSimulator.prototype.getConflictingAltsOrUniqueAlt = function(configs) {
ParserATNSimulator.prototype.getTokenName = function( t) {
if (PORT_DEBUG) {
console.log("Get token name")
}
if (t===Token.EOF) {
return "EOF";

View File

@ -245,26 +245,35 @@ DefaultErrorStrategy.prototype.sync = function(recognizer) {
return;
}
if (PORT_DEBUG) {
console.log("STATE" + recognizer.state)
}
var s = recognizer._interp.atn.states[recognizer.state];
var la = recognizer.getTokenStream().LA(1);
if (PORT_DEBUG) {
console.log("LA" + la);
}
// try cheaper subset first; might get lucky. seems to shave a wee bit off
if (la===Token.EOF || recognizer.atn.nextTokens(s).contains(la)) {
if (PORT_DEBUG) {
console.log("OK1")
}
return;
}
// Return but don't end recovery. only do that upon valid token match
if(recognizer.isExpectedToken(la)) {
if (PORT_DEBUG) {
console.log("OK2")
}
return;
}
if (PORT_DEBUG) {
console.log("LA" + la)
// console.log(recognizer.GetATN().nextTokens(s, nil))
}
switch (s.stateType) {
case ATNState.BLOCK_START:

View File

@ -1 +1 @@
1 + 2 = 3 + 5
1 + 2 + 32 + (1 + 2) = 3 + 5

View File

@ -1,3 +1,5 @@
PORT_DEBUG = false
var antlr4 = require("./antlr4/index"),
tree = antlr4.tree
ArithmeticLexer = require("./ArithmeticLexer").ArithmeticLexer,