Remove JavaScript PORT_DEBUG, re-enable runtime tests for other language targets (#69)

* Re-enable runtime tests for other language targets

* Fix typo in js code

* Remove PORT_DEBUG statements

* Fix missing

* Fix testAltNum
Peter Boyer 2016-10-13 17:06:22 -04:00 committed by GitHub
parent 5cbc1b0ba8
commit 2444386091
41 changed files with 222 additions and 439 deletions

View File

@ -70,13 +70,12 @@
<version>2.12.4</version>
<configuration>
<includes>
<!--<include>**/csharp/Test*.java</include>-->
<!--<include>**/java/Test*.java</include>-->
<include>**/csharp/Test*.java</include>
<include>**/java/Test*.java</include>
<include>**/go/Test*.java</include>
<!--<include>**/javascript/node/Test*.java</include>-->
<!--<include>**/python2/Test*.java</include> -->
<!--<include>**/python3/Test*.java</include>-->
<include>**/javascript/node/Test*.java</include>
<include>**/python2/Test*.java</include>
<include>**/python3/Test*.java</include>
</includes>
</configuration>
</plugin>

View File

@ -232,7 +232,7 @@ Result(r) ::= <%Get<r; format="cap">()%>
ParserPropertyMember() ::= <<
@parser::members {
func Property() bool {
func (p *TParser) Property() bool {
return true
}
}
@ -314,6 +314,32 @@ func (p *PositionAdjustingLexerATNSimulator) ResetAcceptPosition(input antlr.Cha
>>
TreeNodeWithAltNumField(X) ::= <<
@parser::members {
type MyRuleNode struct {
*antlr.BaseParserRuleContext
altNum int
}
func NewMyRuleNode(parent antlr.ParserRuleContext, invokingStateNumber int) *MyRuleNode {
return &MyRuleNode{
BaseParserRuleContext : antlr.NewBaseParserRuleContext(parent, invokingStateNumber),
}
}
func (m *MyRuleNode) GetAltNumber() int {
return m.altNum
}
func (m *MyRuleNode) SetAltNumber(altNum int) {
m.altNum = altNum
}
}
>>
BasicListener(notused) ::= <<
type LeafListener struct {
*BaseTListener

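The TreeNodeWithAltNumField template above leans on Go struct embedding: MyRuleNode embeds *antlr.BaseParserRuleContext, inherits every promoted method it does not redefine, and overrides only GetAltNumber and SetAltNumber. A runtime-free sketch of that pattern, assuming stand-in types instead of the antlr package (names here are illustrative, not the generated ones):

package main

import "fmt"

const atnInvalidAltNumber = 0 // stand-in for antlr.ATNInvalidAltNumber

// baseRuleContext stands in for antlr.BaseParserRuleContext: by default,
// alternative numbers are simply not tracked.
type baseRuleContext struct{}

func (*baseRuleContext) GetAltNumber() int { return atnInvalidAltNumber }
func (*baseRuleContext) SetAltNumber(int)  {}

// myRuleNode mirrors MyRuleNode from the template: embed the base context,
// override just the two alt-number accessors.
type myRuleNode struct {
	*baseRuleContext
	altNum int
}

func newMyRuleNode() *myRuleNode {
	return &myRuleNode{baseRuleContext: &baseRuleContext{}}
}

func (m *myRuleNode) GetAltNumber() int    { return m.altNum }
func (m *myRuleNode) SetAltNumber(alt int) { m.altNum = alt }

func main() {
	n := newMyRuleNode()
	n.SetAltNumber(3)
	fmt.Println(n.GetAltNumber())                    // 3
	fmt.Println((&baseRuleContext{}).GetAltNumber()) // 0: untracked by default
}

Because the overrides shadow the promoted methods, code that only sees the base type keeps its old behaviour, while testAltNum's contexts retain the predicted alternative.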
View File

@ -27,7 +27,7 @@ Errors() ::= ""
grammar(grammarName) ::= <<
grammar <grammarName>;
<ParserPropertyMember()>
a : {Property()}? ID {<writeln("\"valid\"")>}
a : {$parser.Property()}? ID {<writeln("\"valid\"")>}
;
ID : 'a'..'z'+ ;
WS : (' '|'\n') -> skip ;

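This descriptor change pairs with the receiver added in Go.test.stg above: a function declared inside @parser::members without a receiver is package-level, so an action rewritten against the parser instance cannot reach it. In the Go target the $parser reference presumably renders as the generated rule method's receiver (p), which is why Property must be a method on *TParser. A minimal sketch of the distinction, assuming a hand-written TParser stand-in rather than generated code:

package main

import "fmt"

// TParser stands in for the generated parser type.
type TParser struct{}

// With a receiver, Property is a method on *TParser, so a predicate that is
// emitted as p.Property() resolves correctly.
func (p *TParser) Property() bool { return true }

// Without a receiver the function is only reachable as a package-level call,
// not through the parser value.
func looseProperty() bool { return true }

func main() {
	p := &TParser{}
	fmt.Println(p.Property(), looseProperty()) // true true
}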
View File

@ -2,6 +2,7 @@
package org.antlr.v4.test.runtime.csharp;
import org.junit.Test;
import org.junit.Ignore;
@SuppressWarnings("unused")
public class TestParseTrees extends BaseTest {

View File

@ -529,14 +529,14 @@ public class TestParserExec extends BaseTest {
@Test
public void testParserProperty() throws Exception {
mkdir(tmpdir);
StringBuilder grammarBuilder = new StringBuilder(155);
StringBuilder grammarBuilder = new StringBuilder(163);
grammarBuilder.append("grammar T;\n");
grammarBuilder.append("@members {\n");
grammarBuilder.append("bool Property() {\n");
grammarBuilder.append(" return true;\n");
grammarBuilder.append("}\n");
grammarBuilder.append("}\n");
grammarBuilder.append("a : {Property()}? ID {Console.WriteLine(\"valid\");}\n");
grammarBuilder.append("a : {$parser.Property()}? ID {Console.WriteLine(\"valid\");}\n");
grammarBuilder.append(" ;\n");
grammarBuilder.append("ID : 'a'..'z'+ ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");

View File

@ -1853,6 +1853,52 @@ public class TestLeftRecursion extends BaseTest {
assertEquals("(prog (statement (letterA a)) (statement (letterA a)) <EOF>)\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
@Test
public void testPrefixAndOtherAlt_1() throws Exception {
mkdir(parserpkgdir);
StringBuilder grammarBuilder = new StringBuilder(218);
grammarBuilder.append("grammar T;\n");
grammarBuilder.append("s @after {fmt.Println($ctx.ToStringTree(nil, p))} : expr EOF ; \n");
grammarBuilder.append("expr : literal\n");
grammarBuilder.append(" | op expr\n");
grammarBuilder.append(" | expr op expr\n");
grammarBuilder.append(" ;\n");
grammarBuilder.append("literal : '-'? Integer ;\n");
grammarBuilder.append("op : '+' | '-' ;\n");
grammarBuilder.append("Integer : [0-9]+ ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="-1";
String found = execParser("T.g4", grammar, "TParser", "TLexer",
"TListener", "TVisitor", "s", input, false);
assertEquals("(s (expr (literal - 1)) <EOF>)\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
@Test
public void testPrefixAndOtherAlt_2() throws Exception {
mkdir(parserpkgdir);
StringBuilder grammarBuilder = new StringBuilder(218);
grammarBuilder.append("grammar T;\n");
grammarBuilder.append("s @after {fmt.Println($ctx.ToStringTree(nil, p))} : expr EOF ; \n");
grammarBuilder.append("expr : literal\n");
grammarBuilder.append(" | op expr\n");
grammarBuilder.append(" | expr op expr\n");
grammarBuilder.append(" ;\n");
grammarBuilder.append("literal : '-'? Integer ;\n");
grammarBuilder.append("op : '+' | '-' ;\n");
grammarBuilder.append("Integer : [0-9]+ ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="-1 + -1";
String found = execParser("T.g4", grammar, "TParser", "TLexer",
"TListener", "TVisitor", "s", input, false);
assertEquals("(s (expr (expr (literal - 1)) (op +) (expr (literal - 1))) <EOF>)\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
@Test

View File

@ -55,6 +55,63 @@ public class TestParseTrees extends BaseTest {
assertEquals("(a y)\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
@Test
public void testAltNum() throws Exception {
mkdir(parserpkgdir);
StringBuilder grammarBuilder = new StringBuilder(640);
grammarBuilder.append("grammar T;\n");
grammarBuilder.append("\n");
grammarBuilder.append("options { contextSuperClass=MyRuleNode; }\n");
grammarBuilder.append("\n");
grammarBuilder.append("@parser::members {\n");
grammarBuilder.append("\n");
grammarBuilder.append("type MyRuleNode struct {\n");
grammarBuilder.append(" *antlr.BaseParserRuleContext\n");
grammarBuilder.append("\n");
grammarBuilder.append(" altNum int\n");
grammarBuilder.append("}\n");
grammarBuilder.append("\n");
grammarBuilder.append("func NewMyRuleNode(parent antlr.ParserRuleContext, invokingStateNumber int) *MyRuleNode {\n");
grammarBuilder.append(" return &MyRuleNode{\n");
grammarBuilder.append(" BaseParserRuleContext : antlr.NewBaseParserRuleContext(parent, invokingStateNumber),\n");
grammarBuilder.append(" }\n");
grammarBuilder.append("}\n");
grammarBuilder.append("\n");
grammarBuilder.append("func (m *MyRuleNode) GetAltNumber() int {\n");
grammarBuilder.append(" return m.altNum\n");
grammarBuilder.append("}\n");
grammarBuilder.append("\n");
grammarBuilder.append("func (m *MyRuleNode) SetAltNumber(altNum int) {\n");
grammarBuilder.append(" m.altNum = altNum\n");
grammarBuilder.append("}\n");
grammarBuilder.append("\n");
grammarBuilder.append("}\n");
grammarBuilder.append("\n");
grammarBuilder.append("\n");
grammarBuilder.append("s\n");
grammarBuilder.append("@init {\n");
grammarBuilder.append("p.BuildParseTrees = true\n");
grammarBuilder.append("}\n");
grammarBuilder.append("@after {\n");
grammarBuilder.append("fmt.Println($r.ctx.ToStringTree(nil, p))\n");
grammarBuilder.append("}\n");
grammarBuilder.append(" : r=a ;\n");
grammarBuilder.append("\n");
grammarBuilder.append("a : 'f'\n");
grammarBuilder.append(" | 'g'\n");
grammarBuilder.append(" | 'x' b 'z'\n");
grammarBuilder.append(" ;\n");
grammarBuilder.append("b : 'e' {} | 'y'\n");
grammarBuilder.append(" ;");
String grammar = grammarBuilder.toString();
String input ="xyz";
String found = execParser("T.g4", grammar, "TParser", "TLexer",
"TListener", "TVisitor", "s", input, false);
assertEquals("(a:3 x (b:2 y) z)\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
@Test

View File

@ -556,14 +556,14 @@ public class TestParserExec extends BaseTest {
@Test
public void testParserProperty() throws Exception {
mkdir(parserpkgdir);
StringBuilder grammarBuilder = new StringBuilder(160);
StringBuilder grammarBuilder = new StringBuilder(181);
grammarBuilder.append("grammar T;\n");
grammarBuilder.append("@parser::members {\n");
grammarBuilder.append("func Property() bool {\n");
grammarBuilder.append("func (p *TParser) Property() bool {\n");
grammarBuilder.append(" return true\n");
grammarBuilder.append("}\n");
grammarBuilder.append("}\n");
grammarBuilder.append("a : {Property()}? ID {fmt.Println(\"valid\")}\n");
grammarBuilder.append("a : {$parser.Property()}? ID {fmt.Println(\"valid\")}\n");
grammarBuilder.append(" ;\n");
grammarBuilder.append("ID : 'a'..'z'+ ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");

View File

@ -1,10 +1,10 @@
/* This file is generated by TestGenerator, any edits will be overwritten by the next generation. */
package org.antlr.v4.test.runtime.java;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.*;
public class TestParseTrees extends BaseTest {

View File

@ -639,14 +639,14 @@ public class TestParserExec extends BaseTest {
public void testParserProperty() throws Exception {
mkdir(tmpdir);
StringBuilder grammarBuilder = new StringBuilder(160);
StringBuilder grammarBuilder = new StringBuilder(168);
grammarBuilder.append("grammar T;\n");
grammarBuilder.append("@members {\n");
grammarBuilder.append("boolean Property() {\n");
grammarBuilder.append(" return true;\n");
grammarBuilder.append("}\n");
grammarBuilder.append("}\n");
grammarBuilder.append("a : {Property()}? ID {System.out.println(\"valid\");}\n");
grammarBuilder.append("a : {$parser.Property()}? ID {System.out.println(\"valid\");}\n");
grammarBuilder.append(" ;\n");
grammarBuilder.append("ID : 'a'..'z'+ ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");

View File

@ -1,10 +1,10 @@
/* This file is generated by TestGenerator, any edits will be overwritten by the next generation. */
package org.antlr.v4.test.runtime.javascript.node;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.*;
@SuppressWarnings("unused")
public class TestParseTrees extends BaseTest {

View File

@ -585,14 +585,14 @@ public class TestParserExec extends BaseTest {
@Test
public void testParserProperty() throws Exception {
mkdir(tmpdir);
StringBuilder grammarBuilder = new StringBuilder(163);
StringBuilder grammarBuilder = new StringBuilder(171);
grammarBuilder.append("grammar T;\n");
grammarBuilder.append("@members {\n");
grammarBuilder.append("this.Property = function() {\n");
grammarBuilder.append(" return true;\n");
grammarBuilder.append("}\n");
grammarBuilder.append("}\n");
grammarBuilder.append("a : {Property()}? ID {console.log(\"valid\");}\n");
grammarBuilder.append("a : {$parser.Property()}? ID {console.log(\"valid\");}\n");
grammarBuilder.append(" ;\n");
grammarBuilder.append("ID : 'a'..'z'+ ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");

View File

@ -1,10 +1,9 @@
/* This file is generated by TestGenerator, any edits will be overwritten by the next generation. */
package org.antlr.v4.test.runtime.python2;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.*;
@SuppressWarnings("unused")
public class TestParseTrees extends BasePython2Test {

View File

@ -666,14 +666,14 @@ public class TestParserExec extends BasePython2Test {
public void testParserProperty() throws Exception {
mkdir(tmpdir);
StringBuilder grammarBuilder = new StringBuilder(145);
StringBuilder grammarBuilder = new StringBuilder(153);
grammarBuilder.append("grammar T;\n");
grammarBuilder.append("@members {\n");
grammarBuilder.append("def Property(self):\n");
grammarBuilder.append(" return True\n");
grammarBuilder.append("\n");
grammarBuilder.append("}\n");
grammarBuilder.append("a : {Property()}? ID {print(\"valid\")}\n");
grammarBuilder.append("a : {$parser.Property()}? ID {print(\"valid\")}\n");
grammarBuilder.append(" ;\n");
grammarBuilder.append("ID : 'a'..'z'+ ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");

View File

@ -1,10 +1,9 @@
/* This file is generated by TestGenerator, any edits will be overwritten by the next generation. */
package org.antlr.v4.test.runtime.python3;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.*;
@SuppressWarnings("unused")
public class TestParseTrees extends BasePython3Test {

View File

@ -666,14 +666,14 @@ public class TestParserExec extends BasePython3Test {
public void testParserProperty() throws Exception {
mkdir(tmpdir);
StringBuilder grammarBuilder = new StringBuilder(145);
StringBuilder grammarBuilder = new StringBuilder(153);
grammarBuilder.append("grammar T;\n");
grammarBuilder.append("@members {\n");
grammarBuilder.append("def Property(self):\n");
grammarBuilder.append(" return True\n");
grammarBuilder.append("\n");
grammarBuilder.append("}\n");
grammarBuilder.append("a : {Property()}? ID {print(\"valid\")}\n");
grammarBuilder.append("a : {$parser.Property()}? ID {print(\"valid\")}\n");
grammarBuilder.append(" ;\n");
grammarBuilder.append("ID : 'a'..'z'+ ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");

View File

@ -494,6 +494,7 @@ func (p *BaseParser) ExitRule() {
}
func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) {
localctx.SetAltNumber(altNum)
// if we have Newlocalctx, make sure we replace existing ctx
// that is previous child of parse tree
if p.BuildParseTrees && p.ctx != localctx {

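This single added line is the producer side of the alt-number support exercised by testAltNum: whenever the parser commits to an outer alternative, it stamps that alternative onto the current context. With the default BaseRuleContext the call is a no-op (see RuleContext.go below), so only a contextSuperClass that stores the value, such as MyRuleNode, ever observes it. A compressed, hypothetical sketch of the call site:

package main

import "fmt"

// altNumberSetter is the only capability this sketch needs from a context.
type altNumberSetter interface{ SetAltNumber(altNum int) }

// recordingCtx stands in for a contextSuperClass that keeps the value.
type recordingCtx struct{ alt int }

func (c *recordingCtx) SetAltNumber(altNum int) { c.alt = altNum }

// enterOuterAlt mirrors the line added to BaseParser.EnterOuterAlt:
// record the chosen alternative before the parse-tree bookkeeping runs.
func enterOuterAlt(localctx altNumberSetter, altNum int) {
	localctx.SetAltNumber(altNum)
}

func main() {
	ctx := &recordingCtx{}
	enterOuterAlt(ctx, 3)
	fmt.Println(ctx.alt) // 3
}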
View File

@ -3,7 +3,6 @@ package antlr
import (
"reflect"
"strconv"
// "fmt"
)
type ParserRuleContext interface {
@ -36,7 +35,6 @@ type BaseParserRuleContext struct {
}
func NewBaseParserRuleContext(parent ParserRuleContext, invokingStateNumber int) *BaseParserRuleContext {
prc := new(BaseParserRuleContext)
prc.BaseRuleContext = NewBaseRuleContext(parent, invokingStateNumber)
@ -56,7 +54,6 @@ func NewBaseParserRuleContext(parent ParserRuleContext, invokingStateNumber int)
prc.exception = nil
return prc
}
func (prc *BaseParserRuleContext) SetException(e RecognitionException) {

View File

@ -30,6 +30,9 @@ type RuleContext interface {
GetRuleIndex() int
IsEmpty() bool
GetAltNumber() int
SetAltNumber(altNumber int)
String([]string, RuleContext) string
}
@ -82,6 +85,12 @@ func (b *BaseRuleContext) GetRuleIndex() int {
return b.RuleIndex
}
func (b *BaseRuleContext) GetAltNumber() int {
return ATNInvalidAltNumber
}
func (b *BaseRuleContext) SetAltNumber(altNumber int) {}
// A context is empty if there is no invoking state meaning nobody call
// current context.
func (b *BaseRuleContext) IsEmpty() bool {

View File

@ -34,17 +34,23 @@ func TreesStringTree(tree Tree, ruleNames []string, recog Recognizer) string {
}
func TreesGetNodeText(t Tree, ruleNames []string, recog Parser) string {
if recog != nil {
ruleNames = recog.GetRuleNames()
}
if ruleNames != nil {
if t2, ok := t.(RuleNode); ok {
return ruleNames[t2.GetRuleContext().GetRuleIndex()]
} else if t2, ok := t.(ErrorNode); ok {
switch t2 := t.(type) {
case RuleNode:
t3 := t2.GetRuleContext()
var altNumber = t3.GetAltNumber()
if altNumber != ATNInvalidAltNumber {
return fmt.Sprintf("%s:%d", ruleNames[t3.GetRuleIndex()], altNumber)
}
return ruleNames[t3.GetRuleIndex()]
case ErrorNode:
return fmt.Sprint(t2)
} else if t2, ok := t.(TerminalNode); ok {
case TerminalNode:
if t2.GetSymbol() != nil {
return t2.GetSymbol().GetText()
}
@ -83,7 +89,7 @@ func TreesgetAncestors(t Tree) []Tree {
return ancestors
}
func TreesfindAllTokenNodes(t ParseTree, ttype int) []ParseTree {
func TreesFindAllTokenNodes(t ParseTree, ttype int) []ParseTree {
return TreesfindAllNodes(t, ttype, true)
}
@ -118,10 +124,10 @@ func TreesFindAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTre
}
}
func Treesdescendants(t ParseTree) []ParseTree {
func TreesDescendants(t ParseTree) []ParseTree {
var nodes = []ParseTree{t}
for i := 0; i < t.GetChildCount(); i++ {
nodes = append(nodes, Treesdescendants(t.GetChild(i).(ParseTree))...)
nodes = append(nodes, TreesDescendants(t.GetChild(i).(ParseTree))...)
}
return nodes
}
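The rewritten TreesGetNodeText is what turns the stored alternative into the (a:3 x (b:2 y) z) strings expected by testAltNum: a type switch picks the node kind, and a rule node whose GetAltNumber() is not ATNInvalidAltNumber is printed as ruleName:altNumber. A runtime-free sketch of just that formatting decision, with simplified stand-in node types rather than the real Tree interfaces:

package main

import "fmt"

const atnInvalidAltNumber = 0 // stand-in for antlr.ATNInvalidAltNumber

// Minimal stand-ins for the node kinds the real type switch handles.
type ruleNode struct {
	ruleName  string
	altNumber int
}

type terminalNode struct{ text string }

// nodeText mirrors the shape of the new TreesGetNodeText: rule nodes are
// optionally tagged with their alternative number, terminals print their text.
func nodeText(t interface{}) string {
	switch n := t.(type) {
	case *ruleNode:
		if n.altNumber != atnInvalidAltNumber {
			return fmt.Sprintf("%s:%d", n.ruleName, n.altNumber)
		}
		return n.ruleName
	case *terminalNode:
		return n.text
	}
	return fmt.Sprint(t)
}

func main() {
	fmt.Println(nodeText(&ruleNode{ruleName: "a", altNumber: 3})) // "a:3", as in testAltNum
	fmt.Println(nodeText(&ruleNode{ruleName: "expr"}))            // plain "expr" when alts are untracked
	fmt.Println(nodeText(&terminalNode{text: "x"}))               // "x"
}

The hunk also exports the former lowercase helpers (TreesFindAllTokenNodes, TreesDescendants), which changes their visibility only, not their behaviour.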

View File

@ -129,16 +129,10 @@ BufferedTokenStream.prototype.consume = function() {
// not yet initialized
skipEofCheck = false;
}
if (PORT_DEBUG) {
console.log("consume 1")
}
if (!skipEofCheck && this.LA(1) === Token.EOF) {
throw "cannot consume EOF";
}
if (this.sync(this.index + 1)) {
if (PORT_DEBUG) {
console.log("consume 2")
}
this.index = this.adjustSeekIndex(this.index + 1);
}
};
@ -153,10 +147,6 @@ BufferedTokenStream.prototype.sync = function(i) {
var n = i - this.tokens.length + 1; // how many more elements we need?
if (n > 0) {
var fetched = this.fetch(n);
if (PORT_DEBUG) {
console.log("sync done")
}
return fetched >= n;
}
return true;
@ -172,9 +162,6 @@ BufferedTokenStream.prototype.fetch = function(n) {
}
for (var i = 0; i < n; i++) {
var t = this.tokenSource.nextToken();
if (PORT_DEBUG) {
console.log("fetch loop")
}
t.tokenIndex = this.tokens.length;
this.tokens.push(t);
if (t.type === Token.EOF) {
@ -182,10 +169,6 @@ BufferedTokenStream.prototype.fetch = function(n) {
return i + 1;
}
}
if (PORT_DEBUG) {
console.log("fetch done")
}
return n;
};
@ -194,11 +177,9 @@ BufferedTokenStream.prototype.getTokens = function(start, stop, types) {
if (types === undefined) {
types = null;
}
if (start < 0 || stop < 0) {
return null;
}
this.lazyInit();
var subset = [];
if (stop >= this.tokens.length) {
@ -213,7 +194,6 @@ BufferedTokenStream.prototype.getTokens = function(start, stop, types) {
subset.push(t);
}
}
return subset;
};

View File

@ -73,11 +73,6 @@ CommonTokenFactory.prototype.constructor = CommonTokenFactory;
CommonTokenFactory.DEFAULT = new CommonTokenFactory();
CommonTokenFactory.prototype.create = function(source, type, text, channel, start, stop, line, column) {
if (PORT_DEBUG) {
console.log("Token factory creating: " + text)
}
var t = new CommonToken(source, type, channel, start, stop);
t.line = line;
t.column = column;
@ -90,11 +85,6 @@ CommonTokenFactory.prototype.create = function(source, type, text, channel, star
};
CommonTokenFactory.prototype.createThin = function(type, text) {
if (PORT_DEBUG) {
console.log("Token factory creating: " + text)
}
var t = new CommonToken(null, type);
t.text = text;
return t;

View File

@ -41,10 +41,6 @@ function FileStream(fileName) {
var data = fs.readFileSync(fileName, "utf8");
InputStream.call(this, data);
this.fileName = fileName;
if (PORT_DEBUG) {
console.log(data);
}
return this;
}

View File

@ -101,9 +101,6 @@ InputStream.prototype.mark = function() {
};
InputStream.prototype.release = function(marker) {
if (PORT_DEBUG) {
console.log("RELEASING")
}
};
// consume() ahead until p==_index; can't just set p=_index as we must

View File

@ -33,7 +33,7 @@ function IntervalSet() {
this.readOnly = false;
}
IntervalSet.prototype.first = function() {
IntervalSet.prototype.first = function(v) {
if (this.intervals === null || this.intervals.length===0) {
return Token.INVALID_TYPE;
} else {
@ -50,9 +50,6 @@ IntervalSet.prototype.addRange = function(l, h) {
};
IntervalSet.prototype.addInterval = function(v) {
if (PORT_DEBUG) {
console.log("addInterval" + v.toString())
}
if (this.intervals === null) {
this.intervals = [];
this.intervals.push(v);
@ -83,13 +80,7 @@ IntervalSet.prototype.addInterval = function(v) {
};
IntervalSet.prototype.addSet = function(other) {
if (PORT_DEBUG) {
console.log("addSet")
}
if (other.intervals !== null) {
if (PORT_DEBUG) {
console.log(other.intervals.length)
}
for (var k = 0; k < other.intervals.length; k++) {
var i = other.intervals[k];
this.addInterval(new Interval(i.start, i.stop));
@ -105,11 +96,11 @@ IntervalSet.prototype.reduce = function(k) {
var r = this.intervals[k + 1];
// if r contained in l
if (l.stop >= r.stop) {
this.intervals.pop(k + 1); // what is intended here? pop takes no args
this.intervals.pop(k + 1);
this.reduce(k);
} else if (l.stop >= r.start) {
this.intervals[k] = new Interval(l.start, r.stop);
this.intervals.pop(k + 1); // what is intended here? pop takes no args
this.intervals.pop(k + 1);
}
}
};
@ -274,11 +265,6 @@ IntervalSet.prototype.toIndexString = function() {
IntervalSet.prototype.toTokenString = function(literalNames, symbolicNames) {
console.log(symbolicNames)
console.log(literalNames)
console.log(symbolicNames.length)
console.log(literalNames.length)
console.log(this.toString())
var names = [];
for (var i = 0; i < this.intervals.length; i++) {
var v = this.intervals[i];

View File

@ -110,19 +110,7 @@ LL1Analyzer.prototype.LOOK = function(s, stopState, ctx) {
var seeThruPreds = true; // ignore preds; get all lookahead
ctx = ctx || null;
var lookContext = ctx!==null ? predictionContextFromRuleContext(s.atn, ctx) : null;
if (PORT_DEBUG) {
console.log("DEBUG 5")
console.log(s.toString())
console.log(stopState)
console.log(lookContext)
console.log(r.toString())
console.log(seeThruPreds)
console.log("=====")
}
this._LOOK(s, stopState, lookContext, r, new Set(), new BitSet(), seeThruPreds, true);
if (PORT_DEBUG) {
console.log(r.toString())
}
return r;
};
@ -163,9 +151,6 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
}
lookBusy.add(c);
if (s === stopState) {
if (PORT_DEBUG) {
console.log("DEBUG 6")
}
if (ctx ===null) {
look.addOne(Token.EPSILON);
return;
@ -183,9 +168,6 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
return;
}
if (ctx !== PredictionContext.EMPTY) {
if (PORT_DEBUG) {
console.log("DEBUG 7")
}
// run thru all possible stack tops in ctx
for(var i=0; i<ctx.length; i++) {
var returnState = this.atn.states[ctx.getReturnState(i)];
@ -205,11 +187,6 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
for(var j=0; j<s.transitions.length; j++) {
var t = s.transitions[j];
if (t.constructor === RuleTransition) {
if (PORT_DEBUG) {
console.log("DEBUG 8")
}
if (calledRuleStack.contains(t.target.ruleIndex)) {
continue;
}
@ -220,38 +197,18 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
} finally {
calledRuleStack.remove(t.target.ruleIndex);
}
if (PORT_DEBUG) {
console.log(look.toString())
}
} else if (t instanceof AbstractPredicateTransition ) {
if (PORT_DEBUG) {
console.log("DEBUG 9")
}
if (seeThruPreds) {
this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
} else {
look.addOne(LL1Analyzer.HIT_PRED);
}
} else if( t.isEpsilon) {
if (PORT_DEBUG) {
console.log("DEBUG 10")
}
this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
} else if (t.constructor === WildcardTransition) {
if (PORT_DEBUG) {
console.log("DEBUG 11")
}
look.addRange( Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType );
} else {
if (PORT_DEBUG) {
console.log("DEBUG 12")
}
var set = t.label;
if (PORT_DEBUG) {
console.log(set.toString())
}
if (set !== null) {
if (t instanceof NotSetTransition) {
set = set.complement(Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType);

View File

@ -152,18 +152,9 @@ Lexer.prototype.nextToken = function() {
try {
ttype = this._interp.match(this._input, this._mode);
} catch (e) {
if (PORT_DEBUG) {
console.log("RecognitionException")
}
this.notifyListeners(e); // report error
this.recover(e);
}
if (PORT_DEBUG) {
console.log("ttype", ttype)
}
if (PORT_DEBUG) {
console.log("curType", this._type)
}
if (this._input.LA(1) === Token.EOF) {
this._hitEOF = true;
}
@ -171,28 +162,13 @@ Lexer.prototype.nextToken = function() {
this._type = ttype;
}
if (this._type === Lexer.SKIP) {
if (PORT_DEBUG) {
console.log("skip")
}
continueOuter = true;
break;
}
if (this._type !== Lexer.MORE) {
if (PORT_DEBUG) {
console.log("no more")
}
break;
}
if (PORT_DEBUG) {
console.log("lex inner loop")
}
}
if (PORT_DEBUG) {
console.log("lex loop")
}
if (continueOuter) {
continue;
}
@ -219,9 +195,6 @@ Lexer.prototype.skip = function() {
};
Lexer.prototype.more = function() {
if (PORT_DEBUG) {
console.log("more")
}
this._type = Lexer.MORE;
};
@ -284,9 +257,6 @@ Lexer.prototype.emitToken = function(token) {
// custom Token objects or provide a new factory.
// /
Lexer.prototype.emit = function() {
if (PORT_DEBUG) {
console.log("emit")
}
var t = this._factory.create(this._tokenFactorySourcePair, this._type,
this._text, this._channel, this._tokenStartCharIndex, this
.getCharIndex() - 1, this._tokenStartLine,
@ -296,9 +266,6 @@ Lexer.prototype.emit = function() {
};
Lexer.prototype.emitEOF = function() {
if (PORT_DEBUG) {
console.log("emitEOF")
}
var cpos = this.column;
var lpos = this.line;
var eof = this._factory.create(this._tokenFactorySourcePair, Token.EOF,

View File

@ -133,15 +133,7 @@ Parser.prototype.reset = function() {
// mismatched symbol
Parser.prototype.match = function(ttype) {
if (PORT_DEBUG) {
console.log("get current token")
}
var t = this.getCurrentToken();
if (PORT_DEBUG) {
console.log("TOKEN IS " + t.text)
}
if (t.type === ttype) {
this._errHandler.reportMatch(this);
this.consume();
@ -154,11 +146,6 @@ Parser.prototype.match = function(ttype) {
this._ctx.addErrorNode(t);
}
}
if (PORT_DEBUG) {
console.log("Match done")
}
return t;
};
// Match current input symbol as a wildcard. If the symbol type matches
@ -411,15 +398,8 @@ Parser.prototype.notifyErrorListeners = function(msg, offendingToken, err) {
Parser.prototype.consume = function() {
var o = this.getCurrentToken();
if (o.type !== Token.EOF) {
if (PORT_DEBUG) {
console.log("Consuming")
}
this.getInputStream().consume();
if (PORT_DEBUG) {
console.log("done consuming")
}
}
var hasListener = this._parseListeners !== null && this._parseListeners.length > 0;
if (this.buildParseTrees || hasListener) {
var node;

View File

@ -160,13 +160,9 @@ Recognizer.prototype.precpred = function(localctx , precedence) {
Object.defineProperty(Recognizer.prototype, "state", {
get : function() {
return this._stateNumber;
},
set : function(state) {
if (PORT_DEBUG) {
console.log("SETTING STATE " + state + " from " + this._stateNumber )
}
this._stateNumber = state;
}
});

View File

@ -75,15 +75,8 @@ ATN.prototype.nextTokensInContext = function(s, ctx) {
// rule.
ATN.prototype.nextTokensNoContext = function(s) {
if (s.nextTokenWithinRule !== null ) {
if (PORT_DEBUG) {
console.log("DEBUG A")
}
return s.nextTokenWithinRule;
}
if (PORT_DEBUG) {
console.log("DEBUG 2")
console.log(this.nextTokensInContext(s, null).toString())
}
s.nextTokenWithinRule = this.nextTokensInContext(s, null);
s.nextTokenWithinRule.readOnly = true;
return s.nextTokenWithinRule;

View File

@ -155,7 +155,7 @@ ATNConfigSet.prototype.getPredicates = function() {
for (var i = 0; i < this.configs.length; i++) {
var c = this.configs[i].semanticContext;
if (c !== SemanticContext.NONE) {
preds.push(c);
preds.push(c.semanticContext);
}
}
return preds;
@ -241,7 +241,7 @@ ATNConfigSet.prototype.containsFast = function(item) {
if (this.configLookup === null) {
throw "This method is not implemented for readonly sets.";
}
return this.configLookup.contains(item);
return this.configLookup.containsFast(item);
};
ATNConfigSet.prototype.clear = function() {

View File

@ -168,7 +168,7 @@ ATNDeserializer.prototype.checkUUID = function() {
var uuid = this.readUUID();
if (SUPPORTED_UUIDS.indexOf(uuid)<0) {
throw ("Could not deserialize ATN with UUID: " + uuid +
" (expected " + SERIALIZED_UUID + " or a legacy UUID).");
" (expected " + SERIALIZED_UUID + " or a legacy UUID).", uuid, SERIALIZED_UUID);
}
this.uuid = uuid;
};

View File

@ -78,7 +78,6 @@ SimState.prototype.reset = function() {
function LexerATNSimulator(recog, atn, decisionToDFA, sharedContextCache) {
ATNSimulator.call(this, atn, sharedContextCache);
this.decisionToDFA = decisionToDFA;
this.recog = recog;
// The current token's starting index into the character stream.
@ -102,8 +101,8 @@ function LexerATNSimulator(recog, atn, decisionToDFA, sharedContextCache) {
LexerATNSimulator.prototype = Object.create(ATNSimulator.prototype);
LexerATNSimulator.prototype.constructor = LexerATNSimulator;
LexerATNSimulator.prototype.debug = false;
LexerATNSimulator.prototype.dfa_debug = false;
LexerATNSimulator.debug = false;
LexerATNSimulator.dfa_debug = false;
LexerATNSimulator.MIN_DFA_EDGE = 0;
LexerATNSimulator.MAX_DFA_EDGE = 127; // forces unicode to stay in ATN
@ -118,11 +117,6 @@ LexerATNSimulator.prototype.copyState = function(simulator) {
};
LexerATNSimulator.prototype.match = function(input, mode) {
if (PORT_DEBUG) {
console.log("MATCH")
}
this.match_calls += 1;
this.mode = mode;
var mark = input.mark();
@ -131,34 +125,11 @@ LexerATNSimulator.prototype.match = function(input, mode) {
this.prevAccept.reset();
var dfa = this.decisionToDFA[mode];
if (dfa.s0 === null) {
if (PORT_DEBUG) {
console.log("matchATN")
}
return this.matchATN(input);
} else {
if (PORT_DEBUG) {
console.log("execATN")
if (this.decisionToDFA[mode] && this.decisionToDFA[mode].s0){
var s = "";
// for (var i= 0; i < this.decisionToDFA[mode].s0.edges.length; i++) {
// if (this.decisionToDFA[mode].s0.edges[i]){
// s += this.decisionToDFA[mode].s0.edges[i].toString();
// } else {
// s += "<nil>";
// }
// }
// s += "]";
console.log("mode", mode, this.decisionToDFA[mode].s0.edges.length)
}
}
var res = this.execATN(input, dfa.s0);
return res;
return this.execATN(input, dfa.s0);
}
} finally {
if (PORT_DEBUG) {
console.log("FINALLY")
}
input.release(mark);
}
};
@ -206,10 +177,6 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
var t = input.LA(1);
var s = ds0; // s is current/from DFA state
if (PORT_DEBUG) {
console.log("enter execATN", t, s.edges ? s.edges.length : 0, input.index, input.size)
}
while (true) { // while more work
if (this.debug) {
console.log("execATN loop starting closure: " + s.configs);
@ -234,11 +201,7 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
// that already has lots of edges out of it. e.g., .* in comments.
// print("Target for:" + str(s) + " and:" + str(t))
var target = this.getExistingTargetState(s, t);
if (PORT_DEBUG) {
console.log(t)
console.log(target != null)
}
// print("Existing:" + str(target))
if (target === null) {
target = this.computeTargetState(input, s, t);
// print("Computed:" + str(target))
@ -251,9 +214,6 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
// position accurately reflect the state of the interpreter at the
// end of the token.
if (t !== Token.EOF) {
if (PORT_DEBUG) {
console.log("not eof", t, Token.EOF)
}
this.consume(input);
}
if (target.isAcceptState) {
@ -265,10 +225,6 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
t = input.LA(1);
s = target; // flip; current DFA target becomes new src/from state
}
if (PORT_DEBUG) {
console.log("Done with execATN loop")
}
return this.failOrAccept(this.prevAccept, input, s.configs, t);
};
@ -290,9 +246,6 @@ LexerATNSimulator.prototype.getExistingTargetState = function(s, t) {
if(target===undefined) {
target = null;
}
if (PORT_DEBUG) {
console.log("len edges", s.edges.length, t, t - LexerATNSimulator.MIN_DFA_EDGE)
}
if (this.debug && target !== null) {
console.log("reuse state " + s.stateNumber + " edge to " + target.stateNumber);
}
@ -329,19 +282,10 @@ LexerATNSimulator.prototype.computeTargetState = function(input, s, t) {
};
LexerATNSimulator.prototype.failOrAccept = function(prevAccept, input, reach, t) {
if (this.prevAccept.dfaState !== null) {
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor;
if (PORT_DEBUG) {
console.log(prevAccept.dfaState.toString())
}
this.accept(input, lexerActionExecutor, this.startIndex,
prevAccept.index, prevAccept.line, prevAccept.column);
if (PORT_DEBUG) {
console.log(prevAccept.dfaState.prediction)
}
return prevAccept.dfaState.prediction;
} else {
// if no accept and EOF is first char, return EOF
@ -360,12 +304,6 @@ LexerATNSimulator.prototype.getReachableConfigSet = function(input, closure,
// this is used to skip processing for configs which have a lower priority
// than a config that already reached an accept state for the same rule
var skipAlt = ATN.INVALID_ALT_NUMBER;
if (PORT_DEBUG) {
console.log("getReachableConfigSet")
console.log("CLOSURE SIZE" + closure.items.length)
}
for (var i = 0; i < closure.items.length; i++) {
var cfg = closure.items[i];
var currentAltReachedAcceptState = (cfg.alt === skipAlt);
@ -373,7 +311,7 @@ LexerATNSimulator.prototype.getReachableConfigSet = function(input, closure,
continue;
}
if (this.debug) {
console.log("testing %s at %s", this.getTokenName(t), cfg
console.log("testing %s at %s\n", this.getTokenName(t), cfg
.toString(this.recog, true));
}
for (var j = 0; j < cfg.state.transitions.length; j++) {
@ -397,9 +335,10 @@ LexerATNSimulator.prototype.getReachableConfigSet = function(input, closure,
}
};
LexerATNSimulator.prototype.accept = function(input, lexerActionExecutor, startIndex, index, line, charPos) {
LexerATNSimulator.prototype.accept = function(input, lexerActionExecutor,
startIndex, index, line, charPos) {
if (this.debug) {
console.log("ACTION %s", lexerActionExecutor);
console.log("ACTION %s\n", lexerActionExecutor);
}
// seek to after last char in token
input.seek(index);
@ -419,9 +358,6 @@ LexerATNSimulator.prototype.getReachableTarget = function(trans, t) {
};
LexerATNSimulator.prototype.computeStartState = function(input, p) {
if (PORT_DEBUG) console.log("Num transitions", p.transitions.length)
var initialContext = PredictionContext.EMPTY;
var configs = new OrderedATNConfigSet();
for (var i = 0; i < p.transitions.length; i++) {
@ -429,7 +365,6 @@ LexerATNSimulator.prototype.computeStartState = function(input, p) {
var cfg = new LexerATNConfig({state:target, alt:i+1, context:initialContext}, null);
this.closure(input, cfg, configs, false, false, false);
}
return configs;
};
@ -447,17 +382,14 @@ LexerATNSimulator.prototype.closure = function(input, config, configs,
if (this.debug) {
console.log("closure(" + config.toString(this.recog, true) + ")");
}
if (config.state instanceof RuleStopState) {
if (this.debug) {
if (this.recog !== null && this.recog.getRuleNames) {
console.log("closure at %s rule stop %s", this.recog.getRuleNames()[config.state.ruleIndex], config);
if (this.recog !== null) {
console.log("closure at %s rule stop %s\n", this.recog.getRuleNames()[config.state.ruleIndex], config);
} else {
console.log("closure at rule stop %s", config);
console.log("closure at rule stop %s\n", config);
}
}
if (config.context === null || config.context.hasEmptyPath()) {
if (config.context === null || config.context.isEmpty()) {
configs.add(config);
@ -602,9 +534,6 @@ LexerATNSimulator.prototype.evaluatePredicate = function(input, ruleIndex,
var index = input.index;
var marker = input.mark();
try {
if (PORT_DEBUG) {
console.log("evalPred")
}
this.consume(input);
return this.recog.sempred(null, ruleIndex, predIndex);
} finally {
@ -711,9 +640,6 @@ LexerATNSimulator.prototype.getText = function(input) {
};
LexerATNSimulator.prototype.consume = function(input) {
if (PORT_DEBUG) {
console.log("consume", input.index, input.size);
}
var curChar = input.LA(1);
if (curChar === "\n".charCodeAt(0)) {
this.line += 1;
@ -725,9 +651,6 @@ LexerATNSimulator.prototype.consume = function(input) {
};
LexerATNSimulator.prototype.getTokenName = function(tt) {
if (PORT_DEBUG) {
console.log(tt);
}
if (tt === -1) {
return "EOF";
} else {

View File

@ -134,34 +134,20 @@ LexerActionExecutor.prototype.fixOffsetBeforeMatch = function(offset) {
// of the token.
// /
LexerActionExecutor.prototype.execute = function(lexer, input, startIndex) {
if (PORT_DEBUG) {
console.log("execute");
console.log("len(lexerActions)",this.lexerActions.length);
}
var requiresSeek = false;
var stopIndex = input.index;
try {
for (var i = 0; i < this.lexerActions.length; i++) {
var lexerAction = this.lexerActions[i];
if (lexerAction instanceof LexerIndexedCustomAction) {
if (PORT_DEBUG) {
console.log("LexerIndexedCustomAction");
}
var offset = lexerAction.offset;
input.seek(startIndex + offset);
lexerAction = lexerAction.action;
requiresSeek = (startIndex + offset) !== stopIndex;
} else if (lexerAction.isPositionDependent) {
if (PORT_DEBUG) {
console.log("posDep");
}
input.seek(stopIndex);
requiresSeek = false;
}
if (PORT_DEBUG) {
console.log("exec");
console.log(lexerAction.toString());
}
lexerAction.execute(lexer);
}
} finally {

View File

@ -318,15 +318,11 @@ ParserATNSimulator.prototype.debug_list_atn_decisions = false;
ParserATNSimulator.prototype.dfa_debug = false;
ParserATNSimulator.prototype.retry_debug = false;
ParserATNSimulator.prototype.reset = function() {
};
ParserATNSimulator.prototype.adaptivePredict = function(input, decision, outerContext) {
if (PORT_DEBUG) {
console.log("adaptive predict")
}
if (this.debug || this.debug_list_atn_decisions) {
console.log("adaptivePredict decision " + decision +
" exec LA(1)==" + this.getLookaheadName(input) +
@ -383,13 +379,7 @@ ParserATNSimulator.prototype.adaptivePredict = function(input, decision, outerCo
// appropriate start state for the precedence level rather
// than simply setting DFA.s0.
//
if (PORT_DEBUG){
console.log("precfilter", s0_closure.toString())
}
s0_closure = this.applyPrecedenceFilter(s0_closure);
if (PORT_DEBUG){
console.log("precfilter", s0_closure.toString())
}
s0 = this.addDFAState(dfa, new DFAState(null, s0_closure));
dfa.setPrecedenceStartState(this.parser.getPrecedence(), s0);
} else {
@ -908,11 +898,6 @@ ParserATNSimulator.prototype.removeAllConfigsNotInRuleStopState = function(confi
};
ParserATNSimulator.prototype.computeStartState = function(p, ctx, fullCtx) {
if (PORT_DEBUG){
console.log("computeStartState")
}
// always at least the implicit call to start rule
var initialContext = predictionContextFromRuleContext(this.atn, ctx);
var configs = new ATNConfigSet(fullCtx);
@ -985,20 +970,10 @@ ParserATNSimulator.prototype.applyPrecedenceFilter = function(configs) {
var config;
var statesFromAlt1 = [];
var configSet = new ATNConfigSet(configs.fullCtx);
if (PORT_DEBUG) {
console.log("len", configs.items.length)
for(var i=0; i<configs.items.length; i++) {
config = configs.items[i];
console.log(config.precedenceFilterSuppressed)
}
}
for(var i=0; i<configs.items.length; i++) {
config = configs.items[i];
// handle alt 1 first
if (config.alt !== 1) {
if (PORT_DEBUG) {
console.log("getalt1")
}
continue;
}
var updatedContext = config.semanticContext.evalPrecedence(this.parser, this._outerContext);
@ -1008,14 +983,8 @@ ParserATNSimulator.prototype.applyPrecedenceFilter = function(configs) {
}
statesFromAlt1[config.state.stateNumber] = config.context;
if (updatedContext !== config.semanticContext) {
if (PORT_DEBUG) {
console.log("add 1")
}
configSet.add(new ATNConfig({semanticContext:updatedContext}, config), this.mergeCache);
} else {
if (PORT_DEBUG) {
console.log("add 2")
}
configSet.add(config, this.mergeCache);
}
}
@ -1023,27 +992,18 @@ ParserATNSimulator.prototype.applyPrecedenceFilter = function(configs) {
config = configs.items[i];
if (config.alt === 1) {
// already handled
if (PORT_DEBUG) {
console.log("getalt2")
}
continue;
}
// In the future, this elimination step could be updated to also
// filter the prediction context for alternatives predicting alt>1
// (basically a graph subtraction algorithm).
if (!config.precedenceFilterSuppressed) {
if (PORT_DEBUG) {
console.log("!precedenceFilterSuppressed")
}
var context = statesFromAlt1[config.state.stateNumber] || null;
if (context!==null && context.equals(config.context)) {
// eliminated
continue;
}
}
if (PORT_DEBUG) {
console.log("add 3", config.precedenceFilterSuppressed)
}
configSet.add(config, this.mergeCache);
}
return configSet;
@ -1291,9 +1251,6 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
} else {
// we have no context info, just chase follow links (if greedy)
if (this.debug) {
if (PORT_DEBUG) {
console.log("DEBUG B")
}
console.log("FALLING off rule " + this.getRuleName(config.state.ruleIndex));
}
this.closure_(config, configs, closureBusy, collectPredicates,
@ -1319,9 +1276,6 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
} else {
// else if we have no context info, just chase follow links (if greedy)
if (this.debug) {
if (PORT_DEBUG) {
console.log("DEBUG 2")
}
console.log("FALLING off rule " + this.getRuleName(config.state.ruleIndex));
}
}
@ -1331,9 +1285,6 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
// Do the actual work of walking epsilon edges//
ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon) {
if (PORT_DEBUG) {
console.log("closure_")
}
var p = config.state;
// optimization
if (! p.epsilonOnlyTransitions) {
@ -1346,20 +1297,12 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
var continueCollecting = collectPredicates && !(t instanceof ActionTransition);
var c = this.getEpsilonTarget(config, t, continueCollecting, depth === 0, fullCtx, treatEofAsEpsilon);
if (c!==null) {
if (PORT_DEBUG) {
console.log("DEBUG 1 ok")
}
if (!t.isEpsilon && closureBusy.add(c)!==c){
// avoid infinite recursion for EOF* and EOF+
continue;
}
var newDepth = depth;
if ( config.state instanceof RuleStopState) {
if (PORT_DEBUG) {
console.log("DEBUG 2")
console.log(closureBusy.toString())
}
// target fell off end of rule; mark resulting c as having dipped into outer context
// We can't get here if incoming config was rule stop and we had context
// track how far we dip into outer context. Might
@ -1367,26 +1310,12 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
// preds if this is > 0.
if (closureBusy.add(c)!==c) {
if (PORT_DEBUG) {
console.log("DEBUG 3", i, p.transitions.length)
}
// avoid infinite recursion for right-recursive rules
continue;
} else {
if (PORT_DEBUG) {
console.log(c.toString())
console.log(closureBusy.toString())
}
}
if (this._dfa !== null && this._dfa.precedenceDfa) {
if (PORT_DEBUG) {
console.log("DEBUG 4")
}
if (t.outermostPrecedenceReturn === this._dfa.atnStartState.ruleIndex) {
if (PORT_DEBUG) {
console.log("precedenceFilterSuppressed")
}
c.precedenceFilterSuppressed = true;
}
}
@ -1395,7 +1324,6 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
configs.dipsIntoOuterContext = true; // TODO: can remove? only care when we add to set per middle of this method
newDepth -= 1;
if (this.debug) {
// console.log((new Error()).stack)
console.log("dips into outer ctx: " + c);
}
} else if (t instanceof RuleTransition) {
@ -1404,16 +1332,9 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
newDepth += 1;
}
}
if (PORT_DEBUG) {
console.log("computeCheckingStopState")
}
this.closureCheckingStopState(c, configs, closureBusy, continueCollecting, fullCtx, newDepth, treatEofAsEpsilon);
}
}
if (PORT_DEBUG) {
console.log("closure_ done")
}
};
ParserATNSimulator.prototype.getRuleName = function( index) {
@ -1591,18 +1512,13 @@ ParserATNSimulator.prototype.getConflictingAltsOrUniqueAlt = function(configs) {
};
ParserATNSimulator.prototype.getTokenName = function( t) {
if (PORT_DEBUG) {
console.log("Get token name")
}
if (t===Token.EOF) {
return "EOF";
}
if( this.parser!==null && this.parser.literalNames!==null) {
if (t >= this.parser.literalNames.length) {
console.log("" + t + " ttype out of range: " + this.parser.literalNames);
// console.log(this.parser.getInputStream().getTokens());
console.log("" + this.parser.getInputStream().getTokens());
} else {
return this.parser.literalNames[t] + "<" + t + ">";
}
@ -1613,14 +1529,14 @@ ParserATNSimulator.prototype.getTokenName = function( t) {
ParserATNSimulator.prototype.getLookaheadName = function(input) {
return this.getTokenName(input.LA(1));
};
// Used for debugging in adaptivePredict around execATN but I cut
// it out for clarity now that alg. works well. We can leave this
// "dead" code for a bit.
//
ParserATNSimulator.prototype.dumpDeadEndConfigs = function(nvae) {
console.log("dead end configs: ");
var decs = nvae.deadEndConfigs;
var decs = nvae.getDeadEndConfigs();
for(var i=0; i<decs.length; i++) {
var c = decs[i];
var trans = "no edges";

View File

@ -352,7 +352,7 @@ function OR(a, b) {
OR.prototype = Object.create(SemanticContext.prototype);
OR.prototype.constructor = OR;
OR.prototype.equals = function(other) {
OR.prototype.constructor = function(other) {
if (this === other) {
return true;
} else if (!(other instanceof OR)) {
@ -402,12 +402,8 @@ OR.prototype.evalPrecedence = function(parser, outerContext) {
return null;
}
var result = null;
operands.forEach(function(o) {
if (result === null) {
result = o
} else {
result = SemanticContext.orContext(result, o);
}
operands.map(function(o) {
return result === null ? o : SemanticContext.orContext(result, o);
});
return result;
};

View File

@ -74,7 +74,7 @@ ConsoleErrorListener.INSTANCE = new ConsoleErrorListener();
// </pre>
//
ConsoleErrorListener.prototype.syntaxError = function(recognizer, offendingSymbol, line, column, msg, e) {
console.log("line " + line + ":" + column + " " + msg);
console.error("line " + line + ":" + column + " " + msg);
};
function ProxyErrorListener(delegates) {

View File

@ -244,37 +244,16 @@ DefaultErrorStrategy.prototype.sync = function(recognizer) {
if (this.inErrorRecoveryMode(recognizer)) {
return;
}
if (PORT_DEBUG) {
console.log("STATE" + recognizer.state)
}
var s = recognizer._interp.atn.states[recognizer.state];
var la = recognizer.getTokenStream().LA(1);
if (PORT_DEBUG) {
console.log("LA" + la);
}
// try cheaper subset first; might get lucky. seems to shave a wee bit off
if (la===Token.EOF || recognizer.atn.nextTokens(s).contains(la)) {
if (PORT_DEBUG) {
console.log("OK1")
}
return;
}
// Return but don't end recovery. only do that upon valid token match
if(recognizer.isExpectedToken(la)) {
if (PORT_DEBUG) {
console.log("OK2")
}
return;
}
if (PORT_DEBUG) {
console.log("LA" + la)
}
switch (s.stateType) {
case ATNState.BLOCK_START:
case ATNState.STAR_BLOCK_START:
@ -582,7 +561,6 @@ DefaultErrorStrategy.prototype.getMissingSymbol = function(recognizer) {
if (current.type===Token.EOF && lookback !== null) {
current = lookback;
}
return recognizer.getTokenFactory().create(current.source,
expectedTokenType, tokenText, Token.DEFAULT_CHANNEL,
-1, -1, current.line, current.column);
@ -802,4 +780,4 @@ BailErrorStrategy.prototype.sync = function(recognizer) {
};
exports.BailErrorStrategy = BailErrorStrategy;
exports.DefaultErrorStrategy = DefaultErrorStrategy;
exports.DefaultErrorStrategy = DefaultErrorStrategy;

View File

@ -102,7 +102,7 @@ LexerNoViableAltException.prototype.constructor = LexerNoViableAltException;
LexerNoViableAltException.prototype.toString = function() {
var symbol = "";
if (this.startIndex >= 0 && this.startIndex < this.input.size) {
symbol = this.input.getText(this.startIndex,this.startIndex);
symbol = this.input.getText((this.startIndex,this.startIndex));
}
return "LexerNoViableAltException" + symbol;
};

View File

@ -1,5 +1,3 @@
PORT_DEBUG = false; // TODO(pboyer) remove
exports.atn = require('./atn/index');
exports.dfa = require('./dfa/index');
exports.tree = require('./tree/index');

View File

@ -2,7 +2,7 @@ fileHeader(grammarFileName, ANTLRVersion) ::= <<
// Generated from <grammarFileName; format="java-escape"> by ANTLR <ANTLRVersion>.
>>
ParserFile(file, parser, namedActions) ::= <<
ParserFile(file, parser, namedActions, contextSuperClass) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
<if(file.genPackage)>
@ -1042,12 +1042,12 @@ Set<a.name; format="cap">(<a.type;format="lower">)}; separator="\n\n">
<endif>
// get<struct.name> differentiates from other interfaces.
get<struct.name>()
// Is<struct.name> differentiates from other interfaces.
Is<struct.name>()
}
type <struct.name> struct {
*antlr.BaseParserRuleContext
<if(contextSuperClass)>*<contextSuperClass><else>*antlr.BaseParserRuleContext<endif>
parser antlr.Parser
<if(attrs)>
<attrs; separator="\n">
@ -1056,17 +1056,17 @@ type <struct.name> struct {
func NewEmpty<struct.name>() *<struct.name> {
var p = new(<struct.name>)
p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1)
p.<if(contextSuperClass)><contextSuperClass><else>BaseParserRuleContext<endif> = <if(contextSuperClass)>New<contextSuperClass><else>antlr.NewBaseParserRuleContext<endif>(nil, -1)
p.RuleIndex = <parser.name>RULE_<struct.derivedFromName>
return p
}
func (*<struct.name>) get<struct.name>() {}
func (*<struct.name>) Is<struct.name>() {}
func New<struct.name>(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int<struct.ctorAttrs:{a | , <a.name> <a.type;format="lower">}>) *<struct.name> {
var p = new(<struct.name>)
p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState)
p.<if(contextSuperClass)><contextSuperClass><else>BaseParserRuleContext<endif> = <if(contextSuperClass)>New<contextSuperClass><else>antlr.NewBaseParserRuleContext<endif>(parent, invokingState)
p.parser = parser
p.RuleIndex = <parser.name>RULE_<struct.derivedFromName>
@ -1074,7 +1074,6 @@ func New<struct.name>(parser antlr.Parser, parent antlr.ParserRuleContext, invok
<if(struct.ctorAttrs)>
<struct.ctorAttrs:{a | p.<a.name> = <a.name>}; separator="\n">
<endif>
return p
}
@ -1145,7 +1144,7 @@ func (s *<struct.name>) GetParser() antlr.Parser { return s.parser }
<getters:{g | func (s *<struct.name>) <g>}; separator="\n\n">
<endif>
<if(struct.provideCopyFrom)><! don't need unless we have subclasses !>
<if(struct.provideCopyFrom)>
func (s *<struct.name>) CopyFrom(ctx *<struct.name>) {
s.BaseParserRuleContext.CopyFrom(ctx.BaseParserRuleContext)
@ -1156,6 +1155,11 @@ func (s *<struct.name>) CopyFrom(ctx *<struct.name>) {
func (s *<struct.name>) GetRuleContext() antlr.RuleContext {
return s
}
func (s *<struct.name>) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
<if(dispatchMethods)>
<dispatchMethods; separator="\n\n">
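Taken together, these template edits make the generated context structs honor the contextSuperClass option: when it is set, each context embeds the named superclass (and its constructors call New<contextSuperClass>) instead of *antlr.BaseParserRuleContext. A runtime-free sketch of the resulting shape for contextSuperClass=MyRuleNode, using stand-in types rather than the real generated code:

package main

import "fmt"

// Stand-ins for the runtime types the template normally references.
type BaseParserRuleContext struct{ invokingState int }

func NewBaseParserRuleContext(parent interface{}, invokingState int) *BaseParserRuleContext {
	return &BaseParserRuleContext{invokingState: invokingState}
}

// MyRuleNode, as in the testAltNum grammar: wrap the base context, track the alt.
type MyRuleNode struct {
	*BaseParserRuleContext
	altNum int
}

func NewMyRuleNode(parent interface{}, invokingState int) *MyRuleNode {
	return &MyRuleNode{BaseParserRuleContext: NewBaseParserRuleContext(parent, invokingState)}
}

// SContext sketches what the template emits for a rule when contextSuperClass
// is set: it embeds *MyRuleNode where it previously embedded the base context.
type SContext struct {
	*MyRuleNode
}

func NewSContext(parent interface{}, invokingState int) *SContext {
	return &SContext{MyRuleNode: NewMyRuleNode(parent, invokingState)}
}

func main() {
	s := NewSContext(nil, -1)
	s.altNum = 3
	fmt.Println(s.altNum, s.invokingState) // fields promoted through the embedding chain: 3 -1
}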