forked from jasder/antlr
Fix remaining tests (#64)
* Fix pred context typo
* Adjust array lengths
* PositionAdjustLexer test fix first steps
* allow virtual method calls from Lexer
* Hopefully fix issues with dup namedActions
* Make parser members
* Fix testParserProperty
* Fix LeftRecursion test failures
This commit is contained in:
parent 145bfca692
commit 60662c4b70
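The central runtime change in this commit is the new `Virt` field on `BaseLexer` ("allow virtual method calls from Lexer"): the base lexer keeps a reference to the most derived lexer and calls `Emit` through it, so a generated or hand-written lexer such as `PositionAdjustingLexer` can override token emission. Below is a minimal, self-contained sketch of that pattern; it is not part of the diff, and everything except the `BaseLexer`/`Virt`/`Emit` names is illustrative.

package main

import "fmt"

// Lexer is the interface the base type calls back through.
type Lexer interface {
	Emit() string
}

// BaseLexer holds Virt, the most derived lexer; calling through Virt
// emulates virtual method dispatch, which Go embedding does not provide.
type BaseLexer struct {
	Virt Lexer
}

func (b *BaseLexer) Emit() string { return "base token" }

// NextToken calls Emit through Virt so a subclass override is used.
func (b *BaseLexer) NextToken() string { return b.Virt.Emit() }

// PositionAdjustingLexer overrides Emit, mirroring the test lexer in the diff.
type PositionAdjustingLexer struct {
	*BaseLexer
}

func (p *PositionAdjustingLexer) Emit() string { return "adjusted token" }

func main() {
	p := &PositionAdjustingLexer{BaseLexer: &BaseLexer{}}
	p.Virt = p // as in the diff: the concrete lexer registers itself
	fmt.Println(p.NextToken()) // prints "adjusted token", not "base token"
}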
@@ -231,7 +231,7 @@ Production(p) ::= <%<p; format="cap">%>
Result(r) ::= <%Get<r; format="cap">()%>

ParserPropertyMember() ::= <<
@members {
@parser::members {
func Property() bool {
	return true
}
@@ -239,53 +239,46 @@ func Property() bool {
>>

PositionAdjustingLexer() ::= <<
package antlrtest

type PositionAdjustingLexer struct {
	antlr.*BaseLexer
}

func NewPositionAdjustingLexer(input antlr.CharStream) *PositionAdjustingLexer {
	return &PositionAdjustingLexer{BaseLexer: antlr.NewBaseLexer(input)}
}

func (p *PositionAdjustingLexer) NextToken() *Token {
	var _, ok = p._interp.(*PositionAdjustingLexerATNSimulator)

	if !ok {
		p._interp = NewPositionAdjustingLexerATNSimulator(p, _ATN, _decisionToDFA, _sharedContextCache)
func (p *PositionAdjustingLexer) NextToken() antlr.Token {
	if _, ok := p.Interpreter.(*PositionAdjustingLexerATNSimulator); !ok {
		p.Interpreter = NewPositionAdjustingLexerATNSimulator(p, lexerAtn, p.Interpreter.DecisionToDFA(), p.Interpreter.SharedContextCache())
		p.Virt = p
	}

	return p.BaseLexer.NextToken()
}

func (p *PositionAdjustingLexer) Emit() *Token {
	switch _type {
	case TOKENS:
func (p *PositionAdjustingLexer) Emit() antlr.Token {
	switch p.GetType() {
	case PositionAdjustingLexerTOKENS:
		p.HandleAcceptPositionForKeyword("tokens")

	case LABEL:
	case PositionAdjustingLexerLABEL:
		p.HandleAcceptPositionForIdentifier()
	}

	return p.BaseLexer.Emit()
}

func isIdentifierChar(c rune) bool {
	return unicode.IsLetter(c) || unicode.IsDigit(c) || c == '_'
}

func (p *PositionAdjustingLexer) HandleAcceptPositionForIdentifier() bool {
	var tokenText = GetText()
	var tokenText = p.GetText()
	var identifierLength int

	for identifierLength \< len(tokenText) && isIdentifierChar(tokenText.charAt(identifierLength)) {
	for identifierLength \< len(tokenText) && isIdentifierChar([]rune(tokenText)[identifierLength]) {
		identifierLength++
	}

	if GetInputStream().Index() \<= _tokenStartCharIndex + identifierLength {
	if p.GetInputStream().Index() \<= p.TokenStartCharIndex + identifierLength {
		return false
	}

	var offset = identifierLength - 1

	p.GetInterpreter().ResetAcceptPosition(p.GetInputStream(), p.TokenStartCharIndex + offset, p.TokenStartLine, p.TokenStartCharPositionInLine + offset)
	p.GetInterpreter().(*PositionAdjustingLexerATNSimulator).ResetAcceptPosition(p.GetInputStream(), p.TokenStartCharIndex + offset, p.TokenStartLine, p.TokenStartColumn + offset)

	return true
}
@@ -297,31 +290,23 @@ func (p *PositionAdjustingLexer) HandleAcceptPositionForKeyword(keyword string)

	var offset = len(keyword) - 1

	p.GetInterpreter().ResetAcceptPosition(p.GetInputStream(), p.TokenStartCharIndex + offset, p.TokenStartLine, p.TokenStartCharPositionInLine + offset)
	p.GetInterpreter().(*PositionAdjustingLexerATNSimulator).ResetAcceptPosition(p.GetInputStream(), p.TokenStartCharIndex + offset, p.TokenStartLine, p.TokenStartColumn + offset)

	return true
}

func (s *PositionAdjustingLexer) GetInterpreter() *LexerATNSimulator {
	return s // TODO: return super.(*PositionAdjustingLexerATNSimulator).GetInterpreter()
}

func isIdentifierChar(c rune) bool {
	return Character.isLetterOrDigit(c) || c == '_'
}

type PositionAdjustingLexerATNSimulator struct {
	*antlr.LexerATNSimulator
}

func NewPositionAdjustingLexerATNSimulator(recog antlr.Lexer, atn *antlr.ATN, decisionToDFA []*antlr.DFA, sharedContextCache *PredictionContextCache) *PositionAdjustingLexerATNSimulator {
func NewPositionAdjustingLexerATNSimulator(recog antlr.Lexer, atn *antlr.ATN, decisionToDFA []*antlr.DFA, sharedContextCache *antlr.PredictionContextCache) *PositionAdjustingLexerATNSimulator {
	return &PositionAdjustingLexerATNSimulator{
		LexerATNSimulator: antlr.NewLexerATNSimulator(recog, atn, decisionToDFA, sharedContextCache),
	}
}

func (p *NewPositionAdjustingLexerATNSimulator) ResetAcceptPosition(input CharStream, index, line, charPositionInLine int) {
	p.GetInputStream().Seek(index)
func (p *PositionAdjustingLexerATNSimulator) ResetAcceptPosition(input antlr.CharStream, index, line, charPositionInLine int) {
	input.Seek(index)
	p.Line = line
	p.CharPositionInLine = charPositionInLine
	p.Consume(input)

@@ -24,7 +24,7 @@ Errors() ::= ""

grammar(grammarName) ::= <<
grammar <grammarName>;
@members {<InitIntMember("i","0")>}
@parser::members {<InitIntMember("i","0")>}
s : a+ ;
a : {<SetMember("i","1")>} ID {<MemberEquals("i","1")>}? {<writeln("\"alt 1\"")>}
  | {<SetMember("i","2")>} ID {<MemberEquals("i","2")>}? {<writeln("\"alt 2\"")>}

@@ -27,7 +27,7 @@ Errors() ::= ""

grammar(grammarName) ::= <<
grammar <grammarName>;
@members {
@parser::members {
<Declare_pred()>
}
s : e {} {<True():Invoke_pred()>}? {<writeln("\"parse\"")>} '!' ;

@@ -25,7 +25,7 @@ Errors() ::= ""

grammar(grammarName) ::= <<
grammar <grammarName>;
@members {
@parser::members {
<Declare_pred()>
}
s : a[99] ;

@@ -12,7 +12,7 @@ Rule() ::= "primary"

grammar(grammarName) ::= <<
grammar <grammarName>;
@members {<InitBooleanMember("enumKeyword",True())>}
@parser::members {<InitBooleanMember("enumKeyword",True())>}
primary
  : ID {<writeln("\"ID \"+$ID.text")>}
  | {<GetMember("enumKeyword"):Not()>}? 'enum' {<writeln("\"enum\"")>}

@@ -28,7 +28,7 @@ Errors() ::= ""

grammar(grammarName) ::= <<
grammar <grammarName>;
@members {<InitIntMember("i","0")>}
@parser::members {<InitIntMember("i","0")>}
s : a[2] a[1];
a[int i]
  : {<ValEquals("$i","1")>}? ID {<writeln("\"alt 1\"")>}

@@ -30,7 +30,7 @@ Errors() ::= ""

grammar(grammarName) ::= <<
grammar <grammarName>;
@members {<InitIntMember("i","0")>}
@parser::members {<InitIntMember("i","0")>}
s : a[2] a[1];
a[int i]
  : {<ValEquals("$i","1")>}? ID

@@ -26,7 +26,7 @@ Errors() ::= ""

grammar(grammarName) ::= <<
grammar <grammarName>;
@members {
@parser::members {
<Declare_pred()>
}
s : e {<True():Invoke_pred()>}? {<writeln("\"parse\"")>} '!' ;

@@ -31,7 +31,7 @@ Errors() ::= ""

grammar(grammarName) ::= <<
grammar <grammarName>;
@members {<InitIntMember("i","0")>}
@parser::members {<InitIntMember("i","0")>}
s : ({<AddMember("i","1")>
<PlusMember("\"i=\"","i"):writeln()>} a)+ ;
a : {<ModMemberEquals("i","2","0")>}? ID {<writeln("\"alt 1\"")>}

@ -69,9 +69,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testActionHidesPreds() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(237);
|
||||
StringBuilder grammarBuilder = new StringBuilder(245);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {int i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {int i = 0;}\n");
|
||||
grammarBuilder.append("s : a+ ;\n");
|
||||
grammarBuilder.append("a : {this.i = 1;} ID {this.i == 1}? {Console.WriteLine(\"alt 1\");}\n");
|
||||
grammarBuilder.append(" | {this.i = 2;} ID {this.i == 2}? {Console.WriteLine(\"alt 2\");}\n");
|
||||
|
@ -93,9 +93,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testActionsHidePredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(304);
|
||||
StringBuilder grammarBuilder = new StringBuilder(312);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("bool pred(bool v) {\n");
|
||||
grammarBuilder.append(" Console.WriteLine(\"eval=\"+v.ToString().ToLower());\n");
|
||||
grammarBuilder.append(" return v;\n");
|
||||
|
@ -137,9 +137,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testDepedentPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(327);
|
||||
StringBuilder grammarBuilder = new StringBuilder(335);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("bool pred(bool v) {\n");
|
||||
grammarBuilder.append(" Console.WriteLine(\"eval=\"+v.ToString().ToLower());\n");
|
||||
grammarBuilder.append(" return v;\n");
|
||||
|
@ -325,9 +325,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredTestedEvenWhenUnAmbig_1() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(213);
|
||||
StringBuilder grammarBuilder = new StringBuilder(221);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {bool enumKeyword = true;}\n");
|
||||
grammarBuilder.append("@parser::members {bool enumKeyword = true;}\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {Console.WriteLine(\"ID \"+$ID.text);}\n");
|
||||
grammarBuilder.append(" | {!this.enumKeyword}? 'enum' {Console.WriteLine(\"enum\");}\n");
|
||||
|
@ -345,9 +345,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredTestedEvenWhenUnAmbig_2() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(213);
|
||||
StringBuilder grammarBuilder = new StringBuilder(221);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {bool enumKeyword = true;}\n");
|
||||
grammarBuilder.append("@parser::members {bool enumKeyword = true;}\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {Console.WriteLine(\"ID \"+$ID.text);}\n");
|
||||
grammarBuilder.append(" | {!this.enumKeyword}? 'enum' {Console.WriteLine(\"enum\");}\n");
|
||||
|
@ -366,9 +366,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredicateDependentOnArg() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(212);
|
||||
StringBuilder grammarBuilder = new StringBuilder(220);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {int i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {int i = 0;}\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i==1}? ID {Console.WriteLine(\"alt 1\");}\n");
|
||||
|
@ -390,9 +390,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredicateDependentOnArg2() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(154);
|
||||
StringBuilder grammarBuilder = new StringBuilder(162);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {int i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {int i = 0;}\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i==1}? ID \n");
|
||||
|
@ -412,9 +412,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(298);
|
||||
StringBuilder grammarBuilder = new StringBuilder(306);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("bool pred(bool v) {\n");
|
||||
grammarBuilder.append(" Console.WriteLine(\"eval=\"+v.ToString().ToLower());\n");
|
||||
grammarBuilder.append(" return v;\n");
|
||||
|
@ -552,9 +552,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testToLeftWithVaryingPredicate() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(268);
|
||||
StringBuilder grammarBuilder = new StringBuilder(276);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {int i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {int i = 0;}\n");
|
||||
grammarBuilder.append("s : ({this.i += 1;\n");
|
||||
grammarBuilder.append("Console.WriteLine(\"i=\" + this.i);} a)+ ;\n");
|
||||
grammarBuilder.append("a : {this.i % 2 == 0}? ID {Console.WriteLine(\"alt 1\");}\n");
|
||||
|
|
|
@ -907,7 +907,7 @@ public abstract class BaseTest {
|
|||
+ " for _, t := range stream.GetAllTokens() {\n"
|
||||
+ " fmt.Println(t)\n"
|
||||
+ " }\n"
|
||||
+ (showDFA ? "fmt.Print(lexer.GetInterpreter().DecisionToDFA[antlr.LexerDefaultMode].ToLexerString())\n"
|
||||
+ (showDFA ? "fmt.Print(lexer.GetInterpreter().DecisionToDFA()[antlr.LexerDefaultMode].ToLexerString())\n"
|
||||
: "")
|
||||
+ "}\n"
|
||||
+ "\n");
|
||||
|
|
|
@ -4625,57 +4625,50 @@ public class TestLexerExec extends BaseTest {
|
|||
public void testPositionAdjustingLexer() throws Exception {
|
||||
mkdir(parserpkgdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(2906);
|
||||
StringBuilder grammarBuilder = new StringBuilder(2692);
|
||||
grammarBuilder.append("lexer grammar PositionAdjustingLexer;\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("package antlrtest\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("type PositionAdjustingLexer struct {\n");
|
||||
grammarBuilder.append(" antlr.*BaseLexer\n");
|
||||
grammarBuilder.append("}\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("func NewPositionAdjustingLexer(input antlr.CharStream) *PositionAdjustingLexer {\n");
|
||||
grammarBuilder.append(" return &PositionAdjustingLexer{BaseLexer: antlr.NewBaseLexer(input)}\n");
|
||||
grammarBuilder.append("}\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("func (p *PositionAdjustingLexer) NextToken() *Token {\n");
|
||||
grammarBuilder.append(" var _, ok = p._interp.(*PositionAdjustingLexerATNSimulator)\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" if !ok {\n");
|
||||
grammarBuilder.append(" p._interp = NewPositionAdjustingLexerATNSimulator(p, _ATN, _decisionToDFA, _sharedContextCache)\n");
|
||||
grammarBuilder.append("func (p *PositionAdjustingLexer) NextToken() antlr.Token {\n");
|
||||
grammarBuilder.append(" if _, ok := p.Interpreter.(*PositionAdjustingLexerATNSimulator); !ok {\n");
|
||||
grammarBuilder.append(" p.Interpreter = NewPositionAdjustingLexerATNSimulator(p, lexerAtn, p.Interpreter.DecisionToDFA(), p.Interpreter.SharedContextCache())\n");
|
||||
grammarBuilder.append(" p.Virt = p\n");
|
||||
grammarBuilder.append(" }\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" return p.BaseLexer.NextToken()\n");
|
||||
grammarBuilder.append("}\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("func (p *PositionAdjustingLexer) Emit() *Token {\n");
|
||||
grammarBuilder.append(" switch _type {\n");
|
||||
grammarBuilder.append(" case TOKENS:\n");
|
||||
grammarBuilder.append("func (p *PositionAdjustingLexer) Emit() antlr.Token {\n");
|
||||
grammarBuilder.append(" switch p.GetType() {\n");
|
||||
grammarBuilder.append(" case PositionAdjustingLexerTOKENS:\n");
|
||||
grammarBuilder.append(" p.HandleAcceptPositionForKeyword(\"tokens\")\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" case LABEL:\n");
|
||||
grammarBuilder.append(" case PositionAdjustingLexerLABEL:\n");
|
||||
grammarBuilder.append(" p.HandleAcceptPositionForIdentifier()\n");
|
||||
grammarBuilder.append(" }\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" return p.BaseLexer.Emit()\n");
|
||||
grammarBuilder.append("}\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("func isIdentifierChar(c rune) bool {\n");
|
||||
grammarBuilder.append(" return unicode.IsLetter(c) || unicode.IsDigit(c) || c == '_'\n");
|
||||
grammarBuilder.append("}\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("func (p *PositionAdjustingLexer) HandleAcceptPositionForIdentifier() bool {\n");
|
||||
grammarBuilder.append(" var tokenText = GetText()\n");
|
||||
grammarBuilder.append(" var tokenText = p.GetText()\n");
|
||||
grammarBuilder.append(" var identifierLength int\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" for identifierLength < len(tokenText) && isIdentifierChar(tokenText.charAt(identifierLength)) {\n");
|
||||
grammarBuilder.append(" for identifierLength < len(tokenText) && isIdentifierChar([]rune(tokenText)[identifierLength]) {\n");
|
||||
grammarBuilder.append(" identifierLength++\n");
|
||||
grammarBuilder.append(" }\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" if GetInputStream().Index() <= _tokenStartCharIndex + identifierLength {\n");
|
||||
grammarBuilder.append(" if p.GetInputStream().Index() <= p.TokenStartCharIndex + identifierLength {\n");
|
||||
grammarBuilder.append(" return false\n");
|
||||
grammarBuilder.append(" }\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" var offset = identifierLength - 1\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" p.GetInterpreter().ResetAcceptPosition(p.GetInputStream(), p.TokenStartCharIndex + offset, p.TokenStartLine, p.TokenStartCharPositionInLine + offset)\n");
|
||||
grammarBuilder.append(" p.GetInterpreter().(*PositionAdjustingLexerATNSimulator).ResetAcceptPosition(p.GetInputStream(), p.TokenStartCharIndex + offset, p.TokenStartLine, p.TokenStartColumn + offset)\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" return true\n");
|
||||
grammarBuilder.append("}\n");
|
||||
|
@ -4687,31 +4680,23 @@ public class TestLexerExec extends BaseTest {
|
|||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" var offset = len(keyword) - 1\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" p.GetInterpreter().ResetAcceptPosition(p.GetInputStream(), p.TokenStartCharIndex + offset, p.TokenStartLine, p.TokenStartCharPositionInLine + offset)\n");
|
||||
grammarBuilder.append(" p.GetInterpreter().(*PositionAdjustingLexerATNSimulator).ResetAcceptPosition(p.GetInputStream(), p.TokenStartCharIndex + offset, p.TokenStartLine, p.TokenStartColumn + offset)\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append(" return true\n");
|
||||
grammarBuilder.append("}\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("func (s *PositionAdjustingLexer) GetInterpreter() *LexerATNSimulator {\n");
|
||||
grammarBuilder.append(" return s // TODO: return super.(*PositionAdjustingLexerATNSimulator).GetInterpreter()\n");
|
||||
grammarBuilder.append("}\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("func isIdentifierChar(c rune) bool {\n");
|
||||
grammarBuilder.append(" return Character.isLetterOrDigit(c) || c == '_'\n");
|
||||
grammarBuilder.append("}\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("type PositionAdjustingLexerATNSimulator struct {\n");
|
||||
grammarBuilder.append(" *antlr.LexerATNSimulator\n");
|
||||
grammarBuilder.append("}\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("func NewPositionAdjustingLexerATNSimulator(recog antlr.Lexer, atn *antlr.ATN, decisionToDFA []*antlr.DFA, sharedContextCache *PredictionContextCache) *PositionAdjustingLexerATNSimulator {\n");
|
||||
grammarBuilder.append("func NewPositionAdjustingLexerATNSimulator(recog antlr.Lexer, atn *antlr.ATN, decisionToDFA []*antlr.DFA, sharedContextCache *antlr.PredictionContextCache) *PositionAdjustingLexerATNSimulator {\n");
|
||||
grammarBuilder.append(" return &PositionAdjustingLexerATNSimulator{\n");
|
||||
grammarBuilder.append(" LexerATNSimulator: antlr.NewLexerATNSimulator(recog, atn, decisionToDFA, sharedContextCache),\n");
|
||||
grammarBuilder.append(" }\n");
|
||||
grammarBuilder.append("}\n");
|
||||
grammarBuilder.append("\n");
|
||||
grammarBuilder.append("func (p *NewPositionAdjustingLexerATNSimulator) ResetAcceptPosition(input CharStream, index, line, charPositionInLine int) {\n");
|
||||
grammarBuilder.append(" p.GetInputStream().Seek(index)\n");
|
||||
grammarBuilder.append("func (p *PositionAdjustingLexerATNSimulator) ResetAcceptPosition(input antlr.CharStream, index, line, charPositionInLine int) {\n");
|
||||
grammarBuilder.append(" input.Seek(index)\n");
|
||||
grammarBuilder.append(" p.Line = line\n");
|
||||
grammarBuilder.append(" p.CharPositionInLine = charPositionInLine\n");
|
||||
grammarBuilder.append(" p.Consume(input)\n");
|
||||
|
|
|
@@ -556,9 +556,9 @@ public class TestParserExec extends BaseTest {
	@Test
	public void testParserProperty() throws Exception {
		mkdir(parserpkgdir);
		StringBuilder grammarBuilder = new StringBuilder(152);
		StringBuilder grammarBuilder = new StringBuilder(160);
		grammarBuilder.append("grammar T;\n");
		grammarBuilder.append("@members {\n");
		grammarBuilder.append("@parser::members {\n");
		grammarBuilder.append("func Property() bool {\n");
		grammarBuilder.append(" return true\n");
		grammarBuilder.append("}\n");

@ -72,9 +72,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testActionHidesPreds() throws Exception {
|
||||
mkdir(parserpkgdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(223);
|
||||
StringBuilder grammarBuilder = new StringBuilder(231);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {var i int = 0; var _ int = i; }\n");
|
||||
grammarBuilder.append("@parser::members {var i int = 0; var _ int = i; }\n");
|
||||
grammarBuilder.append("s : a+ ;\n");
|
||||
grammarBuilder.append("a : {i = 1;} ID {i == 1}? {fmt.Println(\"alt 1\")}\n");
|
||||
grammarBuilder.append(" | {i = 2;} ID {i == 2}? {fmt.Println(\"alt 2\")}\n");
|
||||
|
@ -97,9 +97,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testActionsHidePredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(parserpkgdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(278);
|
||||
StringBuilder grammarBuilder = new StringBuilder(286);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("func pred(v bool) bool {\n");
|
||||
grammarBuilder.append(" fmt.Println(\"eval=\" + fmt.Sprint(v))\n");
|
||||
grammarBuilder.append("\n");
|
||||
|
@ -144,9 +144,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testDepedentPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(parserpkgdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(305);
|
||||
StringBuilder grammarBuilder = new StringBuilder(313);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("func pred(v bool) bool {\n");
|
||||
grammarBuilder.append(" fmt.Println(\"eval=\" + fmt.Sprint(v))\n");
|
||||
grammarBuilder.append("\n");
|
||||
|
@ -341,9 +341,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredTestedEvenWhenUnAmbig_1() throws Exception {
|
||||
mkdir(parserpkgdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(225);
|
||||
StringBuilder grammarBuilder = new StringBuilder(233);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {var enumKeyword bool = true; var _ bool = enumKeyword; }\n");
|
||||
grammarBuilder.append("@parser::members {var enumKeyword bool = true; var _ bool = enumKeyword; }\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {fmt.Println(\"ID \"+$ID.text)}\n");
|
||||
grammarBuilder.append(" | {!enumKeyword}? 'enum' {fmt.Println(\"enum\")}\n");
|
||||
|
@ -362,9 +362,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredTestedEvenWhenUnAmbig_2() throws Exception {
|
||||
mkdir(parserpkgdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(225);
|
||||
StringBuilder grammarBuilder = new StringBuilder(233);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {var enumKeyword bool = true; var _ bool = enumKeyword; }\n");
|
||||
grammarBuilder.append("@parser::members {var enumKeyword bool = true; var _ bool = enumKeyword; }\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {fmt.Println(\"ID \"+$ID.text)}\n");
|
||||
grammarBuilder.append(" | {!enumKeyword}? 'enum' {fmt.Println(\"enum\")}\n");
|
||||
|
@ -384,9 +384,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredicateDependentOnArg() throws Exception {
|
||||
mkdir(parserpkgdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(222);
|
||||
StringBuilder grammarBuilder = new StringBuilder(230);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {var i int = 0; var _ int = i; }\n");
|
||||
grammarBuilder.append("@parser::members {var i int = 0; var _ int = i; }\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i == 1}? ID {fmt.Println(\"alt 1\")}\n");
|
||||
|
@ -409,9 +409,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredicateDependentOnArg2() throws Exception {
|
||||
mkdir(parserpkgdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(178);
|
||||
StringBuilder grammarBuilder = new StringBuilder(186);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {var i int = 0; var _ int = i; }\n");
|
||||
grammarBuilder.append("@parser::members {var i int = 0; var _ int = i; }\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i == 1}? ID \n");
|
||||
|
@ -432,9 +432,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(parserpkgdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(272);
|
||||
StringBuilder grammarBuilder = new StringBuilder(280);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("func pred(v bool) bool {\n");
|
||||
grammarBuilder.append(" fmt.Println(\"eval=\" + fmt.Sprint(v))\n");
|
||||
grammarBuilder.append("\n");
|
||||
|
@ -579,9 +579,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testToLeftWithVaryingPredicate() throws Exception {
|
||||
mkdir(parserpkgdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(259);
|
||||
StringBuilder grammarBuilder = new StringBuilder(267);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {var i int = 0; var _ int = i; }\n");
|
||||
grammarBuilder.append("@parser::members {var i int = 0; var _ int = i; }\n");
|
||||
grammarBuilder.append("s : ({i += 1;\n");
|
||||
grammarBuilder.append("fmt.Println(\"i=\" + fmt.Sprint(i))} a)+ ;\n");
|
||||
grammarBuilder.append("a : {i % 2 == 0}? ID {fmt.Println(\"alt 1\")}\n");
|
||||
|
|
|
@ -79,9 +79,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
public void testActionHidesPreds() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(239);
|
||||
StringBuilder grammarBuilder = new StringBuilder(247);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {int i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {int i = 0;}\n");
|
||||
grammarBuilder.append("s : a+ ;\n");
|
||||
grammarBuilder.append("a : {this.i = 1;} ID {this.i == 1}? {System.out.println(\"alt 1\");}\n");
|
||||
grammarBuilder.append(" | {this.i = 2;} ID {this.i == 2}? {System.out.println(\"alt 2\");}\n");
|
||||
|
@ -107,9 +107,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
public void testActionsHidePredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(291);
|
||||
StringBuilder grammarBuilder = new StringBuilder(299);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("boolean pred(boolean v) {\n");
|
||||
grammarBuilder.append(" System.out.println(\"eval=\"+v);\n");
|
||||
grammarBuilder.append(" return v;\n");
|
||||
|
@ -159,9 +159,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
public void testDepedentPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(314);
|
||||
StringBuilder grammarBuilder = new StringBuilder(322);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("boolean pred(boolean v) {\n");
|
||||
grammarBuilder.append(" System.out.println(\"eval=\"+v);\n");
|
||||
grammarBuilder.append(" return v;\n");
|
||||
|
@ -379,9 +379,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
public void testPredTestedEvenWhenUnAmbig_1() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(218);
|
||||
StringBuilder grammarBuilder = new StringBuilder(226);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {boolean enumKeyword = true;}\n");
|
||||
grammarBuilder.append("@parser::members {boolean enumKeyword = true;}\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {System.out.println(\"ID \"+$ID.text);}\n");
|
||||
grammarBuilder.append(" | {!this.enumKeyword}? 'enum' {System.out.println(\"enum\");}\n");
|
||||
|
@ -403,9 +403,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
public void testPredTestedEvenWhenUnAmbig_2() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(218);
|
||||
StringBuilder grammarBuilder = new StringBuilder(226);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {boolean enumKeyword = true;}\n");
|
||||
grammarBuilder.append("@parser::members {boolean enumKeyword = true;}\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {System.out.println(\"ID \"+$ID.text);}\n");
|
||||
grammarBuilder.append(" | {!this.enumKeyword}? 'enum' {System.out.println(\"enum\");}\n");
|
||||
|
@ -428,9 +428,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
public void testPredicateDependentOnArg() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(214);
|
||||
StringBuilder grammarBuilder = new StringBuilder(222);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {int i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {int i = 0;}\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i==1}? ID {System.out.println(\"alt 1\");}\n");
|
||||
|
@ -456,9 +456,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
public void testPredicateDependentOnArg2() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(154);
|
||||
StringBuilder grammarBuilder = new StringBuilder(162);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {int i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {int i = 0;}\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i==1}? ID \n");
|
||||
|
@ -482,9 +482,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
public void testPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(285);
|
||||
StringBuilder grammarBuilder = new StringBuilder(293);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("boolean pred(boolean v) {\n");
|
||||
grammarBuilder.append(" System.out.println(\"eval=\"+v);\n");
|
||||
grammarBuilder.append(" return v;\n");
|
||||
|
@ -646,9 +646,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
public void testToLeftWithVaryingPredicate() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(271);
|
||||
StringBuilder grammarBuilder = new StringBuilder(279);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {int i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {int i = 0;}\n");
|
||||
grammarBuilder.append("s : ({this.i += 1;\n");
|
||||
grammarBuilder.append("System.out.println(\"i=\" + this.i);} a)+ ;\n");
|
||||
grammarBuilder.append("a : {this.i % 2 == 0}? ID {System.out.println(\"alt 1\");}\n");
|
||||
|
|
|
@ -75,9 +75,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testActionHidesPreds() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(228);
|
||||
StringBuilder grammarBuilder = new StringBuilder(236);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {this.i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {this.i = 0;}\n");
|
||||
grammarBuilder.append("s : a+ ;\n");
|
||||
grammarBuilder.append("a : {this.i = 1;} ID {this.i === 1}? {console.log(\"alt 1\");}\n");
|
||||
grammarBuilder.append(" | {this.i = 2;} ID {this.i === 2}? {console.log(\"alt 2\");}\n");
|
||||
|
@ -101,9 +101,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testActionsHidePredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(291);
|
||||
StringBuilder grammarBuilder = new StringBuilder(299);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("this.pred = function(v) {\n");
|
||||
grammarBuilder.append(" console.log(\"eval=\" + v.toString());\n");
|
||||
grammarBuilder.append(" return v;\n");
|
||||
|
@ -149,9 +149,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testDepedentPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(316);
|
||||
StringBuilder grammarBuilder = new StringBuilder(324);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("this.pred = function(v) {\n");
|
||||
grammarBuilder.append(" console.log(\"eval=\" + v.toString());\n");
|
||||
grammarBuilder.append(" return v;\n");
|
||||
|
@ -353,9 +353,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredTestedEvenWhenUnAmbig_1() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(201);
|
||||
StringBuilder grammarBuilder = new StringBuilder(209);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {this.enumKeyword = true;}\n");
|
||||
grammarBuilder.append("@parser::members {this.enumKeyword = true;}\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {console.log(\"ID \"+$ID.text);}\n");
|
||||
grammarBuilder.append(" | {!this.enumKeyword}? 'enum' {console.log(\"enum\");}\n");
|
||||
|
@ -375,9 +375,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredTestedEvenWhenUnAmbig_2() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(201);
|
||||
StringBuilder grammarBuilder = new StringBuilder(209);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {this.enumKeyword = true;}\n");
|
||||
grammarBuilder.append("@parser::members {this.enumKeyword = true;}\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {console.log(\"ID \"+$ID.text);}\n");
|
||||
grammarBuilder.append(" | {!this.enumKeyword}? 'enum' {console.log(\"enum\");}\n");
|
||||
|
@ -398,9 +398,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredicateDependentOnArg() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(203);
|
||||
StringBuilder grammarBuilder = new StringBuilder(211);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {this.i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {this.i = 0;}\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i===1}? ID {console.log(\"alt 1\");}\n");
|
||||
|
@ -424,9 +424,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredicateDependentOnArg2() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(157);
|
||||
StringBuilder grammarBuilder = new StringBuilder(165);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {this.i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {this.i = 0;}\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i===1}? ID \n");
|
||||
|
@ -448,9 +448,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(285);
|
||||
StringBuilder grammarBuilder = new StringBuilder(293);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("this.pred = function(v) {\n");
|
||||
grammarBuilder.append(" console.log(\"eval=\" + v.toString());\n");
|
||||
grammarBuilder.append(" return v;\n");
|
||||
|
@ -600,9 +600,9 @@ public class TestSemPredEvalParser extends BaseTest {
|
|||
@Test
|
||||
public void testToLeftWithVaryingPredicate() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
StringBuilder grammarBuilder = new StringBuilder(252);
|
||||
StringBuilder grammarBuilder = new StringBuilder(260);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {this.i = 0;}\n");
|
||||
grammarBuilder.append("@parser::members {this.i = 0;}\n");
|
||||
grammarBuilder.append("s : ({this.i += 1;\n");
|
||||
grammarBuilder.append("console.log(\"i=\" + this.i);} a)+ ;\n");
|
||||
grammarBuilder.append("a : {this.i % 2 === 0}? ID {console.log(\"alt 1\");}\n");
|
||||
|
|
|
@ -81,9 +81,9 @@ public class TestSemPredEvalParser extends BasePython2Test {
|
|||
public void testActionHidesPreds() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(204);
|
||||
StringBuilder grammarBuilder = new StringBuilder(212);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {i = 0}\n");
|
||||
grammarBuilder.append("@parser::members {i = 0}\n");
|
||||
grammarBuilder.append("s : a+ ;\n");
|
||||
grammarBuilder.append("a : {self.i = 1} ID {self.i == 1}? {print(\"alt 1\")}\n");
|
||||
grammarBuilder.append(" | {self.i = 2} ID {self.i == 2}? {print(\"alt 2\")}\n");
|
||||
|
@ -110,9 +110,9 @@ public class TestSemPredEvalParser extends BasePython2Test {
|
|||
public void testActionsHidePredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(269);
|
||||
StringBuilder grammarBuilder = new StringBuilder(277);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("def pred(self, v):\n");
|
||||
grammarBuilder.append(" print('eval=' + str(v).lower())\n");
|
||||
grammarBuilder.append(" return v\n");
|
||||
|
@ -164,9 +164,9 @@ public class TestSemPredEvalParser extends BasePython2Test {
|
|||
public void testDepedentPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(292);
|
||||
StringBuilder grammarBuilder = new StringBuilder(300);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("def pred(self, v):\n");
|
||||
grammarBuilder.append(" print('eval=' + str(v).lower())\n");
|
||||
grammarBuilder.append(" return v\n");
|
||||
|
@ -392,9 +392,9 @@ public class TestSemPredEvalParser extends BasePython2Test {
|
|||
public void testPredTestedEvenWhenUnAmbig_1() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(184);
|
||||
StringBuilder grammarBuilder = new StringBuilder(192);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {enumKeyword = True}\n");
|
||||
grammarBuilder.append("@parser::members {enumKeyword = True}\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {print(\"ID \"+$ID.text)}\n");
|
||||
grammarBuilder.append(" | {not self.enumKeyword}? 'enum' {print(\"enum\")}\n");
|
||||
|
@ -417,9 +417,9 @@ public class TestSemPredEvalParser extends BasePython2Test {
|
|||
public void testPredTestedEvenWhenUnAmbig_2() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(184);
|
||||
StringBuilder grammarBuilder = new StringBuilder(192);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {enumKeyword = True}\n");
|
||||
grammarBuilder.append("@parser::members {enumKeyword = True}\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {print(\"ID \"+$ID.text)}\n");
|
||||
grammarBuilder.append(" | {not self.enumKeyword}? 'enum' {print(\"enum\")}\n");
|
||||
|
@ -443,9 +443,9 @@ public class TestSemPredEvalParser extends BasePython2Test {
|
|||
public void testPredicateDependentOnArg() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(181);
|
||||
StringBuilder grammarBuilder = new StringBuilder(189);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {i = 0}\n");
|
||||
grammarBuilder.append("@parser::members {i = 0}\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i==1}? ID {print(\"alt 1\")}\n");
|
||||
|
@ -472,9 +472,9 @@ public class TestSemPredEvalParser extends BasePython2Test {
|
|||
public void testPredicateDependentOnArg2() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(149);
|
||||
StringBuilder grammarBuilder = new StringBuilder(157);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {i = 0}\n");
|
||||
grammarBuilder.append("@parser::members {i = 0}\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i==1}? ID \n");
|
||||
|
@ -499,9 +499,9 @@ public class TestSemPredEvalParser extends BasePython2Test {
|
|||
public void testPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(263);
|
||||
StringBuilder grammarBuilder = new StringBuilder(271);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("def pred(self, v):\n");
|
||||
grammarBuilder.append(" print('eval=' + str(v).lower())\n");
|
||||
grammarBuilder.append(" return v\n");
|
||||
|
@ -669,9 +669,9 @@ public class TestSemPredEvalParser extends BasePython2Test {
|
|||
public void testToLeftWithVaryingPredicate() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(228);
|
||||
StringBuilder grammarBuilder = new StringBuilder(236);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {i = 0}\n");
|
||||
grammarBuilder.append("@parser::members {i = 0}\n");
|
||||
grammarBuilder.append("s : ({self.i += 1\n");
|
||||
grammarBuilder.append("print(\"i=\" + str(self.i))} a)+ ;\n");
|
||||
grammarBuilder.append("a : {self.i % 2 == 0}? ID {print(\"alt 1\")}\n");
|
||||
|
|
|
@ -81,9 +81,9 @@ public class TestSemPredEvalParser extends BasePython3Test {
|
|||
public void testActionHidesPreds() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(204);
|
||||
StringBuilder grammarBuilder = new StringBuilder(212);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {i = 0}\n");
|
||||
grammarBuilder.append("@parser::members {i = 0}\n");
|
||||
grammarBuilder.append("s : a+ ;\n");
|
||||
grammarBuilder.append("a : {self.i = 1} ID {self.i == 1}? {print(\"alt 1\")}\n");
|
||||
grammarBuilder.append(" | {self.i = 2} ID {self.i == 2}? {print(\"alt 2\")}\n");
|
||||
|
@ -110,9 +110,9 @@ public class TestSemPredEvalParser extends BasePython3Test {
|
|||
public void testActionsHidePredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(269);
|
||||
StringBuilder grammarBuilder = new StringBuilder(277);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("def pred(self, v):\n");
|
||||
grammarBuilder.append(" print('eval=' + str(v).lower())\n");
|
||||
grammarBuilder.append(" return v\n");
|
||||
|
@ -164,9 +164,9 @@ public class TestSemPredEvalParser extends BasePython3Test {
|
|||
public void testDepedentPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(292);
|
||||
StringBuilder grammarBuilder = new StringBuilder(300);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("def pred(self, v):\n");
|
||||
grammarBuilder.append(" print('eval=' + str(v).lower())\n");
|
||||
grammarBuilder.append(" return v\n");
|
||||
|
@ -392,9 +392,9 @@ public class TestSemPredEvalParser extends BasePython3Test {
|
|||
public void testPredTestedEvenWhenUnAmbig_1() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(184);
|
||||
StringBuilder grammarBuilder = new StringBuilder(192);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {enumKeyword = True}\n");
|
||||
grammarBuilder.append("@parser::members {enumKeyword = True}\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {print(\"ID \"+$ID.text)}\n");
|
||||
grammarBuilder.append(" | {not self.enumKeyword}? 'enum' {print(\"enum\")}\n");
|
||||
|
@ -417,9 +417,9 @@ public class TestSemPredEvalParser extends BasePython3Test {
|
|||
public void testPredTestedEvenWhenUnAmbig_2() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(184);
|
||||
StringBuilder grammarBuilder = new StringBuilder(192);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {enumKeyword = True}\n");
|
||||
grammarBuilder.append("@parser::members {enumKeyword = True}\n");
|
||||
grammarBuilder.append("primary\n");
|
||||
grammarBuilder.append(" : ID {print(\"ID \"+$ID.text)}\n");
|
||||
grammarBuilder.append(" | {not self.enumKeyword}? 'enum' {print(\"enum\")}\n");
|
||||
|
@ -443,9 +443,9 @@ public class TestSemPredEvalParser extends BasePython3Test {
|
|||
public void testPredicateDependentOnArg() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(181);
|
||||
StringBuilder grammarBuilder = new StringBuilder(189);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {i = 0}\n");
|
||||
grammarBuilder.append("@parser::members {i = 0}\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i==1}? ID {print(\"alt 1\")}\n");
|
||||
|
@ -472,9 +472,9 @@ public class TestSemPredEvalParser extends BasePython3Test {
|
|||
public void testPredicateDependentOnArg2() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(149);
|
||||
StringBuilder grammarBuilder = new StringBuilder(157);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {i = 0}\n");
|
||||
grammarBuilder.append("@parser::members {i = 0}\n");
|
||||
grammarBuilder.append("s : a[2] a[1];\n");
|
||||
grammarBuilder.append("a[int i]\n");
|
||||
grammarBuilder.append(" : {$i==1}? ID \n");
|
||||
|
@ -499,9 +499,9 @@ public class TestSemPredEvalParser extends BasePython3Test {
|
|||
public void testPredsInGlobalFOLLOW() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(263);
|
||||
StringBuilder grammarBuilder = new StringBuilder(271);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {\n");
|
||||
grammarBuilder.append("@parser::members {\n");
|
||||
grammarBuilder.append("def pred(self, v):\n");
|
||||
grammarBuilder.append(" print('eval=' + str(v).lower())\n");
|
||||
grammarBuilder.append(" return v\n");
|
||||
|
@ -669,9 +669,9 @@ public class TestSemPredEvalParser extends BasePython3Test {
|
|||
public void testToLeftWithVaryingPredicate() throws Exception {
|
||||
mkdir(tmpdir);
|
||||
|
||||
StringBuilder grammarBuilder = new StringBuilder(228);
|
||||
StringBuilder grammarBuilder = new StringBuilder(236);
|
||||
grammarBuilder.append("grammar T;\n");
|
||||
grammarBuilder.append("@members {i = 0}\n");
|
||||
grammarBuilder.append("@parser::members {i = 0}\n");
|
||||
grammarBuilder.append("s : ({self.i += 1\n");
|
||||
grammarBuilder.append("print(\"i=\" + str(self.i))} a)+ ;\n");
|
||||
grammarBuilder.append("a : {self.i % 2 == 0}? ID {print(\"alt 1\")}\n");
|
||||
|
|
|
@@ -94,6 +94,7 @@ func NewBaseATNConfig(c ATNConfig, state ATNState, context PredictionContext, se
		context: context,
		semanticContext: semanticContext,
		reachesIntoOuterContext: c.GetReachesIntoOuterContext(),
		precedenceFilterSuppressed: c.getPrecedenceFilterSuppressed(),
	}
}

@@ -197,6 +198,8 @@ func (b *BaseATNConfig) String() string {
	return fmt.Sprintf("(%v,%v%v%v%v)", b.state, b.alt, s1, s2, s3)
}



type LexerATNConfig struct {
	*BaseATNConfig
	lexerActionExecutor *LexerActionExecutor

@@ -2,9 +2,16 @@ package antlr

var ATNSimulatorError = NewDFAState(0x7FFFFFFF, NewBaseATNConfigSet(false))

type IATNSimulator interface {
	SharedContextCache() *PredictionContextCache
	ATN() *ATN
	DecisionToDFA() []*DFA
}

type BaseATNSimulator struct {
	atn *ATN
	sharedContextCache *PredictionContextCache
	decisionToDFA []*DFA
}

func NewBaseATNSimulator(atn *ATN, sharedContextCache *PredictionContextCache) *BaseATNSimulator {
@@ -25,3 +32,15 @@ func (b *BaseATNSimulator) getCachedContext(context PredictionContext) Predictio

	return getCachedBasePredictionContext(context, b.sharedContextCache, visited)
}

func (b *BaseATNSimulator) SharedContextCache() *PredictionContextCache {
	return b.sharedContextCache
}

func (b *BaseATNSimulator) ATN() *ATN {
	return b.atn
}

func (b *BaseATNSimulator) DecisionToDFA() []*DFA {
	return b.decisionToDFA
}
@@ -7,10 +7,8 @@ import (
	"os"
)

//
// This is an InputStream that is loaded from a file all at once
// when you construct the object.
//

type FileStream struct {
	*InputStream

@@ -15,6 +15,8 @@ type Lexer interface {
	TokenSource
	Recognizer

	Emit() Token

	setChannel(int)
	pushMode(int)
	popMode() int
@@ -25,11 +27,12 @@ type Lexer interface {
type BaseLexer struct {
	*BaseRecognizer

	Interpreter *LexerATNSimulator
	Interpreter ILexerATNSimulator
	TokenStartCharIndex int
	TokenStartLine int
	TokenStartColumn int
	ActionType int
	Virt Lexer // The most derived lexer implementation. Allows virtual method calls.

	input CharStream
	factory TokenFactory
@@ -53,6 +56,8 @@ func NewBaseLexer(input CharStream) *BaseLexer {
	lexer.factory = CommonTokenFactoryDEFAULT
	lexer.tokenFactorySourcePair = &TokenSourceCharStreamPair{lexer, input}

	lexer.Virt = lexer

	lexer.Interpreter = nil // child classes must populate it

	// The goal of all lexer rules/methods is to create a token object.
@@ -129,7 +134,7 @@ func (b *BaseLexer) reset() {
	b.Interpreter.reset()
}

func (b *BaseLexer) GetInterpreter() *LexerATNSimulator {
func (b *BaseLexer) GetInterpreter() ILexerATNSimulator {
	return b.Interpreter
}

@@ -154,11 +159,12 @@ func (b *BaseLexer) setTokenFactory(f TokenFactory) {
}

func (b *BaseLexer) safeMatch() (ret int) {

	// previously in catch block
	defer func() {
		if e := recover(); e != nil {
			if re, ok := e.(RecognitionException); ok {
				if PortDebug {
					fmt.Println("RecognitionException")
				}
				b.notifyListeners(re) // Report error
				b.Recover(re)
				ret = LexerSkip // default
@@ -184,23 +190,29 @@ func (b *BaseLexer) NextToken() Token {
		b.input.Release(tokenStartMarker)
	}()

	for true {
	for {
		if b.hitEOF {
			b.emitEOF()
			b.EmitEOF()
			return b.token
		}
		b.token = nil
		b.channel = TokenDefaultChannel
		b.TokenStartCharIndex = b.input.Index()
		b.TokenStartColumn = b.Interpreter.column
		b.TokenStartLine = b.Interpreter.line
		b.TokenStartColumn = b.Interpreter.GetCharPositionInLine()
		b.TokenStartLine = b.Interpreter.GetLine()
		b.text = ""
		var continueOuter = false
		for true {
		continueOuter := false
		for {
			b.thetype = TokenInvalidType
			var ttype = LexerSkip
			ttype := LexerSkip

			ttype = b.safeMatch()
			if PortDebug {
				fmt.Println("ttype", ttype)
			}
			if PortDebug {
				fmt.Println("curType", b.thetype)
			}

			if b.input.LA(1) == TokenEOF {
				b.hitEOF = true
@@ -210,9 +222,15 @@ func (b *BaseLexer) NextToken() Token {
			}
			if b.thetype == LexerSkip {
				continueOuter = true
				if PortDebug {
					fmt.Println("skip")
				}
				break
			}
			if b.thetype != LexerMore {
				if PortDebug {
					fmt.Println("no more")
				}
				break
			}
			if PortDebug {
@@ -227,7 +245,7 @@ func (b *BaseLexer) NextToken() Token {
			continue
		}
		if b.token == nil {
			b.emit()
			b.Virt.Emit()
		}
		return b.token
	}
@@ -246,6 +264,9 @@ func (b *BaseLexer) Skip() {
}

func (b *BaseLexer) More() {
	if PortDebug {
		fmt.Println("more")
	}
	b.thetype = LexerMore
}

@@ -290,7 +311,7 @@ func (b *BaseLexer) setInputStream(input CharStream) {
// and GetToken (to push tokens into a list and pull from that list
// rather than a single variable as l implementation does).
// /
func (b *BaseLexer) emitToken(token Token) {
func (b *BaseLexer) EmitToken(token Token) {
	b.token = token
}

@ -300,35 +321,35 @@ func (b *BaseLexer) emitToken(token Token) {
|
|||
// use that to set the token's text. Override l method to emit
|
||||
// custom Token objects or provide a Newfactory.
|
||||
// /
|
||||
func (b *BaseLexer) emit() Token {
|
||||
func (b *BaseLexer) Emit() Token {
|
||||
if PortDebug {
|
||||
fmt.Println("emit")
|
||||
fmt.Println("emit base lexer")
|
||||
}
|
||||
var t = b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.getCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
|
||||
b.emitToken(t)
|
||||
var t = b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.GetCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
|
||||
b.EmitToken(t)
|
||||
return t
|
||||
}
|
||||
|
||||
func (b *BaseLexer) emitEOF() Token {
|
||||
func (b *BaseLexer) EmitEOF() Token {
|
||||
cpos := b.GetCharPositionInLine()
|
||||
lpos := b.GetLine()
|
||||
if PortDebug {
|
||||
fmt.Println("emitEOF")
|
||||
}
|
||||
var eof = b.factory.Create(b.tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b.input.Index(), b.input.Index()-1, lpos, cpos)
|
||||
b.emitToken(eof)
|
||||
b.EmitToken(eof)
|
||||
return eof
|
||||
}
|
||||
|
||||
func (b *BaseLexer) GetCharPositionInLine() int {
|
||||
return b.Interpreter.column
|
||||
return b.Interpreter.GetCharPositionInLine()
|
||||
}
|
||||
|
||||
func (b *BaseLexer) GetLine() int {
|
||||
return b.Interpreter.line
|
||||
return b.Interpreter.GetLine()
|
||||
}
|
||||
|
||||
func (b *BaseLexer) getType() int {
|
||||
func (b *BaseLexer) GetType() int {
|
||||
return b.thetype
|
||||
}
|
||||
|
||||
|
@ -337,7 +358,7 @@ func (b *BaseLexer) setType(t int) {
|
|||
}
|
||||
|
||||
// What is the index of the current character of lookahead?///
|
||||
func (b *BaseLexer) getCharIndex() int {
|
||||
func (b *BaseLexer) GetCharIndex() int {
|
||||
return b.input.Index()
|
||||
}
|
||||
|
||||
|
@ -356,7 +377,7 @@ func (b *BaseLexer) SetText(text string) {
|
|||
}
|
||||
|
||||
func (b *BaseLexer) GetATN() *ATN {
|
||||
return b.Interpreter.atn
|
||||
return b.Interpreter.ATN()
|
||||
}
|
||||
|
||||
// Return a list of all Token objects in input char stream.
|
||||
|
@ -366,14 +387,15 @@ func (b *BaseLexer) getAllTokens() []Token {
|
|||
if PortDebug {
|
||||
fmt.Println("getAllTokens")
|
||||
}
|
||||
var vl = b.Virt
|
||||
var tokens = make([]Token, 0)
|
||||
var t = b.NextToken()
|
||||
var t = vl.NextToken()
|
||||
for t.GetTokenType() != TokenEOF {
|
||||
tokens = append(tokens, t)
|
||||
if PortDebug {
|
||||
fmt.Println("getAllTokens")
|
||||
}
|
||||
t = b.NextToken()
|
||||
t = vl.NextToken()
|
||||
}
|
||||
return tokens
|
||||
}
|
||||
|
@ -414,7 +436,7 @@ func (b *BaseLexer) Recover(re RecognitionException) {
|
|||
if b.input.LA(1) != TokenEOF {
|
||||
if _, ok := re.(*LexerNoViableAltException); ok {
|
||||
// Skip a char and try again
|
||||
b.Interpreter.consume(b.input)
|
||||
b.Interpreter.Consume(b.input)
|
||||
} else {
|
||||
// TODO: Do we lose character or line position information?
|
||||
b.input.Consume()
|
||||
|
|
|
@@ -1,5 +1,9 @@

package antlr

import (
"fmt"
)

// Represents an executor for a sequence of lexer actions which traversed during
// the Matching operation of a lexer rule (token).
//

@@ -129,6 +133,10 @@ func (l *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionExecu

// of the token.
// /
func (l *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex int) {
if PortDebug {
fmt.Println("execute")
fmt.Println("len(lexerActions)", len(l.lexerActions))
}
var requiresSeek = false
var stopIndex = input.Index()

@@ -141,14 +149,24 @@ func (l *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex

for i := 0; i < len(l.lexerActions); i++ {
var lexerAction = l.lexerActions[i]
if la, ok := lexerAction.(*LexerIndexedCustomAction); ok {
if PortDebug {
fmt.Printf("LexerIndexedCustomAction")
}
var offset = la.offset
input.Seek(startIndex + offset)
lexerAction = la.lexerAction
requiresSeek = (startIndex + offset) != stopIndex
} else if lexerAction.getIsPositionDependent() {
if PortDebug {
fmt.Printf("posDep")
}
input.Seek(stopIndex)
requiresSeek = false
}
if PortDebug {
fmt.Println("exec")
fmt.Println(lexerAction)
}
lexerAction.execute(lexer)
}
}
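The executor above replays lexer actions after a token has matched: position-dependent actions need the input seeked to the offset recorded with the action, and the stream must end up back where the lexer left it. A simplified, hedged sketch of that seek/restore loop over plain structs (none of these names are the runtime's):

package main

import "fmt"

// Illustrative input stream with just the operations the executor needs.
type stream struct{ index int }

func (s *stream) Index() int { return s.index }
func (s *stream) Seek(i int) { s.index = i }

// An action paired with the character offset (from the token start) at
// which it should observe the input.
type indexedAction struct {
	offset int
	run    func(pos int)
}

// Replay actions the way the executor's loop does: seek to the recorded
// offset before a position-dependent action, then restore the stop index
// afterwards so the lexer resumes where it left off.
func execute(actions []indexedAction, input *stream, startIndex int) {
	stopIndex := input.Index()
	requiresSeek := false
	defer func() {
		if requiresSeek {
			input.Seek(stopIndex) // restore the lexer's position
		}
	}()
	for _, a := range actions {
		input.Seek(startIndex + a.offset)
		requiresSeek = (startIndex + a.offset) != stopIndex
		a.run(input.Index())
	}
}

func main() {
	in := &stream{index: 10} // pretend the token ended at index 10
	execute([]indexedAction{
		{offset: 0, run: func(pos int) { fmt.Println("action at", pos) }},
		{offset: 3, run: func(pos int) { fmt.Println("action at", pos) }},
	}, in, 5)
	fmt.Println("resumed at", in.Index()) // 10
}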
@@ -5,70 +5,37 @@ import (

"strconv"
)

// When we hit an accept state in either the DFA or the ATN, we
// have to notify the character stream to start buffering characters
// via {@link IntStream//mark} and record the current state. The current sim state
// includes the current index into the input, the current line,
// and current character position in that line. Note that the Lexer is
// tracking the starting line and characterization of the token. These
// variables track the "state" of the simulator when it hits an accept state.
//
// <p>We track these variables separately for the DFA and ATN simulation
// because the DFA simulation often has to fail over to the ATN
// simulation. If the ATN simulation fails, we need the DFA to fall
// back to its previously accepted state, if any. If the ATN succeeds,
// then the ATN does the accept and the DFA simulator that invoked it
// can simply return the predicted token type.</p>
///
type ILexerATNSimulator interface {
IATNSimulator

func resetSimState(sim *SimState) {
sim.index = -1
sim.line = 0
sim.column = -1
sim.dfaState = nil
}

type SimState struct {
index int
line int
column int
dfaState *DFAState
}

func NewSimState() *SimState {

s := new(SimState)
resetSimState(s)
return s

}

func (s *SimState) reset() {
resetSimState(s)
reset()
Match(input CharStream, mode int) int
GetCharPositionInLine() int
GetLine() int
GetText(input CharStream) string
Consume(input CharStream)
}

type LexerATNSimulator struct {
*BaseATNSimulator

recog Lexer
predictionMode int
DecisionToDFA []*DFA
mergeCache DoubleDict
startIndex int
line int
column int
mode int
prevAccept *SimState
MatchCalls int
recog Lexer
predictionMode int
mergeCache DoubleDict
startIndex int
Line int
CharPositionInLine int
mode int
prevAccept *SimState
MatchCalls int
}
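The comment block above explains why the simulator snapshots index, line, and column at every accept state: if the ATN simulation later fails, the DFA must fall back to the last accepted position. A hedged, minimal sketch of that capture/fallback idea (all names illustrative):

package main

import "fmt"

// Snapshot of the simulator at the most recent accept state.
type simState struct {
	index, line, column int
	valid               bool
}

type input struct{ index, line, column int }

// captureSimState records where the last accept happened.
func captureSimState(s *simState, in *input) {
	*s = simState{index: in.index, line: in.line, column: in.column, valid: true}
}

// failOrAccept either rolls the input back to the last accept or reports
// a failure, mirroring the DFA-to-ATN failover described above.
func failOrAccept(prev *simState, in *input) (int, error) {
	if !prev.valid {
		return 0, fmt.Errorf("no viable alternative at index %d", in.index)
	}
	in.index, in.line, in.column = prev.index, prev.line, prev.column
	return prev.index, nil
}

func main() {
	in := &input{}
	var prev simState

	in.index, in.column = 3, 3 // consumed three chars and hit an accept state
	captureSimState(&prev, in)

	in.index, in.column = 7, 7 // speculated further, then the match failed

	if idx, err := failOrAccept(&prev, in); err == nil {
		fmt.Println("token ends at", idx) // rolls back to 3
	}
}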
func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *LexerATNSimulator {

l := new(LexerATNSimulator)

l.BaseATNSimulator = NewBaseATNSimulator(atn, sharedContextCache)

l.DecisionToDFA = decisionToDFA
l.decisionToDFA = decisionToDFA
l.recog = recog
// The current token's starting index into the character stream.
// Shared across DFA to ATN simulation in case the ATN fails and the

@@ -76,10 +43,10 @@ func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedCon

// ATN-generated exception object.
l.startIndex = -1
// line number 1..n within the input///
l.line = 1
l.Line = 1
// The index of the character relative to the beginning of the line
// 0..n-1///
l.column = 0
l.CharPositionInLine = 0
l.mode = LexerDefaultMode
// Used during DFA/ATN exec to record the most recent accept configuration
// info

@@ -97,14 +64,13 @@ var LexerATNSimulatorMaxDFAEdge = 127 // forces unicode to stay in ATN

var LexerATNSimulatorMatchCalls = 0

func (l *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
l.column = simulator.column
l.line = simulator.line
l.CharPositionInLine = simulator.CharPositionInLine
l.Line = simulator.Line
l.mode = simulator.mode
l.startIndex = simulator.startIndex
}

func (l *LexerATNSimulator) Match(input CharStream, mode int) int {

if PortDebug {
fmt.Println("Match")
}

@@ -123,7 +89,7 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {

l.startIndex = input.Index()
l.prevAccept.reset()

var dfa = l.DecisionToDFA[mode]
var dfa = l.decisionToDFA[mode]

if dfa.s0 == nil {
if PortDebug {

@@ -134,6 +100,7 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {

if PortDebug {
fmt.Println("execATN")
fmt.Println("mode", mode, len(l.decisionToDFA[0].s0.edges))
}

return l.execATN(input, dfa.s0)

@@ -142,8 +109,8 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {

func (l *LexerATNSimulator) reset() {
l.prevAccept.reset()
l.startIndex = -1
l.line = 1
l.column = 0
l.Line = 1
l.CharPositionInLine = 0
l.mode = LexerDefaultMode
}

@@ -161,13 +128,13 @@ func (l *LexerATNSimulator) MatchATN(input CharStream) int {

var next = l.addDFAState(s0Closure)

if !suppressEdge {
l.DecisionToDFA[l.mode].s0 = next
l.decisionToDFA[l.mode].s0 = next
}

var predict = l.execATN(input, next)

if LexerATNSimulatorDebug {
fmt.Println("DFA after MatchATN: " + l.DecisionToDFA[oldMode].ToLexerString())
fmt.Println("DFA after MatchATN: " + l.decisionToDFA[oldMode].ToLexerString())
}
return predict
}

@@ -184,7 +151,16 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {

var t = input.LA(1)
var s = ds0 // s is current/from DFA state

for true { // while more work
if PortDebug {
fs,ok := input.(*FileStream)
if ok {
fmt.Println("enter execATN", t, len(s.edges), fs.index, fs.size)
} else {
fmt.Println("enter execATN", t, len(s.edges))
}
}

for { // while more work
if LexerATNSimulatorDebug {
fmt.Println("execATN loop starting closure: " + s.configs.String())
}

@@ -206,11 +182,11 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {

// This optimization makes a lot of sense for loops within DFA.
// A character will take us back to an existing DFA state
// that already has lots of edges out of it. e.g., .* in comments.
// print("Target for:" + str(s) + " and:" + str(t))
var target = l.getExistingTargetState(s, t)
// if PortDebug {
// fmt.Println(target)
// }
if PortDebug {
fmt.Println(t)
fmt.Println(target != nil)
}
if target == nil {
target = l.computeTargetState(input, s, t)
// print("Computed:" + str(target))

@@ -223,7 +199,10 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {

// position accurately reflect the state of the interpreter at the
// end of the token.
if t != TokenEOF {
l.consume(input)
if PortDebug {
fmt.Println("consume", t, TokenEOF)
}
l.Consume(input)
}
if target.isAcceptState {
l.captureSimState(l.prevAccept, input, target)

@@ -256,9 +235,10 @@ func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState

}

var target = s.edges[t-LexerATNSimulatorMinDFAEdge]
if target == nil {
target = nil
if PortDebug {
fmt.Println("len edges", len(s.edges), t, t-LexerATNSimulatorMinDFAEdge)
}

if LexerATNSimulatorDebug && target != nil {
fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
}
@@ -297,6 +277,9 @@ func (l *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t

func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach ATNConfigSet, t int) int {
if l.prevAccept.dfaState != nil {
if PortDebug {
fmt.Println(prevAccept.dfaState)
}
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor
l.accept(input, lexerActionExecutor, l.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)

@@ -323,6 +306,7 @@ func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNC

var SkipAlt = ATNInvalidAltNumber

if PortDebug {
fmt.Println("getReachableConfigSet")
fmt.Println("CLOSURE SIZE" + strconv.Itoa(len(closure.GetItems())))
}

@@ -363,8 +347,8 @@ func (l *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerA

}
// seek to after last char in token
input.Seek(index)
l.line = line
l.column = charPos
l.Line = line
l.CharPositionInLine = charPos
if lexerActionExecutor != nil && l.recog != nil {
lexerActionExecutor.execute(l.recog, input, startIndex)
}

@@ -562,26 +546,29 @@ func (l *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predI

if !speculative {
return l.recog.Sempred(nil, ruleIndex, predIndex)
}
var savedcolumn = l.column
var savedLine = l.line
var savedcolumn = l.CharPositionInLine
var savedLine = l.Line
var index = input.Index()
var marker = input.Mark()

defer func() {
l.column = savedcolumn
l.line = savedLine
l.CharPositionInLine = savedcolumn
l.Line = savedLine
input.Seek(index)
input.Release(marker)
}()

l.consume(input)
if PortDebug {
fmt.Println("evalPred")
}
l.Consume(input)
return l.recog.Sempred(nil, ruleIndex, predIndex)
}

func (l *LexerATNSimulator) captureSimState(settings *SimState, input CharStream, dfaState *DFAState) {
settings.index = input.Index()
settings.line = l.line
settings.column = l.column
settings.line = l.Line
settings.column = l.CharPositionInLine
settings.dfaState = dfaState
}

@@ -648,7 +635,7 @@ func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {

proposed.setPrediction(l.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()])
}
var hash = proposed.Hash()
var dfa = l.DecisionToDFA[l.mode]
var dfa = l.decisionToDFA[l.mode]
var existing = dfa.GetStates()[hash]
if existing != nil {
return existing

@@ -662,7 +649,7 @@ func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {

}

func (l *LexerATNSimulator) getDFA(mode int) *DFA {
return l.DecisionToDFA[mode]
return l.decisionToDFA[mode]
}

// Get the text Matched so far for the current token.

@@ -671,17 +658,28 @@ func (l *LexerATNSimulator) GetText(input CharStream) string {

return input.GetTextFromInterval(NewInterval(l.startIndex, input.Index()-1))
}

func (l *LexerATNSimulator) consume(input CharStream) {
func (l *LexerATNSimulator) Consume(input CharStream) {
if PortDebug {
fmt.Println("consume", input.Index(), input.Size())
}
var curChar = input.LA(1)
if curChar == int('\n') {
l.line++
l.column = 0
l.Line++
l.CharPositionInLine = 0
} else {
l.column++
l.CharPositionInLine++
}
input.Consume()
}
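Consume advances the stream while keeping Line and CharPositionInLine in sync, resetting the column on every newline. A tiny sketch of that bookkeeping in isolation (names illustrative):

package main

import "fmt"

// Track a 1-based line and 0-based column while consuming runes, the
// same '\n' handling Consume performs.
type position struct {
	line, column int
}

func (p *position) consume(r rune) {
	if r == '\n' {
		p.line++
		p.column = 0
	} else {
		p.column++
	}
}

func main() {
	p := &position{line: 1, column: 0}
	for _, r := range "ab\ncd" {
		p.consume(r)
	}
	fmt.Println(p.line, p.column) // 2 2
}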
func (l *LexerATNSimulator) GetCharPositionInLine() int {
return l.CharPositionInLine
}

func (l *LexerATNSimulator) GetLine() int {
return l.Line
}

func (l *LexerATNSimulator) GetTokenName(tt int) string {
if PortDebug {
fmt.Println(tt)

@@ -692,3 +690,27 @@ func (l *LexerATNSimulator) GetTokenName(tt int) string {

return "'" + string(tt) + "'"
}

func resetSimState(sim *SimState) {
sim.index = -1
sim.line = 0
sim.column = -1
sim.dfaState = nil
}

type SimState struct {
index int
line int
column int
dfaState *DFAState
}

func NewSimState() *SimState {
s := new(SimState)
resetSimState(s)
return s
}

func (s *SimState) reset() {
resetSimState(s)
}
@@ -691,13 +691,13 @@ func (p *BaseParser) GetRuleInvocationStack(c ParserRuleContext) []string {

// For debugging and other purposes.//
func (p *BaseParser) GetDFAStrings() string {
return fmt.Sprint(p.Interpreter.DecisionToDFA)
return fmt.Sprint(p.Interpreter.decisionToDFA)
}

// For debugging and other purposes.//
func (p *BaseParser) DumpDFA() {
var seenOne = false
for _, dfa := range p.Interpreter.DecisionToDFA {
for _, dfa := range p.Interpreter.decisionToDFA {
if len(dfa.GetStates()) > 0 {
if seenOne {
fmt.Println()
@@ -14,7 +14,6 @@ type ParserATNSimulator struct {

input TokenStream
startIndex int
dfa *DFA
DecisionToDFA []*DFA
mergeCache *DoubleDict
outerContext ParserRuleContext
}

@@ -26,7 +25,7 @@ func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, shared

p.BaseATNSimulator = NewBaseATNSimulator(atn, sharedContextCache)

p.parser = parser
p.DecisionToDFA = decisionToDFA
p.decisionToDFA = decisionToDFA
// SLL, LL, or LL + exact ambig detection?//
p.predictionMode = PredictionModeLL
// LAME globals to avoid parameters!!!!! I need these down deep in predTransition

@@ -81,7 +80,7 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou

p.startIndex = input.Index()
p.outerContext = outerContext

var dfa = p.DecisionToDFA[decision]
var dfa = p.decisionToDFA[decision]
p.dfa = dfa
var m = input.Mark()
var index = input.Index()

@@ -136,7 +135,13 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou

// appropriate start state for the precedence level rather
// than simply setting DFA.s0.
//
if PortDebug {
fmt.Println("precfilter", s0Closure)
}
s0Closure = p.applyPrecedenceFilter(s0Closure)
if PortDebug {
fmt.Println("precfilter", s0Closure)
}
s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
dfa.setPrecedenceStartState(p.parser.GetPrecedence(), s0)
} else {
@@ -196,7 +201,7 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,

fmt.Println("s0 = " + s0.String())
}
var t = input.LA(1)
for true { // for more work
for { // for more work
var D = p.getExistingTargetState(previousD, t)
if D == nil {
D = p.computeTargetState(dfa, previousD, t)

@@ -391,7 +396,7 @@ func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 AT

var t = input.LA(1)
var predictedAlt = -1

for true { // for more work
for { // for more work
reach = p.computeReachSet(previous, t, fullCtx)
if reach == nil {
// if any configs in previous dipped into outer context, that

@@ -737,9 +742,18 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf

var statesFromAlt1 = make(map[int]PredictionContext)
var configSet = NewBaseATNConfigSet(configs.FullContext())

if PortDebug {
fmt.Println("len", len(configs.GetItems()))
for _, config := range configs.GetItems() {
fmt.Println(config.getPrecedenceFilterSuppressed())
}
}
for _, config := range configs.GetItems() {
// handle alt 1 first
if config.GetAlt() != 1 {
if PortDebug {
fmt.Println("getalt1")
}
continue
}
var updatedContext = config.GetSemanticContext().evalPrecedence(p.parser, p.outerContext)

@@ -749,26 +763,42 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf

}
statesFromAlt1[config.GetState().GetStateNumber()] = config.GetContext()
if updatedContext != config.GetSemanticContext() {
if PortDebug {
fmt.Println("add1")
}
configSet.Add(NewBaseATNConfig2(config, updatedContext), p.mergeCache)
} else {
if PortDebug {
fmt.Println("add2")
}
configSet.Add(config, p.mergeCache)
}
}
for _, config := range configs.GetItems() {

if config.GetAlt() == 1 {
// already handled
if PortDebug {
fmt.Println("getalt2")
}
continue
}
// In the future, p elimination step could be updated to also
// filter the prediction context for alternatives predicting alt>1
// (basically a graph subtraction algorithm).
if !config.getPrecedenceFilterSuppressed() {
if PortDebug {
fmt.Println("!getPrecedenceFilterSuppressed")
}
var context = statesFromAlt1[config.GetState().GetStateNumber()]
if context != nil && context.equals(config.GetContext()) {
// eliminated
continue
}
}
if PortDebug {
fmt.Println("add3", config.getPrecedenceFilterSuppressed())
}
configSet.Add(config, p.mergeCache)
}
return configSet
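applyPrecedenceFilter above works in two passes: the first keeps the alt-1 configurations and remembers their contexts per ATN state, the second drops any alt>1 configuration whose context matches the stored alt-1 context for the same state, unless the config has the filter suppressed. A simplified, hedged sketch of that two-pass filter over plain structs (types and field names are illustrative, not the runtime's):

package main

import "fmt"

// Simplified stand-in for an ATN configuration.
type config struct {
	state      int
	alt        int
	context    string // stands in for the prediction context
	suppressed bool   // precedence filter suppressed
}

// Keep alt-1 configs, then drop any alt>1 config whose context is
// identical to the alt-1 context for the same state (unless the config
// opted out of the filter).
func applyPrecedenceFilter(configs []config) []config {
	statesFromAlt1 := map[int]string{}
	var out []config

	for _, c := range configs { // pass 1: alt 1 only
		if c.alt != 1 {
			continue
		}
		statesFromAlt1[c.state] = c.context
		out = append(out, c)
	}
	for _, c := range configs { // pass 2: everything else
		if c.alt == 1 {
			continue
		}
		if !c.suppressed {
			if ctx, ok := statesFromAlt1[c.state]; ok && ctx == c.context {
				continue // eliminated: alt 1 already covers this state/context
			}
		}
		out = append(out, c)
	}
	return out
}

func main() {
	configs := []config{
		{state: 5, alt: 1, context: "A"},
		{state: 5, alt: 2, context: "A"},                   // dropped
		{state: 5, alt: 2, context: "B"},                   // kept: different context
		{state: 7, alt: 3, context: "A", suppressed: true}, // kept: suppressed
	}
	fmt.Println(len(applyPrecedenceFilter(configs))) // 3
}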
@@ -1073,7 +1103,7 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,

if PortDebug {
fmt.Println("DEBUG 2")
fmt.Println(closureBusy)
fmt.Println(closureBusy.String())
}
// target fell off end of rule mark resulting c as having dipped into outer context
// We can't get here if incoming config was rule stop and we had context

@@ -1083,7 +1113,7 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,

if closureBusy.add(c) != c {
if PortDebug {
fmt.Println("DEBUG 3")
fmt.Println("DEBUG 3", i, len(state.GetTransitions()))
}
// avoid infinite recursion for right-recursive rules
continue

@@ -1099,6 +1129,9 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,

fmt.Println("DEBUG 4")
}
if t.(*EpsilonTransition).outermostPrecedenceReturn == p.dfa.atnStartState.GetRuleIndex() {
if PortDebug {
fmt.Println("setPrecedenceFilterSuppressed")
}
c.setPrecedenceFilterSuppressed(true)
}
}

@@ -1115,9 +1148,15 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,

newDepth++
}
}
if PortDebug {
fmt.Println("closureCheckingStopState")
}
p.closureCheckingStopState(c, configs, closureBusy, continueCollecting, fullCtx, newDepth, treatEOFAsEpsilon)
}
}
if PortDebug {
fmt.Println("closureWork done")
}
}

func (p *ParserATNSimulator) getRuleName(index int) string {
@@ -152,9 +152,18 @@ Lexer.prototype.nextToken = function() {

try {
ttype = this._interp.match(this._input, this._mode);
} catch (e) {
if (PORT_DEBUG) {
console.log("RecognitionException")
}
this.notifyListeners(e); // report error
this.recover(e);
}
if (PORT_DEBUG) {
console.log("ttype", ttype)
}
if (PORT_DEBUG) {
console.log("curType", this._type)
}
if (this._input.LA(1) === Token.EOF) {
this._hitEOF = true;
}

@@ -162,10 +171,16 @@ Lexer.prototype.nextToken = function() {

this._type = ttype;
}
if (this._type === Lexer.SKIP) {
if (PORT_DEBUG) {
console.log("skip")
}
continueOuter = true;
break;
}
if (this._type !== Lexer.MORE) {
if (PORT_DEBUG) {
console.log("no more")
}
break;
}

@@ -204,6 +219,9 @@ Lexer.prototype.skip = function() {

};

Lexer.prototype.more = function() {
if (PORT_DEBUG) {
console.log("more")
}
this._type = Lexer.MORE;
};
@@ -165,7 +165,7 @@ Object.defineProperty(Recognizer.prototype, "state", {

},
set : function(state) {
if (PORT_DEBUG) {
console.log("SETTING STATE" + state + " from " + this._stateNumber )
console.log("SETTING STATE " + state + " from " + this._stateNumber )
}
this._stateNumber = state;
}
@@ -138,6 +138,19 @@ LexerATNSimulator.prototype.match = function(input, mode) {

} else {
if (PORT_DEBUG) {
console.log("execATN")
if (this.decisionToDFA[mode] && this.decisionToDFA[mode].s0){

var s = "";
// for (var i= 0; i < this.decisionToDFA[mode].s0.edges.length; i++) {
// if (this.decisionToDFA[mode].s0.edges[i]){
// s += this.decisionToDFA[mode].s0.edges[i].toString();
// } else {
// s += "<nil>";
// }
// }
// s += "]";
console.log("mode", mode, this.decisionToDFA[mode].s0.edges.length)
}
}
var res = this.execATN(input, dfa.s0);
return res;

@@ -193,6 +206,10 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {

var t = input.LA(1);
var s = ds0; // s is current/from DFA state

if (PORT_DEBUG) {
console.log("enter execATN", t, s.edges ? s.edges.length : 0, input.index, input.size)
}

while (true) { // while more work
if (this.debug) {
console.log("execATN loop starting closure: " + s.configs);

@@ -217,9 +234,10 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {

// that already has lots of edges out of it. e.g., .* in comments.
// print("Target for:" + str(s) + " and:" + str(t))
var target = this.getExistingTargetState(s, t);
// if (PORT_DEBUG) {
// console.log(target)
// }
if (PORT_DEBUG) {
console.log(t)
console.log(target != null)
}

if (target === null) {
target = this.computeTargetState(input, s, t);

@@ -233,6 +251,9 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {

// position accurately reflect the state of the interpreter at the
// end of the token.
if (t !== Token.EOF) {
if (PORT_DEBUG) {
console.log("not eof", t, Token.EOF)
}
this.consume(input);
}
if (target.isAcceptState) {

@@ -269,6 +290,9 @@ LexerATNSimulator.prototype.getExistingTargetState = function(s, t) {

if(target===undefined) {
target = null;
}
if (PORT_DEBUG) {
console.log("len edges", s.edges.length, t, t - LexerATNSimulator.MIN_DFA_EDGE)
}
if (this.debug && target !== null) {
console.log("reuse state " + s.stateNumber + " edge to " + target.stateNumber);
}

@@ -307,8 +331,10 @@ LexerATNSimulator.prototype.computeTargetState = function(input, s, t) {

LexerATNSimulator.prototype.failOrAccept = function(prevAccept, input, reach, t) {

if (this.prevAccept.dfaState !== null) {

var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor;
if (PORT_DEBUG) {
console.log(prevAccept.dfaState.toString())
}
this.accept(input, lexerActionExecutor, this.startIndex,
prevAccept.index, prevAccept.line, prevAccept.column);

@@ -336,6 +362,7 @@ LexerATNSimulator.prototype.getReachableConfigSet = function(input, closure,

var skipAlt = ATN.INVALID_ALT_NUMBER;

if (PORT_DEBUG) {
console.log("getReachableConfigSet")
console.log("CLOSURE SIZE" + closure.items.length)
}

@@ -575,6 +602,9 @@ LexerATNSimulator.prototype.evaluatePredicate = function(input, ruleIndex,

var index = input.index;
var marker = input.mark();
try {
if (PORT_DEBUG) {
console.log("evalPred")
}
this.consume(input);
return this.recog.sempred(null, ruleIndex, predIndex);
} finally {

@@ -681,6 +711,9 @@ LexerATNSimulator.prototype.getText = function(input) {

};

LexerATNSimulator.prototype.consume = function(input) {
if (PORT_DEBUG) {
console.log("consume", input.index, input.size);
}
var curChar = input.LA(1);
if (curChar === "\n".charCodeAt(0)) {
this.line += 1;
@@ -134,20 +134,34 @@ LexerActionExecutor.prototype.fixOffsetBeforeMatch = function(offset) {

// of the token.
// /
LexerActionExecutor.prototype.execute = function(lexer, input, startIndex) {
if (PORT_DEBUG) {
console.log("execute");
console.log("len(lexerActions)",this.lexerActions.length);
}
var requiresSeek = false;
var stopIndex = input.index;
try {
for (var i = 0; i < this.lexerActions.length; i++) {
var lexerAction = this.lexerActions[i];
if (lexerAction instanceof LexerIndexedCustomAction) {
if (PORT_DEBUG) {
console.log("LexerIndexedCustomAction");
}
var offset = lexerAction.offset;
input.seek(startIndex + offset);
lexerAction = lexerAction.action;
requiresSeek = (startIndex + offset) !== stopIndex;
} else if (lexerAction.isPositionDependent) {
if (PORT_DEBUG) {
console.log("posDep");
}
input.seek(stopIndex);
requiresSeek = false;
}
if (PORT_DEBUG) {
console.log("exec");
console.log(lexerAction.toString());
}
lexerAction.execute(lexer);
}
} finally {
@@ -383,7 +383,13 @@ ParserATNSimulator.prototype.adaptivePredict = function(input, decision, outerCo

// appropriate start state for the precedence level rather
// than simply setting DFA.s0.
//
if (PORT_DEBUG){
console.log("precfilter", s0_closure.toString())
}
s0_closure = this.applyPrecedenceFilter(s0_closure);
if (PORT_DEBUG){
console.log("precfilter", s0_closure.toString())
}
s0 = this.addDFAState(dfa, new DFAState(null, s0_closure));
dfa.setPrecedenceStartState(this.parser.getPrecedence(), s0);
} else {

@@ -979,10 +985,20 @@ ParserATNSimulator.prototype.applyPrecedenceFilter = function(configs) {

var config;
var statesFromAlt1 = [];
var configSet = new ATNConfigSet(configs.fullCtx);
if (PORT_DEBUG) {
console.log("len", configs.items.length)
for(var i=0; i<configs.items.length; i++) {
config = configs.items[i];
console.log(config.precedenceFilterSuppressed)
}
}
for(var i=0; i<configs.items.length; i++) {
config = configs.items[i];
// handle alt 1 first
if (config.alt !== 1) {
if (PORT_DEBUG) {
console.log("getalt1")
}
continue;
}
var updatedContext = config.semanticContext.evalPrecedence(this.parser, this._outerContext);

@@ -992,8 +1008,14 @@ ParserATNSimulator.prototype.applyPrecedenceFilter = function(configs) {

}
statesFromAlt1[config.state.stateNumber] = config.context;
if (updatedContext !== config.semanticContext) {
if (PORT_DEBUG) {
console.log("add 1")
}
configSet.add(new ATNConfig({semanticContext:updatedContext}, config), this.mergeCache);
} else {
if (PORT_DEBUG) {
console.log("add 2")
}
configSet.add(config, this.mergeCache);
}
}

@@ -1001,18 +1023,27 @@ ParserATNSimulator.prototype.applyPrecedenceFilter = function(configs) {

config = configs.items[i];
if (config.alt === 1) {
// already handled
if (PORT_DEBUG) {
console.log("getalt2")
}
continue;
}
// In the future, this elimination step could be updated to also
// filter the prediction context for alternatives predicting alt>1
// (basically a graph subtraction algorithm).
if (!config.precedenceFilterSuppressed) {
if (PORT_DEBUG) {
console.log("!precedenceFilterSuppressed")
}
var context = statesFromAlt1[config.state.stateNumber] || null;
if (context!==null && context.equals(config.context)) {
// eliminated
continue;
}
}
if (PORT_DEBUG) {
console.log("add 3", config.precedenceFilterSuppressed)
}
configSet.add(config, this.mergeCache);
}
return configSet;

@@ -1327,6 +1358,7 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c

if (PORT_DEBUG) {
console.log("DEBUG 2")
console.log(closureBusy.toString())
}
// target fell off end of rule; mark resulting c as having dipped into outer context
// We can't get here if incoming config was rule stop and we had context

@@ -1336,7 +1368,7 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c

if (closureBusy.add(c)!==c) {
if (PORT_DEBUG) {
console.log("DEBUG 3")
console.log("DEBUG 3", i, p.transitions.length)
}
// avoid infinite recursion for right-recursive rules
continue;

@@ -1352,6 +1384,9 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c

console.log("DEBUG 4")
}
if (t.outermostPrecedenceReturn === this._dfa.atnStartState.ruleIndex) {
if (PORT_DEBUG) {
console.log("precedenceFilterSuppressed")
}
c.precedenceFilterSuppressed = true;
}
}

@@ -1369,9 +1404,16 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c

newDepth += 1;
}
}
if (PORT_DEBUG) {
console.log("computeCheckingStopState")
}
this.closureCheckingStopState(c, configs, closureBusy, continueCollecting, fullCtx, newDepth, treatEofAsEpsilon);
}
}

if (PORT_DEBUG) {
console.log("closure_ done")
}
};

ParserATNSimulator.prototype.getRuleName = function( index) {
@@ -19,17 +19,16 @@ import (

"github.com/pboyer/antlr4/runtime/Go/antlr"
)

// Stopgap to suppress unused import error. We aren't certain
// to have these imports used in the generated code below

var _ = fmt.Printf
var _ = reflect.Copy
var _ = strconv.Itoa
<if(namedActions.header)>

<namedActions.header>
<endif>

// Suppress unused import errors
var _ = fmt.Printf
var _ = reflect.Copy
var _ = strconv.Itoa

<if(parser)>

<parser>

@@ -1335,18 +1334,19 @@ package parser

import (
"fmt"
"unicode"

"github.com/pboyer/antlr4/runtime/Go/antlr"
)

// suppress unused import error, many tests
// require fmt.
var _ = fmt.Printf
<if(namedActions.header)>

<namedActions.header>
<endif>

// Suppress unused import error
var _ = fmt.Printf
var _ = unicode.IsLetter

<if(lexer)>

<lexer>

@@ -1363,7 +1363,6 @@ var serializedLexerAtn []uint16

var lexerDeserializer = antlr.NewATNDeserializer(nil)

var lexerAtn = lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn)

<if(lexer.modes)>

@@ -1415,20 +1414,19 @@ func New<lexer.name>(input antlr.CharStream) *<lexer.name> {

lexerDecisionToDFA[index] = antlr.NewDFA(ds, index)
}

this := new(<lexer.name>)
l := new(<lexer.name>)

this.BaseLexer = antlr.NewBaseLexer(input)
l.BaseLexer = antlr.NewBaseLexer(input)
l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache())

this.Interpreter = antlr.NewLexerATNSimulator(this, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache())
l.modeNames = lexerModeNames
l.RuleNames = lexerRuleNames
l.LiteralNames = lexerLiteralNames
l.SymbolicNames = lexerSymbolicNames
l.GrammarFileName = "<lexer.grammarFileName>"
// TODO: l.EOF = antlr.TokenEOF

this.modeNames = lexerModeNames
this.RuleNames = lexerRuleNames
this.LiteralNames = lexerLiteralNames
this.SymbolicNames = lexerSymbolicNames
this.GrammarFileName = "<lexer.grammarFileName>"
// TODO: lex.EOF = antlr.TokenEOF

return this
return l
}
<if(rest(lexer.tokens))>

@@ -1444,7 +1442,7 @@ const <lexer.name><first(lexer.tokens)> = <lexer.tokens.(first(lexer.tokens))>

<if(rest(rest(lexer.modes)))>

// <lexer.name> modes.
// <lexer.name> modes
const (
<first(rest(lexer.modes)):{m | <lexer.name><m> = iota + 1}>
<rest(rest(lexer.modes)):{m | <lexer.name><m>}; separator="\n">

@@ -1454,6 +1452,10 @@ const (

// <lexer.name><first(rest(lexer.modes))> is the <lexer.name> mode.
const <lexer.name><first(rest(lexer.modes))> = 1
<endif>
<if(namedActions.members)>

<namedActions.members>
<endif>

<dumpActions(lexer, "", actionFuncs, sempredFuncs)>
>>
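The template hunks above keep imports legal with blank-identifier assignments because the generated code may or may not end up using fmt, strconv, reflect, or unicode. A tiny standalone example of that Go idiom:

package main

import (
	"fmt"
	"strconv"
)

// Generated code may never call strconv, so binding one of its
// identifiers to the blank identifier keeps the import compiling
// even when it goes otherwise unused.
var _ = strconv.Itoa

func main() {
	fmt.Println("import kept without being used")
}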