forked from jasder/antlr
add
[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 8642]
This commit is contained in:
parent
a13068c7c2
commit
72ee89294f
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,82 @@
|
|||
/*
|
||||
[The "BSD license"]
|
||||
Copyright (c) 2005-2009 Terence Parr
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
3. The name of the author may not be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
package org.antlr.v4.test;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.misc.Utils;
|
||||
import org.antlr.v4.tool.ANTLRToolListener;
|
||||
import org.antlr.v4.tool.ANTLRMessage;
|
||||
import org.antlr.v4.tool.ToolMessage;
|
||||
import org.stringtemplate.v4.ST;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class ErrorQueue implements ANTLRToolListener {
|
||||
public List<String> infos = new ArrayList<String>();
|
||||
public List<ANTLRMessage> errors = new ArrayList<ANTLRMessage>();
|
||||
public List<ANTLRMessage> warnings = new ArrayList<ANTLRMessage>();
|
||||
public List<ANTLRMessage> all = new ArrayList<ANTLRMessage>();
|
||||
|
||||
public void info(String msg) {
|
||||
infos.add(msg);
|
||||
}
|
||||
|
||||
public void error(ANTLRMessage msg) {
|
||||
errors.add(msg);
|
||||
all.add(msg);
|
||||
}
|
||||
|
||||
public void warning(ANTLRMessage msg) {
|
||||
warnings.add(msg);
|
||||
all.add(msg);
|
||||
}
|
||||
|
||||
public void error(ToolMessage msg) {
|
||||
errors.add(msg);
|
||||
all.add(msg);
|
||||
}
|
||||
|
||||
public int size() {
|
||||
return all.size() + infos.size();
|
||||
}
|
||||
|
||||
public String toString() { return Utils.join(all.iterator(), "\n"); }
|
||||
|
||||
public String toString(Tool tool) {
|
||||
StringBuilder buf = new StringBuilder();
|
||||
for (ANTLRMessage m : all) {
|
||||
ST st = tool.errMgr.getMessageTemplate(m);
|
||||
buf.append(st.render());
|
||||
buf.append("\n");
|
||||
}
|
||||
return buf.toString();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,428 @@
|
|||
package org.antlr.v4.test;
|
||||
|
||||
import org.antlr.runtime.*;
|
||||
import org.antlr.runtime.tree.*;
|
||||
import org.antlr.v4.gunit.gUnitBase;
|
||||
import org.junit.Test;
|
||||
import org.junit.Before;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class TestASTStructure extends gUnitBase {
|
||||
@Before public void setup() {
|
||||
lexerClassName = "org.antlr.v4.parse.ANTLRLexer";
|
||||
parserClassName = "org.antlr.v4.parse.ANTLRParser";
|
||||
adaptorClassName = "org.antlr.v4.parse.GrammarASTAdaptor"; }
|
||||
@Test public void test_grammarSpec1() throws Exception {
|
||||
// gunit test on line 15
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("grammarSpec", "parser grammar P; a : A;", 15);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(PARSER_GRAMMAR P (RULES (RULE a (BLOCK (ALT A)))))";
|
||||
assertEquals("testing rule grammarSpec", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_grammarSpec2() throws Exception {
|
||||
// gunit test on line 18
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("grammarSpec", "\n parser grammar P;\n options {k=2; output=AST;}\n scope S {int x}\n tokens { A; B='33'; }\n @header {foo}\n a : A;\n ", 18);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(PARSER_GRAMMAR P (OPTIONS (= k 2) (= output AST)) (scope S {int x}) (tokens { A (= B '33')) (@ header {foo}) (RULES (RULE a (BLOCK (ALT A)))))";
|
||||
assertEquals("testing rule grammarSpec", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_grammarSpec3() throws Exception {
|
||||
// gunit test on line 34
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("grammarSpec", "\n parser grammar P;\n @header {foo}\n tokens { A; B='33'; }\n options {k=2; ASTLabel=a.b.c; output=AST;}\n scope S {int x}\n a : A;\n ", 34);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(PARSER_GRAMMAR P (@ header {foo}) (tokens { A (= B '33')) (OPTIONS (= k 2) (= ASTLabel a.b.c) (= output AST)) (scope S {int x}) (RULES (RULE a (BLOCK (ALT A)))))";
|
||||
assertEquals("testing rule grammarSpec", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_grammarSpec4() throws Exception {
|
||||
// gunit test on line 50
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("grammarSpec", "\n parser grammar P;\n import A=B, C;\n a : A;\n ", 50);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(PARSER_GRAMMAR P (import (= A B) C) (RULES (RULE a (BLOCK (ALT A)))))";
|
||||
assertEquals("testing rule grammarSpec", expecting, actual);
|
||||
} @Test public void test_delegateGrammars1() throws Exception {
|
||||
// gunit test on line 61
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("delegateGrammars", "import A;", 61);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(import A)";
|
||||
assertEquals("testing rule delegateGrammars", expecting, actual);
|
||||
} @Test public void test_rule1() throws Exception {
|
||||
// gunit test on line 64
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("rule", "a : A<X,Y=a.b.c>;", 64);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(RULE a (BLOCK (ALT (A (ELEMENT_OPTIONS X (= Y a.b.c))))))";
|
||||
assertEquals("testing rule rule", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_rule2() throws Exception {
|
||||
// gunit test on line 66
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("rule", "A : B+;", 66);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(RULE A (BLOCK (ALT (+ (BLOCK (ALT B))))))";
|
||||
assertEquals("testing rule rule", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_rule3() throws Exception {
|
||||
// gunit test on line 68
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("rule", "\n public a[int i] returns [int y]\n options {backtrack=true;}\n scope {int ss;}\n scope S,T;\n @init {blort}\n : ID ;\n ", 68);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(RULE a (RULEMODIFIERS public) int i (returns int y) (OPTIONS (= backtrack true)) (scope {int ss;}) (scope S T) (@ init {blort}) (BLOCK (ALT ID)))";
|
||||
assertEquals("testing rule rule", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_rule4() throws Exception {
|
||||
// gunit test on line 87
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("rule", "\n a[int i] returns [int y]\n @init {blort}\n scope {int ss;}\n options {backtrack=true;}\n scope S,T;\n : ID;\n ", 87);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(RULE a int i (returns int y) (@ init {blort}) (scope {int ss;}) (OPTIONS (= backtrack true)) (scope S T) (BLOCK (ALT ID)))";
|
||||
assertEquals("testing rule rule", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_rule5() throws Exception {
|
||||
// gunit test on line 104
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("rule", "\n a : ID ;\n catch[A b] {foo}\n finally {bar}\n ", 104);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(RULE a (BLOCK (ALT ID)) (catch A b {foo}) (finally {bar}))";
|
||||
assertEquals("testing rule rule", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_rule6() throws Exception {
|
||||
// gunit test on line 113
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("rule", "\n a : ID ;\n catch[A a] {foo}\n catch[B b] {fu}\n finally {bar}\n ", 113);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(RULE a (BLOCK (ALT ID)) (catch A a {foo}) (catch B b {fu}) (finally {bar}))";
|
||||
assertEquals("testing rule rule", expecting, actual);
|
||||
} @Test public void test_block1() throws Exception {
|
||||
// gunit test on line 124
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("block", "( ^(A B) | ^(b C) )", 124);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(BLOCK (ALT (^( A B)) (ALT (^( b C)))";
|
||||
assertEquals("testing rule block", expecting, actual);
|
||||
} @Test public void test_alternative1() throws Exception {
|
||||
// gunit test on line 127
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "x+=ID* -> $x*", 127);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT (* (BLOCK (ALT (+= x ID))))) (-> (ALT (* (REWRITE_BLOCK (ALT x))))))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative2() throws Exception {
|
||||
// gunit test on line 132
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "A -> ...", 132);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> ...))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative3() throws Exception {
|
||||
// gunit test on line 133
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "A -> ", 133);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> EPSILON))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative4() throws Exception {
|
||||
// gunit test on line 135
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "A -> foo(a={x}, b={y})", 135);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> (TEMPLATE foo (ARGLIST (= a {x}) (= b {y})))))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative5() throws Exception {
|
||||
// gunit test on line 140
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "A -> template(a={x}, b={y}) <<ick>>", 140);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> (TEMPLATE (ARGLIST (= a {x}) (= b {y})) <<ick>>)))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative6() throws Exception {
|
||||
// gunit test on line 145
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "A -> ({name})()", 145);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> (TEMPLATE {name})))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative7() throws Exception {
|
||||
// gunit test on line 147
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "A -> {expr}", 147);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> {expr}))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative8() throws Exception {
|
||||
// gunit test on line 149
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "\n A -> {p1}? {e1}\n -> {e2}\n ->\n ", 149);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> {p1}? {e1}) (-> {e2}))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative9() throws Exception {
|
||||
// gunit test on line 160
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "A -> A", 160);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> (ALT A)))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative10() throws Exception {
|
||||
// gunit test on line 162
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "a -> a", 162);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT a) (-> (ALT a)))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative11() throws Exception {
|
||||
// gunit test on line 164
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "a A X? Y* -> A a ^(TOP X)? Y*", 164);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT a A (? (BLOCK (ALT X))) (* (BLOCK (ALT Y)))) (-> (ALT A a (? (REWRITE_BLOCK (ALT (^( TOP X)))) (* (REWRITE_BLOCK (ALT Y))))))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative12() throws Exception {
|
||||
// gunit test on line 172
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "A -> A[33]", 172);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> (ALT (A 33))))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative13() throws Exception {
|
||||
// gunit test on line 174
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "A -> 'int' ^(A A)*", 174);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> (ALT 'int' (* (REWRITE_BLOCK (ALT (^( A A)))))))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_alternative14() throws Exception {
|
||||
// gunit test on line 179
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("alternative", "\n A -> {p1}? A\n -> {p2}? B\n ->\n ", 179);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(ALT_REWRITE (ALT A) (-> {p1}? (ALT A)) (-> {p2}? (ALT B)) (-> EPSILON))";
|
||||
assertEquals("testing rule alternative", expecting, actual);
|
||||
} @Test public void test_element1() throws Exception {
|
||||
// gunit test on line 191
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "b+", 191);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(+ (BLOCK (ALT b)))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element2() throws Exception {
|
||||
// gunit test on line 192
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "(b)+", 192);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(+ (BLOCK (ALT b)))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element3() throws Exception {
|
||||
// gunit test on line 193
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "b?", 193);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(? (BLOCK (ALT b)))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element4() throws Exception {
|
||||
// gunit test on line 194
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "(b)?", 194);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(? (BLOCK (ALT b)))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element5() throws Exception {
|
||||
// gunit test on line 195
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "(b)*", 195);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(* (BLOCK (ALT b)))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element6() throws Exception {
|
||||
// gunit test on line 196
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "b*", 196);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(* (BLOCK (ALT b)))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element7() throws Exception {
|
||||
// gunit test on line 197
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "'while'*", 197);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(* (BLOCK (ALT 'while')))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element8() throws Exception {
|
||||
// gunit test on line 198
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "'a'+", 198);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(+ (BLOCK (ALT 'a')))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element9() throws Exception {
|
||||
// gunit test on line 199
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "a[3]", 199);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(a 3)";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element10() throws Exception {
|
||||
// gunit test on line 200
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "'a'..'z'+", 200);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(+ (BLOCK (ALT (.. 'a' 'z'))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element11() throws Exception {
|
||||
// gunit test on line 201
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x=ID", 201);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(= x ID)";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element12() throws Exception {
|
||||
// gunit test on line 202
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x=ID?", 202);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(? (BLOCK (ALT (= x ID))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element13() throws Exception {
|
||||
// gunit test on line 203
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x=ID*", 203);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(* (BLOCK (ALT (= x ID))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element14() throws Exception {
|
||||
// gunit test on line 204
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x=b", 204);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(= x b)";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element15() throws Exception {
|
||||
// gunit test on line 205
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x=(A|B)", 205);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(= x (BLOCK (ALT A) (ALT B)))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element16() throws Exception {
|
||||
// gunit test on line 206
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x=~(A|B)", 206);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(= x (~ (BLOCK (ALT A) (ALT B))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element17() throws Exception {
|
||||
// gunit test on line 207
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x+=~(A|B)", 207);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(+= x (~ (BLOCK (ALT A) (ALT B))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element18() throws Exception {
|
||||
// gunit test on line 208
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x+=~(A|B)+", 208);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(+ (BLOCK (ALT (+= x (~ (BLOCK (ALT A) (ALT B)))))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element19() throws Exception {
|
||||
// gunit test on line 209
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x=b+", 209);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(+ (BLOCK (ALT (= x b))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element20() throws Exception {
|
||||
// gunit test on line 210
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x+=ID*", 210);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(* (BLOCK (ALT (+= x ID))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element21() throws Exception {
|
||||
// gunit test on line 211
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x+='int'*", 211);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(* (BLOCK (ALT (+= x 'int'))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element22() throws Exception {
|
||||
// gunit test on line 212
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x+=b+", 212);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(+ (BLOCK (ALT (+= x b))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element23() throws Exception {
|
||||
// gunit test on line 213
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "('*'^)*", 213);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(* (BLOCK (ALT (^ '*'))))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element24() throws Exception {
|
||||
// gunit test on line 214
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "({blort} 'x')*", 214);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(* (BLOCK (ALT {blort} 'x')))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element25() throws Exception {
|
||||
// gunit test on line 215
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "A!", 215);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(! A)";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element26() throws Exception {
|
||||
// gunit test on line 216
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "A^", 216);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(^ A)";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
|
||||
@Test public void test_element27() throws Exception {
|
||||
// gunit test on line 217
|
||||
RuleReturnScope rstruct = (RuleReturnScope)execParser("element", "x=A^", 217);
|
||||
Object actual = ((Tree)rstruct.getTree()).toStringTree();
|
||||
Object expecting = "(= x (^ A))";
|
||||
assertEquals("testing rule element", expecting, actual);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,954 @@
|
|||
package org.antlr.v4.test;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.automata.*;
|
||||
import org.antlr.v4.runtime.atn.*;
|
||||
import org.antlr.v4.semantics.SemanticPipeline;
|
||||
import org.antlr.v4.tool.*;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestATNConstruction extends BaseTest {
|
||||
@Test
|
||||
public void testA() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : A;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->s2\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s3->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s4\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAB() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : A B ;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->s2\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s3->s4\n" +
|
||||
"s4-B->s5\n" +
|
||||
"s5->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s6\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAorB() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : A | B {;} ;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->BlockStart_8\n" +
|
||||
"BlockStart_8->s2\n" +
|
||||
"BlockStart_8->s4\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s4-B->s5\n" +
|
||||
"s3->BlockEnd_9\n" +
|
||||
"s5->s6\n" +
|
||||
"BlockEnd_9->RuleStop_a_1\n" +
|
||||
"s6-{;}->s7\n" +
|
||||
"RuleStop_a_1-EOF->s10\n" +
|
||||
"s7->BlockEnd_9\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testRange() throws Exception {
|
||||
LexerGrammar g = new LexerGrammar(
|
||||
"lexer grammar P;\n"+
|
||||
"A : 'a'..'c' ;"
|
||||
);
|
||||
String expecting =
|
||||
"RuleStart_A_1->s3\n" +
|
||||
"s3-'a'..'c'->s4\n" +
|
||||
"s4->RuleStop_A_2\n";
|
||||
checkTokensRule(g, "A", expecting);
|
||||
}
|
||||
|
||||
@Test public void testRangeOrRange() throws Exception {
|
||||
LexerGrammar g = new LexerGrammar(
|
||||
"lexer grammar P;\n"+
|
||||
"A : ('a'..'c' 'h' | 'q' 'j'..'l') ;"
|
||||
);
|
||||
String expecting =
|
||||
"RuleStart_A_1->BlockStart_11\n" +
|
||||
"BlockStart_11->s3\n" +
|
||||
"BlockStart_11->s7\n" +
|
||||
"s3-'a'..'c'->s4\n" +
|
||||
"s7-'q'->s8\n" +
|
||||
"s4->s5\n" +
|
||||
"s8->s9\n" +
|
||||
"s5-'h'->s6\n" +
|
||||
"s9-'j'..'l'->s10\n" +
|
||||
"s6->BlockEnd_12\n" +
|
||||
"s10->BlockEnd_12\n" +
|
||||
"BlockEnd_12->RuleStop_A_2\n";
|
||||
checkTokensRule(g, "A", expecting);
|
||||
}
|
||||
|
||||
@Test public void testStringLiteralInParser() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar P;\n"+
|
||||
"a : A|'b' ;"
|
||||
);
|
||||
String expecting =
|
||||
"RuleStart_a_0->BlockStart_6\n" +
|
||||
"BlockStart_6->s2\n" +
|
||||
"BlockStart_6->s4\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s4-'b'->s5\n" +
|
||||
"s3->BlockEnd_7\n" +
|
||||
"s5->BlockEnd_7\n" +
|
||||
"BlockEnd_7->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s8\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testABorCD() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : A B | C D;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->BlockStart_10\n" +
|
||||
"BlockStart_10->s2\n" +
|
||||
"BlockStart_10->s6\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s6-C->s7\n" +
|
||||
"s3->s4\n" +
|
||||
"s7->s8\n" +
|
||||
"s4-B->s5\n" +
|
||||
"s8-D->s9\n" +
|
||||
"s5->BlockEnd_11\n" +
|
||||
"s9->BlockEnd_11\n" +
|
||||
"BlockEnd_11->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s12\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testbA() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : b A ;\n"+
|
||||
"b : B ;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->s4\n" +
|
||||
"s4->RuleStart_b_2\n" +
|
||||
"s5->s6\n" +
|
||||
"s6-A->s7\n" +
|
||||
"s7->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s10\n";
|
||||
checkRule(g, "a", expecting);
|
||||
expecting =
|
||||
"RuleStart_b_2->s8\n" +
|
||||
"s8-B->s9\n" +
|
||||
"s9->RuleStop_b_3\n" +
|
||||
"RuleStop_b_3->s5\n";
|
||||
checkRule(g, "b", expecting);
|
||||
}
|
||||
|
||||
@Test public void testFollow() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : b A ;\n"+
|
||||
"b : B ;\n"+
|
||||
"c : b C;");
|
||||
String expecting =
|
||||
"RuleStart_b_2->s10\n" +
|
||||
"s10-B->s11\n" +
|
||||
"s11->RuleStop_b_3\n" +
|
||||
"RuleStop_b_3->s7\n" +
|
||||
"RuleStop_b_3->s13\n";
|
||||
checkRule(g, "b", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAorEpsilon() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : A | ;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->BlockStart_6\n" +
|
||||
"BlockStart_6->s2\n" +
|
||||
"BlockStart_6->s4\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s4->s5\n" +
|
||||
"s3->BlockEnd_7\n" +
|
||||
"s5->BlockEnd_7\n" +
|
||||
"BlockEnd_7->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s8\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAOptional() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : A?;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->BlockStart_4\n" +
|
||||
"BlockStart_4->s2\n" +
|
||||
"BlockStart_4->BlockEnd_5\n" +
|
||||
"s2-A->s3\n" +
|
||||
"BlockEnd_5->RuleStop_a_1\n" +
|
||||
"s3->BlockEnd_5\n" +
|
||||
"RuleStop_a_1-EOF->s6\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAorBoptional() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : (A|B)?;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->BlockStart_6\n" +
|
||||
"BlockStart_6->s2\n" +
|
||||
"BlockStart_6->s4\n" +
|
||||
"BlockStart_6->BlockEnd_7\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s4-B->s5\n" +
|
||||
"BlockEnd_7->RuleStop_a_1\n" +
|
||||
"s3->BlockEnd_7\n" +
|
||||
"s5->BlockEnd_7\n" +
|
||||
"RuleStop_a_1-EOF->s8\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAorBthenC() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : (A | B) C;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->BlockStart_6\n" +
|
||||
"BlockStart_6->s2\n" +
|
||||
"BlockStart_6->s4\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s4-B->s5\n" +
|
||||
"s3->BlockEnd_7\n" +
|
||||
"s5->BlockEnd_7\n" +
|
||||
"BlockEnd_7->s8\n" +
|
||||
"s8-C->s9\n" +
|
||||
"s9->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s10\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAplus() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : A+;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->PlusBlockStart_4\n" +
|
||||
"PlusBlockStart_4->s2\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s3->BlockEnd_5\n" +
|
||||
"BlockEnd_5->PlusLoopBack_6\n" +
|
||||
"PlusLoopBack_6->s2\n" +
|
||||
"PlusLoopBack_6->s7\n" +
|
||||
"s7->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s8\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAorBplus() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : (A|B)+;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->PlusBlockStart_6\n" +
|
||||
"PlusBlockStart_6->s2\n" +
|
||||
"PlusBlockStart_6->s4\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s4-B->s5\n" +
|
||||
"s3->BlockEnd_7\n" +
|
||||
"s5->BlockEnd_7\n" +
|
||||
"BlockEnd_7->PlusLoopBack_8\n" +
|
||||
"PlusLoopBack_8->s2\n" +
|
||||
"PlusLoopBack_8->s4\n" +
|
||||
"PlusLoopBack_8->s9\n" +
|
||||
"s9->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s10\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAorBorEmptyPlus() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : (A | B | )+ ;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->PlusBlockStart_8\n" +
|
||||
"PlusBlockStart_8->s2\n" +
|
||||
"PlusBlockStart_8->s4\n" +
|
||||
"PlusBlockStart_8->s6\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s4-B->s5\n" +
|
||||
"s6->s7\n" +
|
||||
"s3->BlockEnd_9\n" +
|
||||
"s5->BlockEnd_9\n" +
|
||||
"s7->BlockEnd_9\n" +
|
||||
"BlockEnd_9->PlusLoopBack_10\n" +
|
||||
"PlusLoopBack_10->s2\n" +
|
||||
"PlusLoopBack_10->s4\n" +
|
||||
"PlusLoopBack_10->s6\n" +
|
||||
"PlusLoopBack_10->s11\n" +
|
||||
"s11->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s12\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAStar() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : A*;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->StarBlockStart_4\n" +
|
||||
"StarBlockStart_4->s2\n" +
|
||||
"StarBlockStart_4->s7\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s7->RuleStop_a_1\n" +
|
||||
"s3->BlockEnd_5\n" +
|
||||
"RuleStop_a_1-EOF->s8\n" +
|
||||
"BlockEnd_5->StarLoopBack_6\n" +
|
||||
"StarLoopBack_6->StarBlockStart_4\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testNestedAstar() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : (',' ID*)*;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->StarBlockStart_10\n" +
|
||||
"StarBlockStart_10->s2\n" +
|
||||
"StarBlockStart_10->s13\n" +
|
||||
"s2-','->s3\n" +
|
||||
"s13->RuleStop_a_1\n" +
|
||||
"s3->StarBlockStart_6\n" +
|
||||
"RuleStop_a_1-EOF->s14\n" +
|
||||
"StarBlockStart_6->s4\n" +
|
||||
"StarBlockStart_6->s9\n" +
|
||||
"s4-ID->s5\n" +
|
||||
"s9->BlockEnd_11\n" +
|
||||
"s5->BlockEnd_7\n" +
|
||||
"BlockEnd_11->StarLoopBack_12\n" +
|
||||
"BlockEnd_7->StarLoopBack_8\n" +
|
||||
"StarLoopBack_12->StarBlockStart_10\n" +
|
||||
"StarLoopBack_8->StarBlockStart_6\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAorBstar() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : (A | B)* ;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->StarBlockStart_6\n" +
|
||||
"StarBlockStart_6->s2\n" +
|
||||
"StarBlockStart_6->s4\n" +
|
||||
"StarBlockStart_6->s9\n" +
|
||||
"s2-A->s3\n" +
|
||||
"s4-B->s5\n" +
|
||||
"s9->RuleStop_a_1\n" +
|
||||
"s3->BlockEnd_7\n" +
|
||||
"s5->BlockEnd_7\n" +
|
||||
"RuleStop_a_1-EOF->s10\n" +
|
||||
"BlockEnd_7->StarLoopBack_8\n" +
|
||||
"StarLoopBack_8->StarBlockStart_6\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testPredicatedAorB() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : {p1}? A | {p2}? B ;");
|
||||
String expecting =
|
||||
"RuleStart_a_0->BlockStart_10\n" +
|
||||
"BlockStart_10->s2\n" +
|
||||
"BlockStart_10->s6\n" +
|
||||
"s2-{p1}?->s3\n" +
|
||||
"s6-{p2}?->s7\n" +
|
||||
"s3->s4\n" +
|
||||
"s7->s8\n" +
|
||||
"s4-A->s5\n" +
|
||||
"s8-B->s9\n" +
|
||||
"s5->BlockEnd_11\n" +
|
||||
"s9->BlockEnd_11\n" +
|
||||
"BlockEnd_11->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s12\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
/*
|
||||
@Test public void testMultiplePredicates() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : {p1}? {p1a}? A | {p2}? B | {p3} b;\n" +
|
||||
"b : {p4}? B ;");
|
||||
String expecting =
|
||||
"\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testSets() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"a : ( A | B )+ ;\n" +
|
||||
"b : ( A | B{;} )+ ;\n" +
|
||||
"c : (A|B) (A|B) ;\n" +
|
||||
"d : ( A | B )* ;\n" +
|
||||
"e : ( A | B )? ;");
|
||||
String expecting =
|
||||
"\n";
|
||||
checkRule(g, "a", expecting);
|
||||
expecting =
|
||||
"\n";
|
||||
checkRule(g, "b", expecting);
|
||||
expecting =
|
||||
"\n";
|
||||
checkRule(g, "c", expecting);
|
||||
expecting =
|
||||
"\n";
|
||||
checkRule(g, "d", expecting);
|
||||
expecting =
|
||||
"\n";
|
||||
checkRule(g, "e", expecting);
|
||||
}
|
||||
|
||||
@Test public void testNotSet() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"tokens { A; B; C; }\n"+
|
||||
"a : ~A ;\n");
|
||||
String expecting =
|
||||
"\n";
|
||||
checkRule(g, "a", expecting);
|
||||
|
||||
}
|
||||
|
||||
@Test public void testNotSingletonBlockSet() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"tokens { A; B; C; }\n"+
|
||||
"a : ~(A) ;\n");
|
||||
String expecting =
|
||||
"\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testNotCharSet() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"lexer grammar P;\n"+
|
||||
"A : ~'3' ;\n");
|
||||
String expecting =
|
||||
"RuleStart_A_1->s5\n" +
|
||||
"s5-{'\\u0000'..'2', '4'..'\\uFFFE'}->s6\n" +
|
||||
"s6->RuleStop_A_2\n";
|
||||
checkRule(g, "A", expecting);
|
||||
}
|
||||
|
||||
@Test public void testNotBlockSet() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"lexer grammar P;\n"+
|
||||
"A : ~('3'|'b') ;\n");
|
||||
String expecting =
|
||||
"\n";
|
||||
checkRule(g, "A", expecting);
|
||||
}
|
||||
|
||||
@Test public void testNotSetLoop() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"lexer grammar P;\n"+
|
||||
"A : ~('3')* ;\n");
|
||||
String expecting =
|
||||
"\n";
|
||||
checkRule(g, "A", expecting);
|
||||
}
|
||||
|
||||
@Test public void testNotBlockSetLoop() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"lexer grammar P;\n"+
|
||||
"A : ~('3'|'b')* ;\n");
|
||||
String expecting =
|
||||
"\n";
|
||||
checkRule(g, "A", expecting);
|
||||
}
|
||||
|
||||
@Test public void testLabeledNotSet() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar P;\n"+
|
||||
"tokens { A; B; C; }\n"+
|
||||
"a : t=~A ;\n");
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s2-B..C->.s3\n" +
|
||||
".s3->:s4\n" +
|
||||
":s4-EOF->.s5\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testLabeledNotCharSet() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"lexer grammar P;\n"+
|
||||
"A : t=~'3' ;\n");
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s2-{'\\u0000'..'2', '4'..'\\uFFFF'}->.s3\n" +
|
||||
".s3->:s4\n" +
|
||||
":s4-<EOT>->.s5\n";
|
||||
checkRule(g, "A", expecting);
|
||||
}
|
||||
|
||||
@Test public void testLabeledNotBlockSet() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"lexer grammar P;\n"+
|
||||
"A : t=~('3'|'b') ;\n");
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s2-{'\\u0000'..'2', '4'..'a', 'c'..'\\uFFFF'}->.s3\n" +
|
||||
".s3->:s4\n" +
|
||||
":s4-<EOT>->.s5\n";
|
||||
checkRule(g, "A", expecting);
|
||||
}
|
||||
|
||||
@Test public void testEscapedCharLiteral() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar P;\n"+
|
||||
"a : '\\n';");
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s2-'\\n'->.s3\n" +
|
||||
".s3->:s4\n" +
|
||||
":s4-EOF->.s5\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testEscapedStringLiteral() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar P;\n"+
|
||||
"a : 'a\\nb\\u0030c\\'';");
|
||||
String expecting =
|
||||
"RuleStart_a_0->s2\n" +
|
||||
"s2-'a\\nb\\u0030c\\''->s3\n" +
|
||||
"s3->RuleStop_a_1\n" +
|
||||
"RuleStop_a_1-EOF->s4\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
// AUTO BACKTRACKING STUFF
|
||||
|
||||
@Test public void testAutoBacktracking_RuleBlock() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : 'a'{;}|'b';"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s1->.s9\n" +
|
||||
".s10-'b'->.s11\n" +
|
||||
".s11->.s6\n" +
|
||||
".s2-{synpred1_t}?->.s3\n" +
|
||||
".s3-'a'->.s4\n" +
|
||||
".s4-{}->.s5\n" +
|
||||
".s5->.s6\n" +
|
||||
".s6->:s7\n" +
|
||||
".s9->.s10\n" +
|
||||
":s7-EOF->.s8\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_RuleSetBlock() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : 'a'|'b';"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s2-'a'..'b'->.s3\n" +
|
||||
".s3->:s4\n" +
|
||||
":s4-EOF->.s5\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_SimpleBlock() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a'{;}|'b') ;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s10->.s11\n" +
|
||||
".s11-'b'->.s12\n" +
|
||||
".s12->.s7\n" +
|
||||
".s2->.s10\n" +
|
||||
".s2->.s3\n" +
|
||||
".s3-{synpred1_t}?->.s4\n" +
|
||||
".s4-'a'->.s5\n" +
|
||||
".s5-{}->.s6\n" +
|
||||
".s6->.s7\n" +
|
||||
".s7->:s8\n" +
|
||||
":s8-EOF->.s9\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_SetBlock() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a'|'b') ;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s2-'a'..'b'->.s3\n" +
|
||||
".s3->:s4\n" +
|
||||
":s4-EOF->.s5\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_StarBlock() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a'{;}|'b')* ;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s12->.s13\n" +
|
||||
".s13-{synpred2_t}?->.s14\n" +
|
||||
".s14-'b'->.s15\n" +
|
||||
".s15->.s8\n" +
|
||||
".s16->.s9\n" +
|
||||
".s2->.s16\n" +
|
||||
".s2->.s3\n" +
|
||||
".s3->.s12\n" +
|
||||
".s3->.s4\n" +
|
||||
".s4-{synpred1_t}?->.s5\n" +
|
||||
".s5-'a'->.s6\n" +
|
||||
".s6-{}->.s7\n" +
|
||||
".s7->.s8\n" +
|
||||
".s8->.s3\n" +
|
||||
".s8->.s9\n" +
|
||||
".s9->:s10\n" +
|
||||
":s10-EOF->.s11\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_StarSetBlock_IgnoresPreds() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a'|'b')* ;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s2->.s3\n" +
|
||||
".s2->.s9\n" +
|
||||
".s3->.s4\n" +
|
||||
".s4-'a'..'b'->.s5\n" +
|
||||
".s5->.s3\n" +
|
||||
".s5->.s6\n" +
|
||||
".s6->:s7\n" +
|
||||
".s9->.s6\n" +
|
||||
":s7-EOF->.s8\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_StarSetBlock() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a'|'b'{;})* ;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s11->.s12\n" +
|
||||
".s12-{synpred2_t}?->.s13\n" +
|
||||
".s13-'b'->.s14\n" +
|
||||
".s14-{}->.s15\n" +
|
||||
".s15->.s7\n" +
|
||||
".s16->.s8\n" +
|
||||
".s2->.s16\n" +
|
||||
".s2->.s3\n" +
|
||||
".s3->.s11\n" +
|
||||
".s3->.s4\n" +
|
||||
".s4-{synpred1_t}?->.s5\n" +
|
||||
".s5-'a'->.s6\n" +
|
||||
".s6->.s7\n" +
|
||||
".s7->.s3\n" +
|
||||
".s7->.s8\n" +
|
||||
".s8->:s9\n" +
|
||||
":s9-EOF->.s10\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_StarBlock1Alt() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a')* ;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s10->.s7\n" +
|
||||
".s2->.s10\n" +
|
||||
".s2->.s3\n" +
|
||||
".s3->.s4\n" +
|
||||
".s4-{synpred1_t}?->.s5\n" +
|
||||
".s5-'a'->.s6\n" +
|
||||
".s6->.s3\n" +
|
||||
".s6->.s7\n" +
|
||||
".s7->:s8\n" +
|
||||
":s8-EOF->.s9\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_PlusBlock() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a'{;}|'b')+ ;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s12->.s13\n" +
|
||||
".s13-{synpred2_t}?->.s14\n" +
|
||||
".s14-'b'->.s15\n" +
|
||||
".s15->.s8\n" +
|
||||
".s2->.s3\n" +
|
||||
".s3->.s12\n" +
|
||||
".s3->.s4\n" +
|
||||
".s4-{synpred1_t}?->.s5\n" +
|
||||
".s5-'a'->.s6\n" +
|
||||
".s6-{}->.s7\n" +
|
||||
".s7->.s8\n" +
|
||||
".s8->.s3\n" +
|
||||
".s8->.s9\n" +
|
||||
".s9->:s10\n" +
|
||||
":s10-EOF->.s11\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_PlusSetBlock() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a'|'b'{;})+ ;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s11->.s12\n" +
|
||||
".s12-{synpred2_t}?->.s13\n" +
|
||||
".s13-'b'->.s14\n" +
|
||||
".s14-{}->.s15\n" +
|
||||
".s15->.s7\n" +
|
||||
".s2->.s3\n" +
|
||||
".s3->.s11\n" +
|
||||
".s3->.s4\n" +
|
||||
".s4-{synpred1_t}?->.s5\n" +
|
||||
".s5-'a'->.s6\n" +
|
||||
".s6->.s7\n" +
|
||||
".s7->.s3\n" +
|
||||
".s7->.s8\n" +
|
||||
".s8->:s9\n" +
|
||||
":s9-EOF->.s10\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_PlusBlock1Alt() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a')+ ;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s2->.s3\n" +
|
||||
".s3->.s4\n" +
|
||||
".s4-{synpred1_t}?->.s5\n" +
|
||||
".s5-'a'->.s6\n" +
|
||||
".s6->.s3\n" +
|
||||
".s6->.s7\n" +
|
||||
".s7->:s8\n" +
|
||||
":s8-EOF->.s9\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_OptionalBlock2Alts() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a'{;}|'b')?;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s10->.s11\n" +
|
||||
".s10->.s14\n" +
|
||||
".s11-{synpred2_t}?->.s12\n" +
|
||||
".s12-'b'->.s13\n" +
|
||||
".s13->.s7\n" +
|
||||
".s14->.s7\n" +
|
||||
".s2->.s10\n" +
|
||||
".s2->.s3\n" +
|
||||
".s3-{synpred1_t}?->.s4\n" +
|
||||
".s4-'a'->.s5\n" +
|
||||
".s5-{}->.s6\n" +
|
||||
".s6->.s7\n" +
|
||||
".s7->:s8\n" +
|
||||
":s8-EOF->.s9\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_OptionalBlock1Alt() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a')?;"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s2->.s3\n" +
|
||||
".s2->.s9\n" +
|
||||
".s3-{synpred1_t}?->.s4\n" +
|
||||
".s4-'a'->.s5\n" +
|
||||
".s5->.s6\n" +
|
||||
".s6->:s7\n" +
|
||||
".s9->.s6\n" +
|
||||
":s7-EOF->.s8\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
|
||||
@Test public void testAutoBacktracking_ExistingPred() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"options {backtrack=true;}\n"+
|
||||
"a : ('a')=> 'a' | 'b';"
|
||||
);
|
||||
String expecting =
|
||||
".s0->.s1\n" +
|
||||
".s1->.s2\n" +
|
||||
".s1->.s8\n" +
|
||||
".s10->.s5\n" +
|
||||
".s2-{synpred1_t}?->.s3\n" +
|
||||
".s3-'a'->.s4\n" +
|
||||
".s4->.s5\n" +
|
||||
".s5->:s6\n" +
|
||||
".s8->.s9\n" +
|
||||
".s9-'b'->.s10\n" +
|
||||
":s6-EOF->.s7\n";
|
||||
checkRule(g, "a", expecting);
|
||||
}
|
||||
*/
|
||||
|
||||
@Test public void testDefaultMode() throws Exception {
|
||||
LexerGrammar g = new LexerGrammar(
|
||||
"lexer grammar L;\n"+
|
||||
"A : 'a' ;\n" +
|
||||
"X : 'x' ;\n" +
|
||||
"mode FOO;\n" +
|
||||
"B : 'b' ;\n" +
|
||||
"C : 'c' ;\n");
|
||||
String expecting =
|
||||
"BlockStart_0->RuleStart_A_2\n" +
|
||||
"BlockStart_0->RuleStart_X_4\n" +
|
||||
"RuleStart_A_2->s10\n" +
|
||||
"RuleStart_X_4->s12\n" +
|
||||
"s10-'a'->s11\n" +
|
||||
"s12-'x'->s13\n" +
|
||||
"s11->RuleStop_A_3\n" +
|
||||
"s13->RuleStop_X_5\n";
|
||||
checkTokensRule(g, "DEFAULT_MODE", expecting);
|
||||
}
|
||||
|
||||
@Test public void testMode() throws Exception {
|
||||
LexerGrammar g = new LexerGrammar(
|
||||
"lexer grammar L;\n"+
|
||||
"A : 'a' ;\n" +
|
||||
"X : 'x' ;\n" +
|
||||
"mode FOO;\n" +
|
||||
"B : 'b' ;\n" +
|
||||
"C : 'c' ;\n");
|
||||
String expecting =
|
||||
"BlockStart_1->RuleStart_B_6\n" +
|
||||
"BlockStart_1->RuleStart_C_8\n" +
|
||||
"RuleStart_B_6->s14\n" +
|
||||
"RuleStart_C_8->s16\n" +
|
||||
"s14-'b'->s15\n" +
|
||||
"s16-'c'->s17\n" +
|
||||
"s15->RuleStop_B_7\n" +
|
||||
"s17->RuleStop_C_9\n";
|
||||
checkTokensRule(g, "FOO", expecting);
|
||||
}
|
||||
|
||||
void checkTokensRule(LexerGrammar g, String modeName, String expecting) {
|
||||
if ( g.ast!=null && !g.ast.hasErrors ) {
|
||||
System.out.println(g.ast.toStringTree());
|
||||
Tool antlr = new Tool();
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
|
||||
for (Grammar imp : g.getImportedGrammars()) {
|
||||
antlr.processNonCombinedGrammar(imp);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ( g.modes.get(modeName)==null ) {
|
||||
System.err.println("no such mode "+modeName);
|
||||
return;
|
||||
}
|
||||
|
||||
ParserATNFactory f = new LexerATNFactory((LexerGrammar)g);
|
||||
ATN nfa = f.createATN();
|
||||
ATNState startState = nfa.modeNameToStartState.get(modeName);
|
||||
ATNPrinter serializer = new ATNPrinter(g, startState);
|
||||
String result = serializer.toString();
|
||||
|
||||
//System.out.print(result);
|
||||
assertEquals(expecting, result);
|
||||
}
|
||||
|
||||
void checkRule(Grammar g, String ruleName, String expecting) {
|
||||
if ( g.ast!=null && !g.ast.hasErrors ) {
|
||||
System.out.println(g.ast.toStringTree());
|
||||
Tool antlr = new Tool();
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
|
||||
for (Grammar imp : g.getImportedGrammars()) {
|
||||
antlr.processNonCombinedGrammar(imp);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ParserATNFactory f = new ParserATNFactory(g);
|
||||
ATN atn = f.createATN();
|
||||
|
||||
DOTGenerator dot = new DOTGenerator(g);
|
||||
System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule(ruleName))));
|
||||
|
||||
Rule r = g.getRule(ruleName);
|
||||
ATNState startState = atn.ruleToStartState.get(r);
|
||||
ATNPrinter serializer = new ATNPrinter(g, startState);
|
||||
String result = serializer.toString();
|
||||
|
||||
//System.out.print(result);
|
||||
assertEquals(expecting, result);
|
||||
}
|
||||
}
|
|

@@ -0,0 +1,127 @@
package org.antlr.v4.test;

import org.antlr.v4.misc.Utils;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ParserInterpreter;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.LexerGrammar;
import org.junit.Test;

public class TestATNDeserialization extends BaseTest {
	@Test public void testSimpleNoBlock() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A B ;");
		checkDeserializationIsStable(g);
	}

	@Test public void testNot() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"tokens {A; B; C;}\n" +
			"a : ~A ;");
		checkDeserializationIsStable(g);
	}

	@Test public void testWildcard() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"tokens {A; B; C;}\n" +
			"a : . ;");
		checkDeserializationIsStable(g);
	}

	@Test public void testPEGAchillesHeel() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | A B ;");
		checkDeserializationIsStable(g);
	}

	@Test public void test3Alts() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | A B | A B C ;");
		checkDeserializationIsStable(g);
	}

	@Test public void testSimpleLoop() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A+ B ;");
		checkDeserializationIsStable(g);
	}

	@Test public void testRuleRef() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : e ;\n" +
			"e : E ;\n");
		checkDeserializationIsStable(g);
	}

	@Test public void testLexerTwoRules() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'a' ;\n" +
			"B : 'b' ;\n");
		checkDeserializationIsStable(lg);
	}

	@Test public void testLexerRange() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"INT : '0'..'9' ;\n");
		checkDeserializationIsStable(lg);
	}

	@Test public void testLexerLoops() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"INT : '0'..'9'+ ;\n");
		checkDeserializationIsStable(lg);
	}

	@Test public void testLexerNotSet() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"ID : ~('a'|'b')\n ;");
		checkDeserializationIsStable(lg);
	}

	@Test public void testLexerNotSetWithRange() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"ID : ~('a'|'b'|'e'|'p'..'t')\n ;");
		checkDeserializationIsStable(lg);
	}

	@Test public void testLexerNotSetWithRange2() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"ID : ~('a'|'b') ~('e'|'p'..'t')\n ;");
		checkDeserializationIsStable(lg);
	}

	@Test public void test2ModesInLexer() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'a'\n ;\n" +
			"mode M;\n" +
			"B : 'b';\n" +
			"mode M2;\n" +
			"C : 'c';\n");
		checkDeserializationIsStable(lg);
	}

	protected void checkDeserializationIsStable(Grammar g) {
		ATN atn = createATN(g);
		char[] data = Utils.toCharArray(atn.getSerialized());
		String atnData = atn.getDecoded();
		ATN atn2 = ParserInterpreter.deserialize(data);
		atn2.g = g;
		String atn2Data = atn2.getDecoded();

		assertEquals(atnData, atn2Data);
	}
}

@@ -0,0 +1,289 @@
package org.antlr.v4.test;

import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.tool.*;
import org.junit.Test;

import java.util.List;

/** */
public class TestATNInterpreter extends BaseTest {
	@Test public void testSimpleNoBlock() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A B ;");
		checkMatchedAlt(lg, g, "ab", 1);
	}

	@Test public void testSet() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"tokens {A; B; C;}\n" +
			"a : ~A ;");
		checkMatchedAlt(lg, g, "b", 1);
	}

	@Test public void testPEGAchillesHeel() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | A B ;");
		checkMatchedAlt(lg, g, "a", 1);
		checkMatchedAlt(lg, g, "ab", 2);
		checkMatchedAlt(lg, g, "abc", 2);
	}

	@Test public void testMustTrackPreviousGoodAlt() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | A B ;");
		int errorIndex = 0;
		int errorTokenType = 0;
		try {
			checkMatchedAlt(lg, g, "ac", 1);
		}
		catch (NoViableAltException re) {
			errorIndex = re.index;
			errorTokenType = re.token.getType();
		}
		assertEquals(1, errorIndex);
		assertEquals(errorTokenType, 5);
	}

	@Test public void testMustTrackPreviousGoodAlt2() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n" +
			"D : 'd' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | A B | A B C ;");
		checkMatchedAlt(lg, g, "a", 1 );
		checkMatchedAlt(lg, g, "ab", 2);
		checkMatchedAlt(lg, g, "abc", 3);
		int errorIndex = 0;
		int errorTokenType = 0;
		try {
			checkMatchedAlt(lg, g, "abd", 1);
		}
		catch (NoViableAltException re) {
			errorIndex = re.index;
			errorTokenType = re.token.getType();
		}
		assertEquals(2, errorIndex);
		assertEquals(errorTokenType, 6);
	}

	@Test public void testMustTrackPreviousGoodAlt3() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n" +
			"D : 'd' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A B | A | A B C ;");
		int errorIndex = 0;
		int errorTokenType = 0;
		try {
			checkMatchedAlt(lg, g, "abd", 1);
		}
		catch (NoViableAltException re) {
			errorIndex = re.index;
			errorTokenType = re.token.getType();
		}
		assertEquals(2, errorIndex);
		assertEquals(errorTokenType, 6);
	}

	@Test public void testAmbigAltChooseFirst() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n" +
			"D : 'd' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A B | A B ;"); // first alt
		checkMatchedAlt(lg, g, "ab", 1);
		checkMatchedAlt(lg, g, "abc", 1);
	}

	@Test public void testAmbigAltChooseFirstWithFollowingToken() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n" +
			"D : 'd' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : (A B | A B) C ;"); // first alt
		checkMatchedAlt(lg, g, "abc", 1);
		checkMatchedAlt(lg, g, "abcd", 1);
	}

	@Test public void testAmbigAltChooseFirstWithFollowingToken2() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n" +
			"D : 'd' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : (A B | A B | D) C ;");
		checkMatchedAlt(lg, g, "abc", 1);
		checkMatchedAlt(lg, g, "abcd", 1);
		checkMatchedAlt(lg, g, "dc", 3);
	}

	@Test public void testAmbigAltChooseFirst2() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n" +
			"D : 'd' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A B | A B | A B C ;");
		checkMatchedAlt(lg, g, "ab", 1);
		checkMatchedAlt(lg, g, "abc", 3);

		int errorIndex = 0;
		int errorTokenType = 0;
		try {
			checkMatchedAlt(lg, g, "abd", 1);
		}
		catch (NoViableAltException re) {
			errorIndex = re.index;
			errorTokenType = re.token.getType();
		}
		assertEquals(2, errorIndex);
		assertEquals(6, errorTokenType);

		checkMatchedAlt(lg, g, "abcd", 3); // ignores d on end
	}

	@Test public void testSimpleLoop() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n" +
			"D : 'd' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A+ B ;");
		checkMatchedAlt(lg, g, "ab", 1);
		checkMatchedAlt(lg, g, "aab", 1);
		checkMatchedAlt(lg, g, "aaaaaab", 1);
		checkMatchedAlt(lg, g, "aabd", 1);
	}

	@Test public void testCommonLeftPrefix() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A B | A C ;");
		checkMatchedAlt(lg, g, "ab", 1);
		checkMatchedAlt(lg, g, "ac", 2);
	}

	@Test public void testArbitraryLeftPrefix() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A+ B | A+ C ;");
		checkMatchedAlt(lg, g, "aac", 2);
	}

	@Test public void testRecursiveLeftPrefix() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n" +
			"LP : '(' ;\n" +
			"RP : ')' ;\n" +
			"INT : '0'..'9'+ ;\n"
		);
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : e B | e C ;\n" +
			"e : LP e RP\n" +
			" | INT\n" +
			" ;");
		checkMatchedAlt(lg, g, "34b", 1);
		checkMatchedAlt(lg, g, "34c", 2);
		checkMatchedAlt(lg, g, "(34)b", 1);
		checkMatchedAlt(lg, g, "(34)c", 2);
		checkMatchedAlt(lg, g, "((34))b", 1);
		checkMatchedAlt(lg, g, "((34))c", 2);
	}

	public void checkMatchedAlt(LexerGrammar lg, Grammar g,
								String inputString,
								int expected)
	{
		ATN lexatn = createATN(lg);
		LexerInterpreter lexInterp = new LexerInterpreter(lexatn);
		List<Integer> types = getTokenTypes(inputString, lexInterp);
		System.out.println(types);

		semanticProcess(lg);
		g.importVocab(lg);
		semanticProcess(g);

		ParserATNFactory f = new ParserATNFactory(g);
		ATN atn = f.createATN();

		ParserInterpreter interp = new ParserInterpreter(atn);
		TokenStream input = new IntTokenStream(types);
		ATNState startState = atn.ruleToStartState.get(g.getRule("a"));
		if ( startState.transition(0).target instanceof BlockStartState ) {
			startState = startState.transition(0).target;
		}

		DOTGenerator dot = new DOTGenerator(g);
		System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("a"))));
		System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("e"))));

		int result = interp.matchATN(input, startState);
		assertEquals(expected, result);
	}
}

@@ -0,0 +1,144 @@
package org.antlr.v4.test;

import org.antlr.v4.misc.Utils;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.tool.*;
import org.junit.*;

import java.util.List;

public class TestATNLexerInterpreter extends BaseTest {
	@Test public void testLexerTwoRules() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'a' ;\n" +
			"B : 'b' ;\n");
		String expecting = "A, B, A, B, EOF";
		checkLexerMatches(lg, "abab", expecting);
	}

	@Test public void testShortLongRule() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'xy'\n" +
			" | 'xyz'\n" +
			" ;\n");
		checkLexerMatches(lg, "xy", "A, EOF");
		checkLexerMatches(lg, "xyz", "A, EOF");
	}

	@Test public void testLexerLoops() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"INT : '0'..'9'+ ;\n" +
			"ID : 'a'..'z'+ ;\n");
		String expecting = "ID, INT, ID, INT, EOF";
		checkLexerMatches(lg, "a34bde3", expecting);
	}

	@Test public void testLexerNotSet() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"ID : ~('a'|'b')\n ;");
		String expecting = "ID, EOF";
		checkLexerMatches(lg, "c", expecting);
	}

	@Test public void testLexerKeywordIDAmbiguity() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"KEND : 'end' ;\n" +
			"ID : 'a'..'z'+ ;\n" +
			"WS : (' '|'\n')+ ;");
		String expecting = "ID, EOF";
		//checkLexerMatches(lg, "e", expecting);
		expecting = "KEND, EOF";
		checkLexerMatches(lg, "end", expecting);
		expecting = "ID, EOF";
		checkLexerMatches(lg, "ending", expecting);
		expecting = "ID, WS, KEND, WS, ID, EOF";
		checkLexerMatches(lg, "a end bcd", expecting);
	}

	@Test public void testLexerRuleRef() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"INT : DIGIT+ ;\n" +
			"fragment DIGIT : '0'..'9' ;\n" +
			"WS : (' '|'\n')+ ;");
		String expecting = "INT, WS, INT, EOF";
		checkLexerMatches(lg, "32 99", expecting);
	}

	@Test public void testRecursiveLexerRuleRef() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '/*' (CMT | ~'*')+ '*/' ;\n" +
			"WS : (' '|'\n')+ ;");
		String expecting = "CMT, WS, CMT, EOF";
		checkLexerMatches(lg, "/* ick */\n/* /*nested*/ */", expecting);
	}

	@Ignore public void testLexerWildcardNonGreedyLoopByDefault() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '//' .* '\\n' ;\n");
		String expecting = "CMT, CMT, EOF";
		checkLexerMatches(lg, "//x\n//y\n", expecting);
	}

	// should not work. no priority within a single rule. the subrule won't work. need modes
	@Ignore
	public void testLexerEscapeInString() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"STR : '\"' ('\\\\' '\"' | .)* '\"' ;\n"); // STR : '"' ('\\' '"' | .)* '"'
		checkLexerMatches(lg, "\"a\\\"b\"", "STR, EOF");
		checkLexerMatches(lg, "\"a\"", "STR, EOF");
	}

	@Ignore public void testLexerWildcardNonGreedyPlusLoopByDefault() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '//' .+ '\\n' ;\n");
		String expecting = "CMT, CMT, EOF";
		checkLexerMatches(lg, "//x\n//y\n", expecting);
	}

	@Ignore public void testLexerGreedyOptionalShouldWorkAsWeExpect() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '/*' ('*/')? '*/' ;\n");
		String expecting = "CMT, EOF";
		checkLexerMatches(lg, "/**/", expecting);
	}

	@Ignore public void testNonGreedyBetweenRules() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : '<a>' ;\n" +
			"B : '<' .+ '>' ;\n");
		String expecting = "A, B, EOF";
		checkLexerMatches(lg, "<a><x>", expecting);
	}

	protected void checkLexerMatches(LexerGrammar lg, String inputString, String expecting) {
		ATN atn = createATN(lg);
		CharStream input = new ANTLRStringStream(inputString);
		ATNState startState = atn.modeNameToStartState.get("DEFAULT_MODE");
		DOTGenerator dot = new DOTGenerator(lg);
		System.out.println(dot.getDOT(startState));

		List<String> tokenTypes = getTokenTypes(lg, atn, input, false);
		String result = Utils.join(tokenTypes.iterator(), ", ");
		System.out.println(tokenTypes);
		assertEquals(expecting, result);

		// try now adaptive DFA
		input.seek(0);
		List<String> tokenTypes2 = getTokenTypes(lg, atn, input, true);
		assertEquals("interp vs adaptive types differ", tokenTypes, tokenTypes2);
	}

}

@@ -0,0 +1,586 @@
package org.antlr.v4.test;

import org.antlr.v4.Tool;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.tool.*;
import org.junit.Test;

import java.util.List;

public class TestATNParserPrediction extends BaseTest {
	@Test public void testAorB() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | B ;");
		int decision = 0;
		checkPredictedAlt(lg, g, decision, "a", 1);
		checkPredictedAlt(lg, g, decision, "b", 2);

		// After matching these inputs for decision, what is DFA after each prediction?
		String[] inputs = {
			"a",
			"b",
			"a"
		};
		String[] dfa = {
			"s0-'a'->:s1=>1\n",

			"s0-'a'->:s1=>1\n" +
			"s0-'b'->:s2=>2\n",

			"s0-'a'->:s1=>1\n" + // don't change after it works
			"s0-'b'->:s2=>2\n",
		};
		checkDFAConstruction(lg, g, decision, inputs, dfa);
	}

	@Test public void testEmptyInput() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | ;");
		int decision = 0;
		checkPredictedAlt(lg, g, decision, "a", 1);
		checkPredictedAlt(lg, g, decision, "", 2);

		// After matching these inputs for decision, what is DFA after each prediction?
		String[] inputs = {
			"a",
			"",
		};
		String[] dfa = {
			"s0-'a'->:s1=>1\n",

			"s0-EOF->:s2=>2\n" +
			"s0-'a'->:s1=>1\n",
		};
		checkDFAConstruction(lg, g, decision, inputs, dfa);
	}

	@Test public void testPEGAchillesHeel() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | A B ;");
		checkPredictedAlt(lg, g, 0, "a", 1);
		checkPredictedAlt(lg, g, 0, "ab", 2);
		checkPredictedAlt(lg, g, 0, "abc", 2);

		String[] inputs = {
			"a",
			"ab",
			"abc"
		};
		String[] dfa = {
			"s0-'a'->s1\n" +
			"s1-EOF->:s2=>1\n",

			"s0-'a'->s1\n" +
			"s1-EOF->:s2=>1\n" +
			"s1-'b'->:s3=>2\n",

			"s0-'a'->s1\n" +
			"s1-EOF->:s2=>1\n" +
			"s1-'b'->:s3=>2\n"
		};
		checkDFAConstruction(lg, g, 0, inputs, dfa);
	}

	@Test public void testRuleRefxory() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : x | y ;\n" +
			"x : A ;\n" +
			"y : B ;\n");
		int decision = 0;
		checkPredictedAlt(lg, g, decision, "a", 1);
		checkPredictedAlt(lg, g, decision, "b", 2);

		// After matching these inputs for decision, what is DFA after each prediction?
		String[] inputs = {
			"a",
			"b",
			"a"
		};
		String[] dfa = {
			"s0-'a'->:s1=>1\n",

			"s0-'a'->:s1=>1\n" +
			"s0-'b'->:s2=>2\n",

			"s0-'a'->:s1=>1\n" + // don't change after it works
			"s0-'b'->:s2=>2\n",
		};
		checkDFAConstruction(lg, g, decision, inputs, dfa);
	}

	@Test public void testOptionalRuleChasesGlobalFollow() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : x B ;\n" +
			"b : x C ;\n" +
			"x : A | ;\n");
		int decision = 0;
		checkPredictedAlt(lg, g, decision, "a", 1);
		checkPredictedAlt(lg, g, decision, "b", 2);
		checkPredictedAlt(lg, g, decision, "c", 2);

		// After matching these inputs for decision, what is DFA after each prediction?
		String[] inputs = {
			"a",
			"b",
			"c",
			"c",
		};
		String[] dfa = {
			"s0-'a'->:s1=>1\n",

			"s0-'a'->:s1=>1\n" +
			"s0-'b'->:s2=>2\n",

			"s0-'a'->:s1=>1\n" +
			"s0-'b'->:s2=>2\n" +
			"s0-'c'->:s3=>2\n",

			"s0-'a'->:s1=>1\n" +
			"s0-'b'->:s2=>2\n" +
			"s0-'c'->:s3=>2\n",
		};
		checkDFAConstruction(lg, g, decision, inputs, dfa);
	}

	@Test public void testLL1Ambig() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | A | A B ;");
		int decision = 0;
		checkPredictedAlt(lg, g, decision, "a", 1);
		checkPredictedAlt(lg, g, decision, "ab", 3);

		// After matching these inputs for decision, what is DFA after each prediction?
		String[] inputs = {
			"a",
			"ab",
			"ab"
		};
		String[] dfa = {
			"s0-'a'->s1\n" +
			"s1-EOF->:s2=>1\n",

			"s0-'a'->s1\n" +
			"s1-EOF->:s2=>1\n" +
			"s1-'b'->:s3=>3\n",

			"s0-'a'->s1\n" +
			"s1-EOF->:s2=>1\n" +
			"s1-'b'->:s3=>3\n",
		};
		checkDFAConstruction(lg, g, decision, inputs, dfa);
	}

	@Test public void testLL2Ambig() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A B | A B | A B C ;");
		int decision = 0;
		checkPredictedAlt(lg, g, decision, "ab", 1);
		checkPredictedAlt(lg, g, decision, "abc", 3);

		// After matching these inputs for decision, what is DFA after each prediction?
		String[] inputs = {
			"ab",
			"abc",
			"ab"
		};
		String[] dfa = {
			"s0-'a'->s1\n" +
			"s1-'b'->s2\n" +
			"s2-EOF->:s3=>1\n",

			"s0-'a'->s1\n" +
			"s1-'b'->s2\n" +
			"s2-EOF->:s3=>1\n" +
			"s2-'c'->:s4=>3\n",

			"s0-'a'->s1\n" +
			"s1-'b'->s2\n" +
			"s2-EOF->:s3=>1\n" +
			"s2-'c'->:s4=>3\n",
		};
		checkDFAConstruction(lg, g, decision, inputs, dfa);
	}

	@Test public void testFullLLContextParse() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n");
		// AB predicted in both alts of e but in diff contexts.
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : e B ;\n" +
			"b : e A B ;\n" +
			"e : A | ;\n"); // TODO: try with three alts

		ATN lexatn = createATN(lg);
		LexerInterpreter lexInterp = new LexerInterpreter(lexatn);

		semanticProcess(lg);
		g.importVocab(lg);
		semanticProcess(g);

		ParserATNFactory f = new ParserATNFactory(g);
		ATN atn = f.createATN();

		RuleStartState aStart = atn.ruleToStartState.get(g.getRule("a"));
		RuleStartState bStart = atn.ruleToStartState.get(g.getRule("b"));
		RuleStartState eStart = atn.ruleToStartState.get(g.getRule("e"));
		ATNState a_e_invoke = aStart.transition(0).target; //
		ATNState b_e_invoke = bStart.transition(0).target; //
		RuleContext a_ctx = new RuleContext(null, -1, a_e_invoke.stateNumber);
		RuleContext b_ctx = new RuleContext(null, -1, b_e_invoke.stateNumber);
		RuleContext a_e_ctx = new RuleContext(a_ctx, a_e_invoke.stateNumber, bStart.stateNumber);
		RuleContext b_e_ctx = new RuleContext(b_ctx, b_e_invoke.stateNumber, bStart.stateNumber);

		ParserInterpreter interp = new ParserInterpreter(atn);
		interp.setContextSensitive(true);
		List<Integer> types = getTokenTypes("ab", lexInterp);
		System.out.println(types);
		TokenStream input = new IntTokenStream(types);
		int alt = interp.adaptivePredict(input, 0, b_e_ctx);
		assertEquals(alt, 2);
		DFA dfa = interp.decisionToDFA[0];
		String expecting =
			"s0-'a'->s1\n" +
			"s1-'b'->s2\n" +
			"s2-EOF->:s3@{[10]=2}\n";
		assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));

		alt = interp.adaptivePredict(input, 0, b_e_ctx); // cached
		assertEquals(alt, 2);
		expecting =
			"s0-'a'->s1\n" +
			"s1-'b'->s2\n" +
			"s2-EOF->:s3@{[10]=2}\n";
		assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));

		alt = interp.adaptivePredict(input, 0, a_e_ctx); // forces new context-sens ATN match
		assertEquals(alt, 1);
		expecting =
			"s0-'a'->s1\n" +
			"s1-'b'->s2\n" +
			"s2-EOF->:s3@{[10]=2, [6]=1}\n";
		assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));

		alt = interp.adaptivePredict(input, 0, b_e_ctx); // cached
		assertEquals(alt, 2);
		expecting =
			"s0-'a'->s1\n" +
			"s1-'b'->s2\n" +
			"s2-EOF->:s3@{[10]=2, [6]=1}\n";
		assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));

		alt = interp.adaptivePredict(input, 0, a_e_ctx); // cached
		assertEquals(alt, 1);
		expecting =
			"s0-'a'->s1\n" +
			"s1-'b'->s2\n" +
			"s2-EOF->:s3@{[10]=2, [6]=1}\n";
		assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));

		types = getTokenTypes("b", lexInterp);
		System.out.println(types);
		input = new IntTokenStream(types);
		alt = interp.adaptivePredict(input, 0, null); // ctx irrelevant
		assertEquals(alt, 2);
		expecting =
			"s0-'a'->s1\n" +
			"s0-'b'->:s4=>2\n" +
			"s1-'b'->s2\n" +
			"s2-EOF->:s3@{[10]=2, [6]=1}\n";
		assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));

		types = getTokenTypes("aab", lexInterp);
		System.out.println(types);
		input = new IntTokenStream(types);
		alt = interp.adaptivePredict(input, 0, null);
		assertEquals(alt, 1);
		expecting =
			"s0-'a'->s1\n" +
			"s0-'b'->:s4=>2\n" +
			"s1-'a'->:s5=>1\n" +
			"s1-'b'->s2\n" +
			"s2-EOF->:s3@{[10]=2, [6]=1}\n";
		assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));
	}

	@Test public void testRecursiveLeftPrefix() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n" +
			"LP : '(' ;\n" +
			"RP : ')' ;\n" +
			"INT : '0'..'9'+ ;\n"
		);
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : e B | e C ;\n" +
			"e : LP e RP\n" +
			" | INT\n" +
			" ;");
		int decision = 0;
		checkPredictedAlt(lg, g, decision, "34b", 1);
		checkPredictedAlt(lg, g, decision, "34c", 2);
		checkPredictedAlt(lg, g, decision, "((34))b", 1);
		checkPredictedAlt(lg, g, decision, "((34))c", 2);

		// After matching these inputs for decision, what is DFA after each prediction?
		String[] inputs = {
			"34b",
			"34c",
			"((34))b",
			"((34))c"
		};
		String[] dfa = {
			"s0-INT->s1\n" +
			"s1-'b'->:s2=>1\n",

			"s0-INT->s1\n" +
			"s1-'b'->:s2=>1\n" +
			"s1-'c'->:s3=>2\n",

			"s0-'('->s4\n" +
			"s0-INT->s1\n" +
			"s1-'b'->:s2=>1\n" +
			"s1-'c'->:s3=>2\n" +
			"s4-'('->s5\n" +
			"s5-INT->s6\n" +
			"s6-')'->s7\n" +
			"s7-')'->s1\n",

			"s0-'('->s4\n" +
			"s0-INT->s1\n" +
			"s1-'b'->:s2=>1\n" +
			"s1-'c'->:s3=>2\n" +
			"s4-'('->s5\n" +
			"s5-INT->s6\n" +
			"s6-')'->s7\n" +
			"s7-')'->s1\n",
		};
		checkDFAConstruction(lg, g, decision, inputs, dfa);
	}

	@Test public void testRecursiveLeftPrefixWithAorABIssue() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n" +
			"A : 'a' ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' ;\n" +
			"LP : '(' ;\n" +
			"RP : ')' ;\n" +
			"INT : '0'..'9'+ ;\n"
		);
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : e A | e A B ;\n" +
			"e : LP e RP\n" +
			" | INT\n" +
			" ;");
		int decision = 0;
		checkPredictedAlt(lg, g, decision, "34a", 1);
		checkPredictedAlt(lg, g, decision, "34ab", 2); // PEG would miss this one!
		checkPredictedAlt(lg, g, decision, "((34))a", 1);
		checkPredictedAlt(lg, g, decision, "((34))ab", 2);

		// After matching these inputs for decision, what is DFA after each prediction?
		String[] inputs = {
			"34a",
			"34ab",
			"((34))a",
			"((34))ab",
		};
		String[] dfa = {
			"s0-INT->s1\n" +
			"s1-'a'->s2\n" +
			"s2-EOF->:s3=>1\n",

			"s0-INT->s1\n" +
			"s1-'a'->s2\n" +
			"s2-EOF->:s3=>1\n" +
			"s2-'b'->:s4=>2\n",

			"s0-'('->s5\n" +
			"s0-INT->s1\n" +
			"s1-'a'->s2\n" +
			"s2-EOF->:s3=>1\n" +
			"s2-'b'->:s4=>2\n" +
			"s5-'('->s6\n" +
			"s6-INT->s7\n" +
			"s7-')'->s8\n" +
			"s8-')'->s1\n",

			"s0-'('->s5\n" +
			"s0-INT->s1\n" +
			"s1-'a'->s2\n" +
			"s2-EOF->:s3=>1\n" +
			"s2-'b'->:s4=>2\n" +
			"s5-'('->s6\n" +
			"s6-INT->s7\n" +
			"s7-')'->s8\n" +
			"s8-')'->s1\n",
		};
		checkDFAConstruction(lg, g, decision, inputs, dfa);
	}

	/** first check that the ATN predicts right alt.
	 *  Then check adaptive prediction.
	 */
	public void checkPredictedAlt(LexerGrammar lg, Grammar g, int decision,
								  String inputString, int expectedAlt)
	{
		Tool.internalOption_ShowATNConfigsInDFA = true;
		ATN lexatn = createATN(lg);
		LexerInterpreter lexInterp = new LexerInterpreter(lexatn);
		List<Integer> types = getTokenTypes(inputString, lexInterp);
		System.out.println(types);

		semanticProcess(lg);
		g.importVocab(lg);
		semanticProcess(g);

		ParserATNFactory f = new ParserATNFactory(g);
		ATN atn = f.createATN();

		DOTGenerator dot = new DOTGenerator(g);
		System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("a"))));
		System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("b"))));
		System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("e"))));

		// Check ATN prediction
		ParserInterpreter interp = new ParserInterpreter(atn);
		TokenStream input = new IntTokenStream(types);
		ATNState startState = atn.decisionToATNState.get(decision);
		DFA dfa = new DFA(startState);
		int alt = interp.predictATN(dfa, input, decision, RuleContext.EMPTY, false);

		System.out.println(dot.getDOT(dfa, false));

		assertEquals(expectedAlt, alt);

		// Check adaptive prediction
		input.seek(0);
		alt = interp.adaptivePredict(input, decision, null);
		assertEquals(expectedAlt, alt);
		// run 2x; first time creates DFA in atn
		input.seek(0);
		alt = interp.adaptivePredict(input, decision, null);
		assertEquals(expectedAlt, alt);
	}

	public DFA getDFA(LexerGrammar lg, Grammar g, String ruleName,
					  String inputString, RuleContext ctx)
	{
		Tool.internalOption_ShowATNConfigsInDFA = true;
		ATN lexatn = createATN(lg);
		LexerInterpreter lexInterp = new LexerInterpreter(lexatn);

		semanticProcess(lg);
		g.importVocab(lg);
		semanticProcess(g);

		ParserATNFactory f = new ParserATNFactory(g);
		ATN atn = f.createATN();

//		DOTGenerator dot = new DOTGenerator(g);
//		System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("a"))));
//		System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("b"))));
//		System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("e"))));

		ParserInterpreter interp = new ParserInterpreter(atn);
		List<Integer> types = getTokenTypes(inputString, lexInterp);
		System.out.println(types);
		TokenStream input = new IntTokenStream(types);
		try {
			ATNState startState = atn.decisionToATNState.get(0);
			DFA dfa = new DFA(startState);
//			Rule r = g.getRule(ruleName);
			//ATNState startState = atn.ruleToStartState.get(r);
			interp.predictATN(dfa, input, 0, ctx, false);
		}
		catch (NoViableAltException nvae) {
			nvae.printStackTrace(System.err);
		}
		return null;
	}

	public void checkDFAConstruction(LexerGrammar lg, Grammar g, int decision,
									 String[] inputString, String[] dfaString)
	{
//		Tool.internalOption_ShowATNConfigsInDFA = true;
		ATN lexatn = createATN(lg);
		LexerInterpreter lexInterp = new LexerInterpreter(lexatn);

		semanticProcess(lg);
		g.importVocab(lg);
		semanticProcess(g);

		ParserATNFactory f = new ParserATNFactory(g);
		ATN atn = f.createATN();

		ParserInterpreter interp = new ParserInterpreter(atn);
		for (int i=0; i<inputString.length; i++) {
			// Check DFA
			List<Integer> types = getTokenTypes(inputString[i], lexInterp);
			System.out.println(types);
			TokenStream input = new IntTokenStream(types);
			try {
				interp.adaptivePredict(input, decision, RuleContext.EMPTY);
			}
			catch (NoViableAltException nvae) {
				nvae.printStackTrace(System.err);
			}
			DFA dfa = interp.decisionToDFA[decision];
			ATNInterpreter.dump(dfa,g);
			assertEquals(dfaString[i], dfa.toString(g.getTokenDisplayNames()));
		}
	}
}

@@ -0,0 +1,494 @@
package org.antlr.v4.test;

import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.tool.*;
import org.junit.Test;

public class TestATNSerialization extends BaseTest {
	@Test public void testSimpleNoBlock() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A B ;");
		String expecting =
			"max type 4\n" +
			"0:RULE_START 1\n" +
			"1:RULE_STOP 1\n" +
			"2:BASIC 1\n" +
			"3:BASIC 1\n" +
			"4:BASIC 1\n" +
			"5:BASIC 1\n" +
			"6:BASIC 1\n" +
			"rule 1:0 0,0\n" +
			"0->2 EPSILON 0,0\n" +
			"1->6 ATOM -1,0\n" +
			"2->3 ATOM 3,0\n" +
			"3->4 EPSILON 0,0\n" +
			"4->5 ATOM 4,0\n" +
			"5->1 EPSILON 0,0\n";
		ATN atn = createATN(g);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testNot() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"tokens {A; B; C;}\n" +
			"a : ~A ;");
		String expecting =
			"max type 5\n" +
			"0:RULE_START 1\n" +
			"1:RULE_STOP 1\n" +
			"2:BASIC 1\n" +
			"3:BASIC 1\n" +
			"4:BASIC 1\n" +
			"rule 1:0 0,0\n" +
			"0->2 EPSILON 0,0\n" +
			"1->4 ATOM -1,0\n" +
			"2->3 NOT_ATOM 3,0\n" +
			"3->1 EPSILON 0,0\n";
		ATN atn = createATN(g);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testWildcard() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"tokens {A; B; C;}\n" +
			"a : . ;");
		String expecting =
			"max type 5\n" +
			"0:RULE_START 1\n" +
			"1:RULE_STOP 1\n" +
			"2:BASIC 1\n" +
			"3:BASIC 1\n" +
			"4:BASIC 1\n" +
			"rule 1:0 0,0\n" +
			"0->2 EPSILON 0,0\n" +
			"1->4 ATOM -1,0\n" +
			"2->3 WILDCARD 0,0\n" +
			"3->1 EPSILON 0,0\n";
		ATN atn = createATN(g);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testPEGAchillesHeel() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | A B ;");
		String expecting =
			"max type 4\n" +
			"0:RULE_START 1\n" +
			"1:RULE_STOP 1\n" +
			"2:BASIC 1\n" +
			"3:BASIC 1\n" +
			"4:BASIC 1\n" +
			"5:BASIC 1\n" +
			"6:BASIC 1\n" +
			"7:BASIC 1\n" +
			"8:BLOCK_START 1\n" +
			"9:BLOCK_END 1\n" +
			"10:BASIC 1\n" +
			"rule 1:0 0,0\n" +
			"0->8 EPSILON 0,0\n" +
			"1->10 ATOM -1,0\n" +
			"2->3 ATOM 3,0\n" +
			"3->9 EPSILON 0,0\n" +
			"4->5 ATOM 3,0\n" +
			"5->6 EPSILON 0,0\n" +
			"6->7 ATOM 4,0\n" +
			"7->9 EPSILON 0,0\n" +
			"8->2 EPSILON 0,0\n" +
			"8->4 EPSILON 0,0\n" +
			"9->1 EPSILON 0,0\n" +
			"0:8\n";
		ATN atn = createATN(g);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void test3Alts() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A | A B | A B C ;");
		String expecting =
			"max type 5\n" +
			"0:RULE_START 1\n" +
			"1:RULE_STOP 1\n" +
			"2:BASIC 1\n" +
			"3:BASIC 1\n" +
			"4:BASIC 1\n" +
			"5:BASIC 1\n" +
			"6:BASIC 1\n" +
			"7:BASIC 1\n" +
			"8:BASIC 1\n" +
			"9:BASIC 1\n" +
			"10:BASIC 1\n" +
			"11:BASIC 1\n" +
			"12:BASIC 1\n" +
			"13:BASIC 1\n" +
			"14:BLOCK_START 1\n" +
			"15:BLOCK_END 1\n" +
			"16:BASIC 1\n" +
			"rule 1:0 0,0\n" +
			"0->14 EPSILON 0,0\n" +
			"1->16 ATOM -1,0\n" +
			"2->3 ATOM 3,0\n" +
			"3->15 EPSILON 0,0\n" +
			"4->5 ATOM 3,0\n" +
			"5->6 EPSILON 0,0\n" +
			"6->7 ATOM 4,0\n" +
			"7->15 EPSILON 0,0\n" +
			"8->9 ATOM 3,0\n" +
			"9->10 EPSILON 0,0\n" +
			"10->11 ATOM 4,0\n" +
			"11->12 EPSILON 0,0\n" +
			"12->13 ATOM 5,0\n" +
			"13->15 EPSILON 0,0\n" +
			"14->2 EPSILON 0,0\n" +
			"14->4 EPSILON 0,0\n" +
			"14->8 EPSILON 0,0\n" +
			"15->1 EPSILON 0,0\n" +
			"0:14\n";
		ATN atn = createATN(g);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testSimpleLoop() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : A+ B ;");
		String expecting =
			"max type 4\n" +
			"0:RULE_START 1\n" +
			"1:RULE_STOP 1\n" +
			"2:BASIC 1\n" +
			"3:BASIC 1\n" +
			"4:PLUS_BLOCK_START 1\n" +
			"5:BLOCK_END 1\n" +
			"6:PLUS_LOOP_BACK 1\n" +
			"7:BASIC 1\n" +
			"8:BASIC 1\n" +
			"9:BASIC 1\n" +
			"10:BASIC 1\n" +
			"rule 1:0 0,0\n" +
			"0->4 EPSILON 0,0\n" +
			"1->10 ATOM -1,0\n" +
			"2->3 ATOM 3,0\n" +
			"3->5 EPSILON 0,0\n" +
			"4->2 EPSILON 0,0\n" +
			"5->6 EPSILON 0,0\n" +
			"6->2 EPSILON 0,0\n" +
			"6->7 EPSILON 0,0\n" +
			"7->8 EPSILON 0,0\n" +
			"8->9 ATOM 4,0\n" +
			"9->1 EPSILON 0,0\n" +
			"0:4\n" +
			"1:4\n" +
			"2:6\n";
		ATN atn = createATN(g);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testRuleRef() throws Exception {
		Grammar g = new Grammar(
			"parser grammar T;\n"+
			"a : e ;\n" +
			"e : E ;\n");
		String expecting =
			"max type 3\n" +
			"0:RULE_START 1\n" +
			"1:RULE_STOP 1\n" +
			"2:RULE_START 2\n" +
			"3:RULE_STOP 2\n" +
			"4:BASIC 1\n" +
			"5:BASIC 1\n" +
			"6:BASIC 2\n" +
			"7:BASIC 2\n" +
			"8:BASIC 2\n" +
			"rule 1:0 0,0\n" +
			"rule 2:2 0,0\n" +
			"0->4 EPSILON 0,0\n" +
			"1->8 ATOM -1,0\n" +
			"2->6 EPSILON 0,0\n" +
			"3->5 EPSILON 0,0\n" +
			"4->5 RULE 2,2\n" +
			"5->1 EPSILON 0,0\n" +
			"6->7 ATOM 3,0\n" +
			"7->3 EPSILON 0,0\n";
		ATN atn = createATN(g);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testLexerTwoRules() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'a' ;\n" +
			"B : 'b' ;\n");
		String expecting =
			"max type 4\n" +
			"0:TOKEN_START 0\n" +
			"1:RULE_START 1\n" +
			"2:RULE_STOP 1\n" +
			"3:RULE_START 2\n" +
			"4:RULE_STOP 2\n" +
			"5:BASIC 1\n" +
			"6:BASIC 1\n" +
			"7:BASIC 2\n" +
			"8:BASIC 2\n" +
			"rule 1:1 3,0\n" +
			"rule 2:3 4,0\n" +
			"mode 0:0\n" +
			"0->1 EPSILON 0,0\n" +
			"0->3 EPSILON 0,0\n" +
			"1->5 EPSILON 0,0\n" +
			"3->7 EPSILON 0,0\n" +
			"5->6 ATOM 97,0\n" +
			"6->2 EPSILON 0,0\n" +
			"7->8 ATOM 98,0\n" +
			"8->4 EPSILON 0,0\n" +
			"0:0\n";
		ATN atn = createATN(lg);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testLexerRange() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"INT : '0'..'9' ;\n");
		String expecting =
			"max type 3\n" +
			"0:TOKEN_START 0\n" +
			"1:RULE_START 1\n" +
			"2:RULE_STOP 1\n" +
			"3:BASIC 1\n" +
			"4:BASIC 1\n" +
			"rule 1:1 3,0\n" +
			"mode 0:0\n" +
			"0->1 EPSILON 0,0\n" +
			"1->3 EPSILON 0,0\n" +
			"3->4 RANGE 48,57\n" +
			"4->2 EPSILON 0,0\n" +
			"0:0\n";
		ATN atn = createATN(lg);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testLexerLoops() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"INT : '0'..'9'+ ;\n");
		String expecting =
			"max type 3\n" +
			"0:TOKEN_START 0\n" +
			"1:RULE_START 1\n" +
			"2:RULE_STOP 1\n" +
			"3:BASIC 1\n" +
			"4:BASIC 1\n" +
			"5:PLUS_BLOCK_START 1\n" +
			"6:BLOCK_END 1\n" +
			"7:PLUS_LOOP_BACK 1\n" +
			"8:BASIC 1\n" +
			"rule 1:1 3,0\n" +
			"mode 0:0\n" +
			"0->1 EPSILON 0,0\n" +
			"1->5 EPSILON 0,0\n" +
			"3->4 RANGE 48,57\n" +
			"4->6 EPSILON 0,0\n" +
			"5->3 EPSILON 0,0\n" +
			"6->7 EPSILON 0,0\n" +
			"7->3 EPSILON 0,0\n" +
			"7->8 EPSILON 0,0\n" +
			"8->2 EPSILON 0,0\n" +
			"0:0\n" +
			"1:5\n" +
			"2:5\n" +
			"3:7\n";
		ATN atn = createATN(lg);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testLexerAction() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'a' {a} ;\n" +
			"B : 'b' ;\n" +
			"C : 'c' {c} ;\n");
		String expecting =
			"max type 5\n" +
			"0:TOKEN_START 0\n" +
			"1:RULE_START 1\n" +
			"2:RULE_STOP 1\n" +
			"3:RULE_START 2\n" +
			"4:RULE_STOP 2\n" +
			"5:RULE_START 3\n" +
			"6:RULE_STOP 3\n" +
			"7:BASIC 1\n" +
			"8:BASIC 1\n" +
			"9:BASIC 1\n" +
			"10:BASIC 2\n" +
			"11:BASIC 2\n" +
			"12:BASIC 3\n" +
			"13:BASIC 3\n" +
			"14:BASIC 3\n" +
			"rule 1:1 3,1\n" +
			"rule 2:3 4,0\n" +
			"rule 3:5 5,2\n" +
			"mode 0:0\n" +
			"0->1 EPSILON 0,0\n" +
			"0->3 EPSILON 0,0\n" +
			"0->5 EPSILON 0,0\n" +
			"1->7 EPSILON 0,0\n" +
			"3->10 EPSILON 0,0\n" +
			"5->12 EPSILON 0,0\n" +
			"7->8 ATOM 97,0\n" +
			"8->9 EPSILON 0,0\n" +
			"9->2 EPSILON 0,0\n" +
			"10->11 ATOM 98,0\n" +
			"11->4 EPSILON 0,0\n" +
			"12->13 ATOM 99,0\n" +
			"13->14 EPSILON 0,0\n" +
			"14->6 EPSILON 0,0\n" +
			"0:0\n";
		ATN atn = createATN(lg);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testLexerNotSet() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"ID : ~('a'|'b')\n ;");
		String expecting =
			"max type 3\n" +
			"0:TOKEN_START 0\n" +
			"1:RULE_START 1\n" +
			"2:RULE_STOP 1\n" +
			"3:BASIC 1\n" +
			"4:BASIC 1\n" +
			"rule 1:1 3,0\n" +
			"mode 0:0\n" +
			"0:'a'..'b'\n" +
			"0->1 EPSILON 0,0\n" +
			"1->3 EPSILON 0,0\n" +
			"3->4 NOT_SET 0,0\n" +
			"4->2 EPSILON 0,0\n" +
			"0:0\n";
		ATN atn = createATN(lg);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testLexerNotSetWithRange() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"ID : ~('a'|'b'|'e'|'p'..'t')\n ;");
		String expecting =
			"max type 3\n" +
			"0:TOKEN_START 0\n" +
			"1:RULE_START 1\n" +
			"2:RULE_STOP 1\n" +
			"3:BASIC 1\n" +
			"4:BASIC 1\n" +
			"rule 1:1 3,0\n" +
			"mode 0:0\n" +
			"0:'a'..'b', 'e'..'e', 'p'..'t'\n" +
			"0->1 EPSILON 0,0\n" +
			"1->3 EPSILON 0,0\n" +
			"3->4 NOT_SET 0,0\n" +
			"4->2 EPSILON 0,0\n" +
			"0:0\n";
		ATN atn = createATN(lg);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void testLexerNotSetWithRange2() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"ID : ~('a'|'b') ~('e'|'p'..'t')\n ;");
		String expecting =
			"max type 3\n" +
			"0:TOKEN_START 0\n" +
			"1:RULE_START 1\n" +
			"2:RULE_STOP 1\n" +
			"3:BASIC 1\n" +
			"4:BASIC 1\n" +
			"5:BASIC 1\n" +
			"6:BASIC 1\n" +
			"rule 1:1 3,0\n" +
			"mode 0:0\n" +
			"0:'a'..'b'\n" +
			"1:'e'..'e', 'p'..'t'\n" +
			"0->1 EPSILON 0,0\n" +
			"1->3 EPSILON 0,0\n" +
			"3->4 NOT_SET 0,0\n" +
			"4->5 EPSILON 0,0\n" +
			"5->6 NOT_SET 1,0\n" +
			"6->2 EPSILON 0,0\n" +
			"0:0\n";
		ATN atn = createATN(lg);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

	@Test public void test2ModesInLexer() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'a'\n ;\n" +
			"mode M;\n" +
			"B : 'b';\n" +
			"mode M2;\n" +
			"C : 'c';\n");
		String expecting =
			"max type 5\n" +
			"0:TOKEN_START 0\n" +
			"1:TOKEN_START 0\n" +
			"2:TOKEN_START 0\n" +
			"3:RULE_START 1\n" +
			"4:RULE_STOP 1\n" +
			"5:RULE_START 2\n" +
			"6:RULE_STOP 2\n" +
			"7:RULE_START 3\n" +
			"8:RULE_STOP 3\n" +
			"9:BASIC 1\n" +
			"10:BASIC 1\n" +
			"11:BASIC 2\n" +
			"12:BASIC 2\n" +
			"13:BASIC 3\n" +
			"14:BASIC 3\n" +
			"rule 1:3 3,0\n" +
			"rule 2:5 4,0\n" +
			"rule 3:7 5,0\n" +
			"mode 0:0\n" +
			"mode 1:1\n" +
			"mode 2:2\n" +
			"0->3 EPSILON 0,0\n" +
			"1->5 EPSILON 0,0\n" +
			"2->7 EPSILON 0,0\n" +
			"3->9 EPSILON 0,0\n" +
			"5->11 EPSILON 0,0\n" +
			"7->13 EPSILON 0,0\n" +
			"9->10 ATOM 97,0\n" +
			"10->4 EPSILON 0,0\n" +
			"11->12 ATOM 98,0\n" +
			"12->6 EPSILON 0,0\n" +
			"13->14 ATOM 99,0\n" +
			"14->8 EPSILON 0,0\n" +
			"0:0\n" +
			"1:1\n" +
			"2:2\n";
		ATN atn = createATN(lg);
		String result = atn.getDecoded();
		assertEquals(expecting, result);
	}

}

@@ -0,0 +1,53 @@
package org.antlr.v4.test;

import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.Token;
import org.antlr.v4.parse.ActionSplitter;
import org.antlr.v4.semantics.BlankActionSplitterListener;
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

public class TestActionSplitter extends BaseTest {
	static String[] exprs = {
		"foo", "['foo'<29>]",
		"$x", "['$x'<20>]",
		"\\$x", "['\\$'<6>, 'x'<29>]",
		"$x.y", "['$x.y'<11>]",
		"$ID.text", "['$ID.text'<11>]",
		"$ID", "['$ID'<20>]",
		"$ID.getText()", "['$ID'<20>, '.getText()'<29>]",
		"$ID.text = \"test\";", "['$ID.text = \"test\";'<10>]",
		"$a.line == $b.line", "['$a.line'<11>, ' == '<29>, '$b.line'<11>]",
		"$r.tree", "['$r.tree'<11>]",
		"foo $a::n bar", "['foo '<29>, '$a::n'<13>, ' bar'<29>]",
		"$Symbols[-1]::names.add($id.text);", "['$Symbols[-1]::names'<16>, '.add('<29>, '$id.text'<11>, ');'<29>]",
		"$Symbols[0]::names.add($id.text);", "['$Symbols[0]::names'<18>, '.add('<29>, '$id.text'<11>, ');'<29>]",
		"$Symbols::x;", "['$Symbols::x'<13>, ';'<29>]",
		"$Symbols.size()>0", "['$Symbols'<20>, '.size()>0'<29>]",
		"$field::x = $field.st;", "['$field::x = $field.st;'<12>]",
		"$foo.get(\"ick\");", "['$foo'<20>, '.get(\"ick\");'<29>]",
	};

	@Test public void testExprs() {
		for (int i = 0; i < exprs.length; i+=2) {
			String input = exprs[i];
			String expect = exprs[i+1];
			List<String> chunks = getActionChunks(input);
			assertEquals("input: "+input, expect, chunks.toString());
		}
	}

	public static List<String> getActionChunks(String a) {
		List<String> chunks = new ArrayList<String>();
		ActionSplitter splitter = new ActionSplitter(new ANTLRStringStream(a),
			new BlankActionSplitterListener());
		Token t = splitter.nextToken();
		while ( t.getType()!=Token.EOF ) {
			chunks.add("'"+t.getText()+"'<"+t.getType()+">");
			t = splitter.nextToken();
		}
		return chunks;
	}
}

@@ -0,0 +1,522 @@
package org.antlr.v4.test;

import org.antlr.runtime.RecognitionException;
import org.antlr.v4.automata.ATNFactory;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.LexerGrammar;
import org.junit.Test;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupString;

/** */
public class TestActionTranslation extends BaseTest {
	String attributeTemplate =
		"attributeTemplate(members,init,inline,finally,inline2) ::= <<\n" +
		"parser grammar A;\n"+
		"@members {#members#<members>#end-members#}\n" +
		"a[int x, int x1] returns [int y]\n" +
		"@init {#init#<init>#end-init#}\n" +
		" : id=ID ids+=ID lab=b[34] c d {\n" +
		" #inline#<inline>#end-inline#\n" +
		" }\n" +
		" c\n" +
		" ;\n" +
		" finally {#finally#<finally>#end-finally#}\n" +
		"b[int d] returns [int e]\n" +
		" : {#inline2#<inline2>#end-inline2#}\n" +
		" ;\n" +
		"c returns [int x, int y] : ;\n" +
		"d : ;\n" +
		">>";

	String scopeTemplate =
		"scopeTemplate(members,init,inline,finally,inline2) ::= <<\n" +
		"parser grammar A;\n"+
		"@members {\n" +
		"#members#<members>#end-members#\n" +
		"}\n" +
		"scope S { int i; }\n" +
		"a\n" +
		"scope { int z; }\n" +
		"scope S;\n" +
		"@init {#init#<init>#end-init#}\n" +
		" : {\n" +
		" #inline#<inline>#end-inline#" +
		" }\n" +
		" ;\n" +
		" finally {#finally#<finally>#end-finally#}\n" +
		">>";

	@Test public void testEscapedLessThanInAction() throws Exception {
		String action = "i<3; '<xmltag>'";
		String expected = "i<3; '<xmltag>'";
		testActions(attributeTemplate, "members", action, expected);
		testActions(attributeTemplate, "init", action, expected);
		testActions(attributeTemplate, "inline", action, expected);
		testActions(attributeTemplate, "finally", action, expected);
		testActions(attributeTemplate, "inline2", action, expected);
	}

	@Test public void testEscaped$InAction() throws Exception {
		String action = "int \\$n; \"\\$in string\\$\"";
		String expected = "int \\$n; \"\\$in string\\$\"";
		testActions(attributeTemplate, "members", action, expected);
		testActions(attributeTemplate, "init", action, expected);
		testActions(attributeTemplate, "inline", action, expected);
		testActions(attributeTemplate, "finally", action, expected);
		testActions(attributeTemplate, "inline2", action, expected);
	}

	@Test public void testComplicatedArgParsing() throws Exception {
		String action = "x, (*a).foo(21,33), 3.2+1, '\\n', "+
			"\"a,oo\\nick\", {bl, \"fdkj\"eck}";
		String expected = "x, (*a).foo(21,33), 3.2+1, '\\n', "+
			"\"a,oo\\nick\", {bl, \"fdkj\"eck}";
		testActions(attributeTemplate, "members", action, expected);
		testActions(attributeTemplate, "init", action, expected);
		testActions(attributeTemplate, "inline", action, expected);
		testActions(attributeTemplate, "finally", action, expected);
		testActions(attributeTemplate, "inline2", action, expected);
	}

	@Test public void testComplicatedArgParsingWithTranslation() throws Exception {
		String action = "x, $ID.text+\"3242\", (*$ID).foo(21,33), 3.2+1, '\\n', "+
			"\"a,oo\\nick\", {bl, \"fdkj\"eck}";
		String expected = "x, (_rID!=null?_rID.getText():null)+\"3242\"," +
			" (*_tID).foo(21,33), 3.2+1, '\\n', \"a,oo\\nick\", {bl, \"fdkj\"eck}";
		testActions(attributeTemplate, "inline", action, expected);
	}

	@Test public void testArguments() throws Exception {
		String action = "$x; $a.x";
		String expected = "_ctx.x; _ctx.x";
		testActions(attributeTemplate, "inline", action, expected);
	}

	@Test public void testReturnValue() throws Exception {
		String action = "$x; $a.x";
		String expected = "_ctx.x; _ctx.x";
		testActions(attributeTemplate, "inline", action, expected);
	}

	@Test public void testReturnValueWithNumber() throws Exception {
		String action = "$a.x1";
		String expected = "_ctx.x1";
		testActions(attributeTemplate, "inline", action, expected);
	}

	@Test public void testReturnValuesCurrentRule() throws Exception {
		String action = "$y; $a.y;";
		String expected = "_ctx.y; _ctx.y;";
		testActions(attributeTemplate, "inline", action, expected);
	}

	@Test public void testReturnValues() throws Exception {
		String action = "$lab.e; $b.e;";
		String expected = "lab.e; _rb.e;";
		testActions(attributeTemplate, "inline", action, expected);
	}

	@Test public void testReturnWithMultipleRuleRefs() throws Exception {
		String action = "$c.x; $c.y;";
		String expected = "_rc.x; _rc.y;";
		testActions(attributeTemplate, "inline", action, expected);
	}

	@Test public void testTokenRefs() throws Exception {
		String action = "$id; $ID; $id.text; $id.getText(); $id.line;";
		String expected = "id; _tID; (id!=null?id.getText():null); id.getText(); (id!=null?id.getLine():0);";
		testActions(attributeTemplate, "inline", action, expected);
	}

	@Test public void testRuleRefs() throws Exception {
		String action = "$lab.start; $c.tree;";
		String expected = "(lab!=null?(()lab.start):null); (_rc!=null?(()_rc.tree):null);";
		testActions(attributeTemplate, "inline", action, expected);
	}

	@Test public void testBasicGlobalScope() throws Exception {
		String action = "$S::i";
		String expected = "S_stack.peek().i";
		testActions(scopeTemplate, "members", action, expected);
	}

	@Test public void test0IndexedGlobalScope() throws Exception {
		String action = "$S[0]::i";
		String expected = "S_stack.get(0).i";
		testActions(scopeTemplate, "members", action, expected);
	}

	@Test public void testAbsoluteIndexedGlobalScope() throws Exception {
		String action = "$S[3]::i";
		String expected = "S_stack.get(3).i";
		testActions(scopeTemplate, "members", action, expected);
	}

	@Test public void testNegIndexedGlobalScope() throws Exception {
		String action = "$S[-1]::i";
		String expected = "S_stack.get(S_stack.size()-1-1).i";
		testActions(scopeTemplate, "members", action, expected);
	}

	@Test public void testNegIndexedGlobalScope2() throws Exception {
		String action = "$S[-$S::i]::i";
		String expected = "S_stack.get(S_stack.size()-S_stack.peek().i-1).i";
		testActions(scopeTemplate, "members", action, expected);
	}

	@Test public void testBasicRuleScope() throws Exception {
		String action = "$a::z";
		String expected = "a_scope_stack.peek().z";
		testActions(scopeTemplate, "inline", action, expected);
	}

	@Test public void testBasicGlobalScopeInRule() throws Exception {
		String action = "$S::i";
		String expected = "S_stack.peek().i";
		testActions(scopeTemplate, "inline", action, expected);
	}

	@Test public void testSetBasicRuleScope() throws Exception {
		String action = "$a::z = 3;";
		String expected = "a_scope_stack.peek().z = 3;";
		testActions(scopeTemplate, "inline", action, expected);
	}

	@Test public void testSetBasicGlobalScopeInRule() throws Exception {
		String action = "$S::i = 3;";
		String expected = "S_stack.peek().i = 3;";
		testActions(scopeTemplate, "inline", action, expected);
	}

	@Test public void testSet0IndexedGlobalScope() throws Exception {
		String action = "$S[0]::i = $S::i;";
		String expected = "S_stack.get(0).i = S_stack.peek().i;";
		testActions(scopeTemplate, "members", action, expected);
	}

	@Test public void testSetAbsoluteIndexedGlobalScope() throws Exception {
		String action = "$S[3]::i = $S::i;";
		String expected = "S_stack.get(3).i = S_stack.peek().i;";
		testActions(scopeTemplate, "members", action, expected);
	}

	@Test public void testSetNegIndexedGlobalScope() throws Exception {
		String action = "$S[-1]::i = $S::i;";
		String expected = "S_stack.get(S_stack.size()-1-1).i = S_stack.peek().i;";
		testActions(scopeTemplate, "members", action, expected);
	}

	@Test public void testSetNegIndexedGlobalScope2() throws Exception {
		String action = "$S[-$S::i]::i = $S::i;";
		String expected = "S_stack.get(S_stack.size()-S_stack.peek().i-1).i = S_stack.peek().i;";
		testActions(scopeTemplate, "members", action, expected);
	}

	@Test public void testIsolatedDynamicRuleScopeRef() throws Exception {
		String action = "$a;"; // refers to stack not top of stack
		String expected = "a_scope_stack;";
		testActions(scopeTemplate, "inline", action, expected);
	}

	@Test public void testIsolatedGlobalScopeRef() throws Exception {
		String action = "$S;";
		String expected = "S_stack;";
		testActions(scopeTemplate, "inline", action, expected);
	}


	@Test public void testDynamicRuleScopeRefInSubrule() throws Exception {
		String action = "$a::n;";
	}
	@Test public void testRuleScopeFromAnotherRule() throws Exception {
		String action = "$a::n;"; // must be qualified
	}
	@Test public void testFullyQualifiedRefToCurrentRuleParameter() throws Exception {
		String action = "$a.i;";
	}
	@Test public void testFullyQualifiedRefToCurrentRuleRetVal() throws Exception {
		String action = "$a.i;";
	}
	@Test public void testSetFullyQualifiedRefToCurrentRuleRetVal() throws Exception {
		String action = "$a.i = 1;";
	}
	@Test public void testIsolatedRefToCurrentRule() throws Exception {
		String action = "$a;";
	}
	@Test public void testIsolatedRefToRule() throws Exception {
		String action = "$x;";
	}
	@Test public void testFullyQualifiedRefToLabelInCurrentRule() throws Exception {
		String action = "$a.x;";
	}
	@Test public void testFullyQualifiedRefToListLabelInCurrentRule() throws Exception {
		String action = "$a.x;"; // must be qualified
	}
	@Test public void testFullyQualifiedRefToTemplateAttributeInCurrentRule() throws Exception {
		String action = "$a.st;"; // can be qualified
	}
	@Test public void testRuleRefWhenRuleHasScope() throws Exception {
		String action = "$b.start;";
	}
@Test public void testDynamicScopeRefOkEvenThoughRuleRefExists() throws Exception {
|
||||
String action = "$b::n;";
|
||||
}
|
||||
@Test public void testRefToTemplateAttributeForCurrentRule() throws Exception {
|
||||
String action = "$st=null;";
|
||||
}
|
||||
@Test public void testRefToTextAttributeForCurrentRule() throws Exception {
|
||||
String action = "$text";
|
||||
}
|
||||
@Test public void testRefToStartAttributeForCurrentRule() throws Exception {
|
||||
String action = "$start;";
|
||||
}
|
||||
|
||||
@Test public void testTokenLabelFromMultipleAlts() throws Exception {
|
||||
String action = "$ID.text;"; // must be qualified
|
||||
}
|
||||
@Test public void testRuleLabelFromMultipleAlts() throws Exception {
|
||||
String action = "$b.text;"; // must be qualified
|
||||
}
|
||||
@Test public void testUnqualifiedRuleScopeAttribute() throws Exception {
|
||||
String action = "$n;"; // must be qualified
|
||||
}
|
||||
@Test public void testRuleAndTokenLabelTypeMismatch() throws Exception {
|
||||
}
|
||||
@Test public void testListAndTokenLabelTypeMismatch() throws Exception {
|
||||
}
|
||||
@Test public void testListAndRuleLabelTypeMismatch() throws Exception {
|
||||
}
|
||||
@Test public void testArgReturnValueMismatch() throws Exception {
|
||||
}
|
||||
@Test public void testSimplePlusEqualLabel() throws Exception {
|
||||
String action = "$ids.size();"; // must be qualified
|
||||
}
|
||||
@Test public void testPlusEqualStringLabel() throws Exception {
|
||||
String action = "$ids.size();"; // must be qualified
|
||||
}
|
||||
@Test public void testPlusEqualSetLabel() throws Exception {
|
||||
String action = "$ids.size();"; // must be qualified
|
||||
}
|
||||
@Test public void testPlusEqualWildcardLabel() throws Exception {
|
||||
String action = "$ids.size();"; // must be qualified
|
||||
}
|
||||
@Test public void testImplicitTokenLabel() throws Exception {
|
||||
String action = "$ID; $ID.text; $ID.getText()";
|
||||
}
|
||||
|
||||
@Test public void testImplicitRuleLabel() throws Exception {
|
||||
String action = "$r.start;";
|
||||
}
|
||||
|
||||
@Test public void testReuseExistingLabelWithImplicitRuleLabel() throws Exception {
|
||||
String action = "$r.start;";
|
||||
}
|
||||
|
||||
@Test public void testReuseExistingListLabelWithImplicitRuleLabel() throws Exception {
|
||||
String action = "$r.start;";
|
||||
}
|
||||
|
||||
@Test public void testReuseExistingLabelWithImplicitTokenLabel() throws Exception {
|
||||
String action = "$ID.text;";
|
||||
}
|
||||
|
||||
@Test public void testReuseExistingListLabelWithImplicitTokenLabel() throws Exception {
|
||||
String action = "$ID.text;";
|
||||
}
|
||||
|
||||
@Test public void testRuleLabelWithoutOutputOption() throws Exception {
|
||||
}
|
||||
@Test public void testRuleLabelOnTwoDifferentRulesAST() throws Exception {
|
||||
}
|
||||
@Test public void testRuleLabelOnTwoDifferentRulesTemplate() throws Exception {
|
||||
}
|
||||
@Test public void testMissingArgs() throws Exception {
|
||||
}
|
||||
@Test public void testArgsWhenNoneDefined() throws Exception {
|
||||
}
|
||||
@Test public void testReturnInitValue() throws Exception {
|
||||
}
|
||||
@Test public void testMultipleReturnInitValue() throws Exception {
|
||||
}
|
||||
@Test public void testCStyleReturnInitValue() throws Exception {
|
||||
}
|
||||
@Test public void testArgsWithInitValues() throws Exception {
|
||||
}
|
||||
@Test public void testArgsOnToken() throws Exception {
|
||||
}
|
||||
@Test public void testArgsOnTokenInLexer() throws Exception {
|
||||
}
|
||||
@Test public void testLabelOnRuleRefInLexer() throws Exception {
|
||||
String action = "$i.text";
|
||||
}
|
||||
|
||||
@Test public void testRefToRuleRefInLexer() throws Exception {
|
||||
String action = "$ID.text";
|
||||
}
|
||||
|
||||
@Test public void testRefToRuleRefInLexerNoAttribute() throws Exception {
|
||||
String action = "$ID";
|
||||
}
|
||||
|
||||
@Test public void testCharLabelInLexer() throws Exception {
|
||||
}
|
||||
@Test public void testCharListLabelInLexer() throws Exception {
|
||||
}
|
||||
@Test public void testWildcardCharLabelInLexer() throws Exception {
|
||||
}
|
||||
@Test public void testWildcardCharListLabelInLexer() throws Exception {
|
||||
}
|
||||
@Test public void testMissingArgsInLexer() throws Exception {
|
||||
}
|
||||
@Test public void testLexerRulePropertyRefs() throws Exception {
|
||||
String action = "$text $type $line $pos $channel $index $start $stop";
|
||||
}
|
||||
|
||||
@Test public void testLexerLabelRefs() throws Exception {
|
||||
String action = "$a $b.text $c $d.text";
|
||||
}
|
||||
|
||||
@Test public void testSettingLexerRulePropertyRefs() throws Exception {
|
||||
String action = "$text $type=1 $line=1 $pos=1 $channel=1 $index";
|
||||
}
|
||||
|
||||
@Test public void testArgsOnTokenInLexerRuleOfCombined() throws Exception {
|
||||
}
|
||||
@Test public void testMissingArgsOnTokenInLexerRuleOfCombined() throws Exception {
|
||||
}
|
||||
@Test public void testTokenLabelTreeProperty() throws Exception {
|
||||
String action = "$id.tree;";
|
||||
}
|
||||
|
||||
@Test public void testTokenRefTreeProperty() throws Exception {
|
||||
String action = "$ID.tree;";
|
||||
}
|
||||
|
||||
@Test public void testAmbiguousTokenRef() throws Exception {
|
||||
String action = "$ID;";
|
||||
}
|
||||
|
||||
@Test public void testAmbiguousTokenRefWithProp() throws Exception {
|
||||
String action = "$ID.text;";
|
||||
}
|
||||
|
||||
@Test public void testRuleRefWithDynamicScope() throws Exception {
|
||||
String action = "$field::x = $field.st;";
|
||||
}
|
||||
|
||||
@Test public void testAssignToOwnRulenameAttr() throws Exception {
|
||||
String action = "$rule.tree = null;";
|
||||
}
|
||||
|
||||
@Test public void testAssignToOwnParamAttr() throws Exception {
|
||||
String action = "$rule.i = 42; $i = 23;";
|
||||
}
|
||||
|
||||
@Test public void testIllegalAssignToOwnRulenameAttr() throws Exception {
|
||||
String action = "$rule.stop = 0;";
|
||||
}
|
||||
|
||||
@Test public void testIllegalAssignToLocalAttr() throws Exception {
|
||||
String action = "$tree = null; $st = null; $start = 0; $stop = 0; $text = 0;";
|
||||
}
|
||||
|
||||
@Test public void testIllegalAssignRuleRefAttr() throws Exception {
|
||||
String action = "$other.tree = null;";
|
||||
}
|
||||
|
||||
@Test public void testIllegalAssignTokenRefAttr() throws Exception {
|
||||
String action = "$ID.text = \"test\";";
|
||||
}
|
||||
|
||||
@Test public void testAssignToTreeNodeAttribute() throws Exception {
|
||||
String action = "$tree.scope = localScope;";
|
||||
}
|
||||
|
||||
@Test public void testDoNotTranslateAttributeCompare() throws Exception {
|
||||
String action = "$a.line == $b.line";
|
||||
}
|
||||
|
||||
@Test public void testDoNotTranslateScopeAttributeCompare() throws Exception {
|
||||
String action = "if ($rule::foo == \"foo\" || 1) { System.out.println(\"ouch\"); }";
|
||||
}
|
||||
|
||||
@Test public void testTreeRuleStopAttributeIsInvalid() throws Exception {
|
||||
String action = "$r.x; $r.start; $r.stop";
|
||||
}
|
||||
|
||||
@Test public void testRefToTextAttributeForCurrentTreeRule() throws Exception {
|
||||
String action = "$text";
|
||||
}
|
||||
|
||||
@Test public void testTypeOfGuardedAttributeRefIsCorrect() throws Exception {
|
||||
String action = "int x = $b::n;";
|
||||
}
|
||||
|
||||
@Test public void testBracketArgParsing() throws Exception {
|
||||
}
|
||||
|
||||
@Test public void testStringArgParsing() throws Exception {
|
||||
String action = "34, '{', \"it's<\", '\"', \"\\\"\", 19";
|
||||
}
|
||||
@Test public void testComplicatedSingleArgParsing() throws Exception {
|
||||
String action = "(*a).foo(21,33,\",\")";
|
||||
}
|
||||
@Test public void testArgWithLT() throws Exception {
|
||||
String action = "34<50";
|
||||
}
|
||||
@Test public void testGenericsAsArgumentDefinition() throws Exception {
|
||||
String action = "$foo.get(\"ick\");";
|
||||
}
|
||||
@Test public void testGenericsAsArgumentDefinition2() throws Exception {
|
||||
String action = "$foo.get(\"ick\"); x=3;";
|
||||
}
|
||||
@Test public void testGenericsAsReturnValue() throws Exception {
|
||||
}
|
||||
|
||||
|
||||
|
||||
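	/**
	 * Render the grammar template, run the tool pipeline (semantic checks,
	 * ATN construction, code generation), and compare the generated code
	 * found between the #actionName# ... #end-actionName# markers against
	 * {@code expected}. Any messages in the local ErrorQueue are printed
	 * to stderr.
	 */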
	public void testActions(String templates, String actionName, String action, String expected) {
		int lp = templates.indexOf('(');
		String name = templates.substring(0, lp);
		STGroup group = new STGroupString(templates);
		ST st = group.getInstanceOf(name);
		st.add(actionName, action);
		String grammar = st.render();
		try {
			ErrorQueue equeue = new ErrorQueue();
			Grammar g = new Grammar(grammar);
			if ( g.ast!=null && !g.ast.hasErrors ) {
				SemanticPipeline sem = new SemanticPipeline(g);
				sem.process();

				ATNFactory factory = new ParserATNFactory(g);
				if ( g.isLexer() ) factory = new LexerATNFactory((LexerGrammar)g);
				g.atn = factory.createATN();

				CodeGenerator gen = new CodeGenerator(g);
				ST outputFileST = gen.generate();
				String output = outputFileST.render();
				//System.out.println(output);
				String b = "#" + actionName + "#";
				int start = output.indexOf(b);
				String e = "#end-" + actionName + "#";
				int end = output.indexOf(e);
				String snippet = output.substring(start+b.length(), end);
				assertEquals(expected, snippet);
			}
			if ( equeue.size()>0 ) {
				System.err.println(equeue.toString(g.tool));
			}
		}
		catch (RecognitionException re) {
			re.printStackTrace(System.err);
		}
	}
}

@@ -0,0 +1,280 @@
package org.antlr.v4.test;

import org.antlr.runtime.RecognitionException;
import org.junit.Test;
import org.stringtemplate.v4.ST;

/** */
public class TestAttributeChecks extends BaseTest {
	String attributeTemplate =
		"parser grammar A;\n"+
		"@members {<members>}\n" +
		"a[int x] returns [int y]\n" +
		"@init {<init>}\n" +
		" : id=ID ids+=ID lab=b[34] {\n" +
		" <inline>\n" +
		" }\n" +
		" c\n" +
		" ;\n" +
		" finally {<finally>}\n" +
		"b[int d] returns [int e]\n" +
		" : {<inline2>}\n" +
		" ;\n" +
		"c : ;\n" +
		"d : ;\n";

	String scopeTemplate =
		"parser grammar A;\n"+
		"@members {\n" +
		"<members>\n" +
		"}\n" +
		"scope S { int i; }\n" +
		"a[int x] returns [int y]\n" +
		"scope { int z; }\n" +
		"scope S;\n" +
		"@init {<init>}\n" +
		" : lab=b[34] {\n" +
		" <inline>" +
		" }\n" +
		" ;\n" +
		" finally {<finally>}\n" +
		"b[int d] returns [int e]\n" +
		"scope { int f; }\n" +
		" : {<inline2>}\n" +
		" ;\n" +
		"c : ;";

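	// Each check array is a flat list of (action, expected-errors) pairs: the
	// action text is substituted into the template at the named location and
	// the tool's error output must match the second element ("" = no error).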
	String[] membersChecks = {
		"$a", "error(29): A.g:2:11: unknown attribute reference a in $a\n",
		"$a.y", "error(29): A.g:2:11: unknown attribute reference a in $a.y\n",
	};

	String[] initChecks = {
		"$text", "",
		"$start", "",
		"$x = $y", "",
		"$y = $x", "",
		"$lab.e", "",
		"$ids", "",

		"$a", "error(33): A.g:4:8: missing attribute access on rule reference a in $a\n",
		"$c", "error(29): A.g:4:8: unknown attribute reference c in $c\n",
		"$a.q", "error(31): A.g:4:10: unknown attribute q for rule a in $a.q\n",
	};

	String[] inlineChecks = {
		"$text", "",
		"$start", "",
		"$x = $y", "",
		"$y = $x", "",
		"$a.x = $a.y", "",
		"$lab.e", "",
		"$lab.text", "",
		"$b.e", "",
		"$c.text", "",
		"$ID", "",
		"$ID.text", "",
		"$id", "",
		"$id.text", "",
		"$ids", "",
	};

	String[] bad_inlineChecks = {
		"$a", "error(33): A.g:6:4: missing attribute access on rule reference a in $a\n",
		"$b", "error(33): A.g:6:4: missing attribute access on rule reference b in $b\n",
		"$lab", "error(33): A.g:6:4: missing attribute access on rule reference lab in $lab\n",
		"$c", "error(33): A.g:6:4: missing attribute access on rule reference c in $c\n", // no scope
		"$q", "error(29): A.g:6:4: unknown attribute reference q in $q\n",
		"$q.y", "error(29): A.g:6:4: unknown attribute reference q in $q.y\n",
		"$q = 3", "error(29): A.g:6:4: unknown attribute reference q in $q\n",
		"$q = 3;", "error(29): A.g:6:4: unknown attribute reference q in $q = 3;\n",
		"$q.y = 3;", "error(29): A.g:6:4: unknown attribute reference q in $q.y = 3;\n",
		"$q = $blort;", "error(29): A.g:6:4: unknown attribute reference q in $q = $blort;\n" +
			"error(29): A.g:6:9: unknown attribute reference blort in $blort\n",
		"$a.ick", "error(31): A.g:6:6: unknown attribute ick for rule a in $a.ick\n",
		"$a.ick = 3;", "error(31): A.g:6:6: unknown attribute ick for rule a in $a.ick = 3;\n",
		"$b.d", "error(30): A.g:6:6: cannot access rule d's parameter: $b.d\n", // can't see rule ref's arg
		"$d.text", "error(29): A.g:6:4: unknown attribute reference d in $d.text\n", // valid rule, but no ref
		"$lab.d", "error(30): A.g:6:8: cannot access rule d's parameter: $lab.d\n",
	};

	String[] finallyChecks = {
		"$text", "",
		"$start", "",
		"$x = $y", "",
		"$y = $x", "",
		"$lab.e", "",
		"$lab.text", "",
		"$id", "",
		"$id.text", "",
		"$ids", "",

		"$lab", "error(33): A.g:9:14: missing attribute access on rule reference lab in $lab\n",
		"$a", "error(33): A.g:9:14: missing attribute access on rule reference a in $a\n",
		"$q", "error(29): A.g:9:14: unknown attribute reference q in $q\n",
		"$q.y", "error(29): A.g:9:14: unknown attribute reference q in $q.y\n",
		"$q = 3", "error(29): A.g:9:14: unknown attribute reference q in $q\n",
		"$q = 3;", "error(29): A.g:9:14: unknown attribute reference q in $q = 3;\n",
		"$q.y = 3;", "error(29): A.g:9:14: unknown attribute reference q in $q.y = 3;\n",
		"$q = $blort;", "error(29): A.g:9:14: unknown attribute reference q in $q = $blort;\n" +
			"error(29): A.g:9:19: unknown attribute reference blort in $blort\n",
		"$a.ick", "error(31): A.g:9:16: unknown attribute ick for rule a in $a.ick\n",
		"$a.ick = 3;", "error(31): A.g:9:16: unknown attribute ick for rule a in $a.ick = 3;\n",
		"$b", "error(29): A.g:9:14: unknown attribute reference b in $b\n",
		"$b.e", "error(29): A.g:9:14: unknown attribute reference b in $b.e\n", // can't see rule refs outside alts
		"$b.d", "error(29): A.g:9:14: unknown attribute reference b in $b.d\n",
		"$c.text", "error(29): A.g:9:14: unknown attribute reference c in $c.text\n",
		"$lab.d", "error(30): A.g:9:18: cannot access rule d's parameter: $lab.d\n",
	};

	String[] dynMembersChecks = {
		"$S", "",
		"$S::i", "",
		"$S::i=$S::i", "",

		"$b::f", "error(54): A.g:3:1: unknown dynamic scope: b in $b::f\n",
		"$S::j", "error(55): A.g:3:4: unknown dynamically-scoped attribute for scope S: j in $S::j\n",
		"$S::j = 3;", "error(55): A.g:3:4: unknown dynamically-scoped attribute for scope S: j in $S::j = 3;\n",
		"$S::j = $S::k;", "error(55): A.g:3:4: unknown dynamically-scoped attribute for scope S: j in $S::j = $S::k;\n" +
			"error(55): A.g:3:12: unknown dynamically-scoped attribute for scope S: k in $S::k\n",
	};

	String[] dynInitChecks = {
		"$a", "",
		"$b", "",
		"$lab", "",
		"$b::f", "",
		"$S::i", "",
		"$S::i=$S::i", "",
		"$a::z", "",
		"$S", "",

		"$S::j", "error(55): A.g:8:11: unknown dynamically-scoped attribute for scope S: j in $S::j\n",
		"$S::j = 3;", "error(55): A.g:8:11: unknown dynamically-scoped attribute for scope S: j in $S::j = 3;\n",
		"$S::j = $S::k;", "error(55): A.g:8:11: unknown dynamically-scoped attribute for scope S: j in $S::j = $S::k;\n" +
			"error(55): A.g:8:19: unknown dynamically-scoped attribute for scope S: k in $S::k\n",
	};

	String[] dynInlineChecks = {
		"$a", "",
		"$b", "",
		"$lab", "",
		"$b::f", "",
		"$S", "",
		"$S::i", "",
		"$S::i=$S::i", "",
		"$a::z", "",

		"$S::j", "error(55): A.g:10:7: unknown dynamically-scoped attribute for scope S: j in $S::j\n",
		"$S::j = 3;", "error(55): A.g:10:7: unknown dynamically-scoped attribute for scope S: j in $S::j = 3;\n",
		"$S::j = $S::k;", "error(55): A.g:10:7: unknown dynamically-scoped attribute for scope S: j in $S::j = $S::k;\n" +
			"error(55): A.g:10:15: unknown dynamically-scoped attribute for scope S: k in $S::k\n",
		"$Q[-1]::y", "error(54): A.g:10:4: unknown dynamic scope: Q in $Q[-1]::y\n",
		"$Q[-i]::y", "error(54): A.g:10:4: unknown dynamic scope: Q in $Q[-i]::y\n",
		"$Q[i]::y", "error(54): A.g:10:4: unknown dynamic scope: Q in $Q[i]::y\n",
		"$Q[0]::y", "error(54): A.g:10:4: unknown dynamic scope: Q in $Q[0]::y\n",
		"$Q[-1]::y = 23;", "error(54): A.g:10:4: unknown dynamic scope: Q in $Q[-1]::y = 23;\n",
		"$Q[-i]::y = 23;", "error(54): A.g:10:4: unknown dynamic scope: Q in $Q[-i]::y = 23;\n",
		"$Q[i]::y = 23;", "error(54): A.g:10:4: unknown dynamic scope: Q in $Q[i]::y = 23;\n",
		"$Q[0]::y = 23;", "error(54): A.g:10:4: unknown dynamic scope: Q in $Q[0]::y = 23;\n",
		"$S[-1]::y", "error(55): A.g:10:11: unknown dynamically-scoped attribute for scope S: y in $S[-1]::y\n",
		"$S[-i]::y", "error(55): A.g:10:11: unknown dynamically-scoped attribute for scope S: y in $S[-i]::y\n",
		"$S[i]::y", "error(55): A.g:10:10: unknown dynamically-scoped attribute for scope S: y in $S[i]::y\n",
		"$S[0]::y", "error(55): A.g:10:10: unknown dynamically-scoped attribute for scope S: y in $S[0]::y\n",
		"$S[-1]::y = 23;", "error(55): A.g:10:11: unknown dynamically-scoped attribute for scope S: y in $S[-1]::y = 23;\n",
		"$S[-i]::y = 23;", "error(55): A.g:10:11: unknown dynamically-scoped attribute for scope S: y in $S[-i]::y = 23;\n",
		"$S[i]::y = 23;", "error(55): A.g:10:10: unknown dynamically-scoped attribute for scope S: y in $S[i]::y = 23;\n",
		"$S[0]::y = 23;", "error(55): A.g:10:10: unknown dynamically-scoped attribute for scope S: y in $S[0]::y = 23;\n",
		"$S[$S::y]::i", "error(55): A.g:10:10: unknown dynamically-scoped attribute for scope S: y in $S::y\n"
	};

	String[] dynFinallyChecks = {
		"$a", "",
		"$b", "",
		"$lab", "",
		"$b::f", "",
		"$S", "",
		"$S::i", "",
		"$S::i=$S::i", "",
		"$a::z", "",

		"$S::j", "error(55): A.g:12:17: unknown dynamically-scoped attribute for scope S: j in $S::j\n",
		"$S::j = 3;", "error(55): A.g:12:17: unknown dynamically-scoped attribute for scope S: j in $S::j = 3;\n",
		"$S::j = $S::k;", "error(55): A.g:12:17: unknown dynamically-scoped attribute for scope S: j in $S::j = $S::k;\n" +
			"error(55): A.g:12:25: unknown dynamically-scoped attribute for scope S: k in $S::k\n",
	};

	@Test public void testMembersActions() throws RecognitionException {
		testActions("members", membersChecks, attributeTemplate);
	}

	@Test public void testInitActions() throws RecognitionException {
		testActions("init", initChecks, attributeTemplate);
	}

	@Test public void testInlineActions() throws RecognitionException {
		testActions("inline", inlineChecks, attributeTemplate);
	}

	@Test public void testBadInlineActions() throws RecognitionException {
		testActions("inline", bad_inlineChecks, attributeTemplate);
	}

	@Test public void testFinallyActions() throws RecognitionException {
		testActions("finally", finallyChecks, attributeTemplate);
	}

	@Test public void testDynMembersActions() throws RecognitionException {
		testActions("members", dynMembersChecks, scopeTemplate);
	}

	@Test public void testDynInitActions() throws RecognitionException {
		testActions("init", dynInitChecks, scopeTemplate);
	}

	@Test public void testDynInlineActions() throws RecognitionException {
		testActions("inline", dynInlineChecks, scopeTemplate);
	}

	@Test public void testDynFinallyActions() throws RecognitionException {
		testActions("finally", dynFinallyChecks, scopeTemplate);
	}

	@Test public void testTokenRef() throws RecognitionException {
		String grammar =
			"parser grammar S;\n" +
			"a : x=ID {Token t = $x; t = $ID;} ;\n";
		String expected =
			"";
		testErrors(new String[] {grammar, expected}, false);
	}

	@Test public void testNonDynamicAttributeOutsideRule() throws Exception {
		String action = "public void foo() { $x; }";
	}
	@Test public void testNonDynamicAttributeOutsideRule2() throws Exception {
		String action = "public void foo() { $x.y; }";
	}
	@Test public void testUnknownGlobalScope() throws Exception {
		String action = "$Symbols::names.add($id.text);";
	}
	@Test public void testUnknownDynamicAttribute() throws Exception {
		String action = "$a::x";
	}

	@Test public void testUnknownGlobalDynamicAttribute() throws Exception {
		String action = "$Symbols::x";
	}

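	/** Plug each action into the template at {@code location} and check that
	 *  the tool reports exactly the errors expected for that pair. */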
	public void testActions(String location, String[] pairs, String template) {
		for (int i = 0; i < pairs.length; i+=2) {
			String action = pairs[i];
			String expected = pairs[i+1];
			ST st = new ST(template);
			st.add(location, action);
			String grammar = st.render();
			testErrors(new String[] {grammar, expected}, false);
		}
	}
}

@@ -0,0 +1,103 @@
package org.antlr.v4.test;

import org.junit.Test;

public class TestBasicSemanticErrors extends BaseTest {
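	// Each static array pairs a grammar (INPUT) with the exact error output
	// the tool is expected to produce for it (YIELDS).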
	static String[] A = {
		// INPUT
		"grammar A;\n" +
		"\n" +
		"options {\n" +
		" output=template;\n" +
		"}\n" +
		"\n" +
		"a : ID<Foo> -> ID ;\n" +
		"\n" +
		"b : A^ | ((B!|C)) -> C;",
		// YIELDS
		"error(68): A.g:7:7: alts with rewrites can't use heterogeneous types left of ->\n" +
		"error(78): A.g:9:4: AST operator with non-AST output option: ^\n" +
		"error(78): A.g:9:11: AST operator with non-AST output option: !\n" +
		"error(79): A.g:9:11: rule b alt 2 uses rewrite syntax and also an AST operator\n",

		// INPUT
		"tree grammar B;\n" +
		"options {\n" +
		"\tfilter=true;\n" +
		"\tbacktrack=false;\n" +
		"\toutput=template;\n" +
		"}\n" +
		"\n" +
		"a : A;\n" +
		"\n" +
		"b : ^(. A) ;",
		// YIELDS
		"error(80): B.g:10:6: Wildcard invalid as root; wildcard can itself be a tree\n" +
		"error(81): B.g:1:5: option backtrack=false conflicts with tree grammar filter mode\n" +
		"error(81): B.g:1:5: option output=template conflicts with tree grammar filter mode\n"
	};

	static String[] U = {
		// INPUT
		"parser grammar U;\n" +
		"options { foo=bar; k=*; backtrack=true;}\n" +
		"tokens {\n" +
		" f='fkj';\n" +
		" S = 'a';\n" +
		"}\n" +
		"tokens { A; }\n" +
		"options { x=y; }\n" +
		"\n" +
		"a\n" +
		"options { blech=bar; greedy=true; }\n" +
		" : ID\n" +
		" ;\n" +
		"b : ( options { ick=bar; greedy=true; } : ID )+ ;\n" +
		"c : ID<blue> ID<x=y> ;",
		// YIELDS
		"error(21): U.g:8:0: repeated grammar prequel spec (option, token, or import); please merge\n" +
		"error(21): U.g:7:0: repeated grammar prequel spec (option, token, or import); please merge\n" +
		"error(49): U.g:2:10: illegal option foo\n" +
		"error(26): U.g:4:8: token names must start with an uppercase letter: f\n" +
		"error(25): U.g:4:8: can't assign string value to token name f in non-combined grammar\n" +
		"error(25): U.g:5:8: can't assign string value to token name S in non-combined grammar\n" +
		"error(49): U.g:8:10: illegal option x\n" +
		"error(49): U.g:11:10: illegal option blech\n" +
		"error(49): U.g:14:16: illegal option ick\n" +
		"error(49): U.g:15:16: illegal option x\n",

		// INPUT
		"tree grammar V;\n" +
		"options {\n" +
		" rewrite=true;\n" +
		" output=template;\n" +
		"}\n" +
		"a : A\n" +
		" | A B -> template() \"kjsfdkdsj\" \n" +
		" ;",
		// YIELDS
		"error(66): V.g:7:4: with rewrite=true, alt 2 not simple node or obvious tree element; text attribute for rule not guaranteed to be correct\n",

		// INPUT
		"tree grammar V;\n" +
		"options { rewrite=true; }\n" +
		"a : A\n" +
		" | A B -> template() \"kjsfdkdsj\" \n" +
		" ;",
		// YIELDS
		"error(62): V.g:4:8: rule a uses rewrite syntax or operator with no output option\n",
	};

	static String[] C = {
		// INPUT
		"parser grammar C;\n" +
		"options {output=AST;}\n" +
		"tokens { A; B; C; }\n" +
		"a : A -> B $a A ;", // no problem with or $a.

		""
	};

	@Test public void testA() { super.testErrors(A, false); }
	@Test public void testU() { super.testErrors(U, false); }
	@Test public void testE() { super.testErrors(C, false); }
}

@@ -0,0 +1,70 @@
package org.antlr.v4.test;

import org.junit.Test;

public class TestLexerExec extends BaseTest {
	@Test public void testRefToRuleDoesNotSetTokenNorEmitAnother() throws Exception {
		String grammar =
			"lexer grammar L;\n"+
			"A : '-' I ;\n" +
			"I : '0'..'9'+ ;\n"+
			"WS : (' '|'\\n') {skip();} ;";
		String found = execLexer("L.g", grammar, "L", "34 -21 3");
		String expecting =
			"[@0,0:1='34',<4>,1:0]\n" +
			"[@1,3:5='-21',<3>,1:3]\n" +
			"[@2,7:7='3',<4>,1:7]\n" +
			"[@3,8:8='<EOF>',<-1>,1:8]\n";
		assertEquals(expecting, found);
	}

	@Test public void testActionExecutedInDFA() throws Exception {
		String grammar =
			"lexer grammar L;\n"+
			"I : '0'..'9'+ {System.out.println(\"I\");} ;\n"+
			"WS : (' '|'\\n') {skip();} ;";
		String found = execLexer("L.g", grammar, "L", "34 34");
		String expecting =
			"I\n" +
			"I\n" +
			"[@0,0:1='34',<3>,1:0]\n" +
			"[@1,3:4='34',<3>,1:3]\n" +
			"[@2,5:5='<EOF>',<-1>,1:5]\n";
		assertEquals(expecting, found);
	}

	@Test public void testLexerMode() throws Exception {
		String grammar =
			"lexer grammar L;\n" +
			"STRING_START : '\"' {pushMode(STRING_MODE); more();} ;\n" +
			"WS : ' '|'\n' {skip();} ;\n"+
			"mode STRING_MODE;\n"+
			"STRING : '\"' {popMode();} ;\n"+
			"ANY : . {more();} ;\n";
		String found = execLexer("L.g", grammar, "L", "\"abc\" \"ab\"");
		String expecting =
			"[@0,0:4='\"abc\"',<5>,1:0]\n" +
			"[@1,6:9='\"ab\"',<5>,1:6]\n" +
			"[@2,10:10='<EOF>',<-1>,1:10]\n";
		assertEquals(expecting, found);
	}

	@Test public void testKeywordID() throws Exception {
		String grammar =
			"lexer grammar L;\n"+
			"KEND : 'end' ;\n" + // has priority
			"ID : 'a'..'z'+ ;\n" +
			"WS : (' '|'\n')+ ;";
		String found = execLexer("L.g", grammar, "L", "end eend ending a");
		String expecting =
			"[@0,0:2='end',<3>,1:0]\n" +
			"[@1,3:3=' ',<5>,1:3]\n" +
			"[@2,4:7='eend',<4>,1:4]\n" +
			"[@3,8:8=' ',<5>,1:8]\n" +
			"[@4,9:14='ending',<4>,1:9]\n" +
			"[@5,15:15=' ',<5>,1:15]\n" +
			"[@6,16:16='a',<4>,1:16]\n" +
			"[@7,17:17='<EOF>',<-1>,1:17]\n";
		assertEquals(expecting, found);
	}
}

@@ -0,0 +1,20 @@
package org.antlr.v4.test;

import org.junit.Test;

public class TestParserExec extends BaseTest {

	@Test public void testBasic() throws Exception {
		String grammar =
			"grammar T;\n" +
			"a : ID INT {System.out.println(state.input);} ;\n" +
			"ID : 'a'..'z'+ ;\n" +
			"INT : '0'..'9'+;\n" +
			"WS : (' '|'\\n') {skip();} ;\n";

		String found = execParser("T.g", grammar, "TParser", "TLexer", "a",
								  "abc 34", false);
		assertEquals("abc34\n", found);
	}

}

@@ -0,0 +1,56 @@
|
|||
package org.antlr.v4.test;
|
||||
|
||||
import org.antlr.v4.parse.ScopeParser;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestScopeParsing extends BaseTest {
|
||||
String[] argPairs = {
|
||||
"", "{}",
|
||||
" ", "{}",
|
||||
"int i", "{i=int i}",
|
||||
"int[] i, int j[]", "{i=int[] i, j=int [] j}",
|
||||
"Map<A\\,B>[] i, int j[]", "{i=Map<A,B>[] i, j=int [] j}",
|
||||
"int i = 34+a[3], int j[] = new int[34]",
|
||||
"{i=int i= 34+a[3], j=int [] j= new int[34]}",
|
||||
"char *foo32[3] = {1\\,2\\,3}", "{3=char *foo32[] 3= {1,2,3}}",
|
||||
|
||||
// python/ruby style
|
||||
"i", "{i=null i}",
|
||||
"i,j", "{i=null i, j=null j}",
|
||||
"i,j, k", "{i=null i, j=null j, k=null k}",
|
||||
};
|
||||
|
||||
String[] scopePairs = {
|
||||
"int i;", "{i=int i}",
|
||||
"int[] i; int j[];", "{i=int[] i, j=int [] j}",
|
||||
"Map<A,B>[] i; int j[];", "{i=Map<A,B>[] i, j=int [] j}",
|
||||
"int i = 34+a[3]; int j[] = new int[34];",
|
||||
"{i=int i= 34+a[3], j=int [] j= new int[34]}",
|
||||
"char *foo32[] = {1,2,3};", "{foo32=char *[] foo32= {1,2,3}}",
|
||||
" int i; int c; int k; ", "{i=int i, c=int c, k=int k}",
|
||||
" { int i; int c; int k; }", "{i=int i, c=int c, k=int k}",
|
||||
|
||||
// python/ruby style
|
||||
"i", "{i=null i}",
|
||||
" i ; j ;", "{i=null i, j=null j}",
|
||||
"i; j; k;", "{i=null i, j=null j, k=null k}",
|
||||
};
|
||||
|
||||
@Test public void testArgs() {
|
||||
for (int i = 0; i < argPairs.length; i+=2) {
|
||||
String input = argPairs[i];
|
||||
String expected = argPairs[i+1];
|
||||
String actual = ScopeParser.parseTypeList(input).attributes.toString();
|
||||
assertEquals(expected, actual);
|
||||
}
|
||||
}
|
||||
|
||||
@Test public void testScopes() {
|
||||
for (int i = 0; i < scopePairs.length; i+=2) {
|
||||
String input = scopePairs[i];
|
||||
String expected = scopePairs[i+1];
|
||||
String actual = ScopeParser.parseDynamicScope(input).attributes.toString();
|
||||
assertEquals(expected, actual);
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,61 @@
|
|||
package org.antlr.v4.test;
|
||||
|
||||
import org.junit.*;
|
||||
|
||||
public class TestSemPredEvalLexer extends BaseTest {
|
||||
|
||||
@Test public void testDisableRule() throws Exception {
|
||||
String grammar =
|
||||
"lexer grammar L;\n"+
|
||||
"E1 : {false}? 'enum' ;\n" +
|
||||
"E2 : {true}? 'enum' ;\n" + // winner not E1 or ID
|
||||
"ID : 'a'..'z'+ ;\n"+
|
||||
"WS : (' '|'\\n') {skip();} ;";
|
||||
String found = execLexer("L.g", grammar, "L", "enum abc", true);
|
||||
String expecting =
|
||||
"[@0,0:3='enum',<4>,1:0]\n" +
|
||||
"[@1,5:7='abc',<5>,1:5]\n" +
|
||||
"[@2,8:8='<EOF>',<-1>,1:8]\n"; // no dfa since preds on left edge
|
||||
assertEquals(expecting, found);
|
||||
}
|
||||
|
||||
@Test public void testDisableRuleAfterMatch() throws Exception {
|
||||
String grammar =
|
||||
"lexer grammar L;\n"+
|
||||
"E1 : 'enum' {false}? ;\n" +
|
||||
"E2 : 'enum' {true}? ;\n" + // winner not E1 or ID
|
||||
"ID : 'a'..'z'+ ;\n"+
|
||||
"WS : (' '|'\\n') {skip();} ;";
|
||||
String found = execLexer("L.g", grammar, "L", "enum abc enum", true);
|
||||
String expecting =
|
||||
"[@0,0:3='enum',<4>,1:0]\n" +
|
||||
"[@1,5:7='abc',<5>,1:5]\n" +
|
||||
"[@2,9:12='enum',<4>,1:9]\n" +
|
||||
"[@3,13:13='<EOF>',<-1>,1:13]\n" +
|
||||
"s0-' '->:s4=>6\n" +
|
||||
"s0-'a'->:s5=>5\n" +
|
||||
"s0-'e'->:s1=>5\n" +
|
||||
":s1=>5-'n'->:s2=>5\n" +
|
||||
":s2=>5-'u'->:s3=>5\n" +
|
||||
":s5=>5-'b'->:s5=>5\n" +
|
||||
":s5=>5-'c'->:s5=>5\n";
|
||||
// didn't even create DFA 2nd time; old target of 'u' has "pred" flag set
|
||||
assertEquals(expecting, found);
|
||||
}
|
||||
|
||||
@Ignore
|
||||
public void testMatchNChar() throws Exception { // can't do locals yet
|
||||
String grammar =
|
||||
"lexer grammar L;\n"+
|
||||
"B : {int n=0;} ({n<=2}? DIGIT {n++})+ ;\n" +
|
||||
"fragment DIGIT : '0'..'9' ;\n"+
|
||||
"WS : (' '|'\\n') {skip();} ;";
|
||||
String found = execLexer("L.g", grammar, "L", "1234 56", true);
|
||||
String expecting =
|
||||
"[@0,0:3='enum',<4>,1:0]\n" +
|
||||
"[@1,5:7='abc',<5>,1:5]\n" +
|
||||
"[@2,8:8='<EOF>',<-1>,1:8]\n"; // no dfa since preds on left edge
|
||||
assertEquals(expecting, found);
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,120 @@
|
|||
package org.antlr.v4.test;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestSemPredEvalParser extends BaseTest {
|
||||
@Test public void testToLeft() throws Exception {
|
||||
String grammar =
|
||||
"grammar T;\n" +
|
||||
"s : a+ ;\n" +
|
||||
"a : {false}? ID {System.out.println(\"alt 1\");}\n" +
|
||||
" | {true}? ID {System.out.println(\"alt 2\");}\n" +
|
||||
" ;\n" +
|
||||
"ID : 'a'..'z'+ ;\n" +
|
||||
"INT : '0'..'9'+;\n" +
|
||||
"WS : (' '|'\\n') {skip();} ;\n";
|
||||
|
||||
String found = execParser("T.g", grammar, "TParser", "TLexer", "s",
|
||||
"x x y", false);
|
||||
String expecting =
|
||||
"alt 2\n" +
|
||||
"alt 2\n" +
|
||||
"alt 2\n";
|
||||
assertEquals(expecting, found);
|
||||
}
|
||||
|
||||
@Test public void testToRight() throws Exception {
|
||||
String grammar =
|
||||
"grammar T;\n" +
|
||||
"s : a+ ;\n" +
|
||||
"a : ID {false}? {System.out.println(\"alt 1\");}\n" +
|
||||
" | ID {true}? {System.out.println(\"alt 2\");}\n" +
|
||||
" ;\n" +
|
||||
"ID : 'a'..'z'+ ;\n" +
|
||||
"INT : '0'..'9'+;\n" +
|
||||
"WS : (' '|'\\n') {skip();} ;\n";
|
||||
|
||||
String found = execParser("T.g", grammar, "TParser", "TLexer", "s",
|
||||
"x x y", false);
|
||||
String expecting =
|
||||
"alt 2\n" +
|
||||
"alt 2\n" +
|
||||
"alt 2\n";
|
||||
assertEquals(expecting, found);
|
||||
}
|
||||
|
||||
@Test public void testActionHidesPreds() throws Exception {
|
||||
// can't see preds, resolves to first alt found (1 in this case)
|
||||
String grammar =
|
||||
"grammar T;\n" +
|
||||
"@members {int i;}\n" +
|
||||
"s : a+ ;\n" +
|
||||
"a : {i=1;} ID {i==1}? {System.out.println(\"alt 1\");}\n" +
|
||||
" | {i=2;} ID {i==2}? {System.out.println(\"alt 2\");}\n" +
|
||||
" ;\n" +
|
||||
"ID : 'a'..'z'+ ;\n" +
|
||||
"INT : '0'..'9'+;\n" +
|
||||
"WS : (' '|'\\n') {skip();} ;\n";
|
||||
|
||||
String found = execParser("T.g", grammar, "TParser", "TLexer", "s",
|
||||
"x x y", false);
|
||||
String expecting =
|
||||
"alt 1\n" +
|
||||
"alt 1\n" +
|
||||
"alt 1\n";
|
||||
assertEquals(expecting, found);
|
||||
}
|
||||
|
||||
@Test public void testToLeftWithVaryingPredicate() throws Exception {
|
||||
// alternate predicted alt to ensure DFA doesn't cache
|
||||
// must use forced action since i++ must exec; FOLLOW(a) sees
|
||||
// both preds since it loops around in s.
|
||||
String grammar =
|
||||
"grammar T;\n" +
|
||||
"@members {int i=0;}\n" +
|
||||
"s : ({i++; System.out.println(\"i=\"+i);} a)+ ;\n" +
|
||||
"a : {i \\% 2 == 0}? ID {System.out.println(\"alt 1\");}\n" +
|
||||
" | {i \\% 2 != 0}? ID {System.out.println(\"alt 2\");}\n" +
|
||||
" ;\n" +
|
||||
"ID : 'a'..'z'+ ;\n" +
|
||||
"INT : '0'..'9'+;\n" +
|
||||
"WS : (' '|'\\n') {skip();} ;\n";
|
||||
|
||||
String found = execParser("T.g", grammar, "TParser", "TLexer", "s",
|
||||
"x x y", false);
|
||||
String expecting =
|
||||
"i=1\n" +
|
||||
"alt 2\n" +
|
||||
"i=2\n" +
|
||||
"alt 1\n" +
|
||||
"i=3\n" +
|
||||
"alt 2\n";
|
||||
assertEquals(expecting, found);
|
||||
}
|
||||
|
||||
@Test public void testToRightWithVaryingPredicate() throws Exception {
|
||||
// alternate predicted alt to ensure DFA doesn't cache
|
||||
String grammar =
|
||||
"grammar T;\n" +
|
||||
"@members {int i=0;}\n" +
|
||||
"s : ({i++; System.out.println(\"i=\"+i);} a)+ ;\n" +
|
||||
"a : ID {i \\% 2 == 0}? {System.out.println(\"alt 1\");}\n" +
|
||||
" | ID {i \\% 2 != 0}? {System.out.println(\"alt 2\");}\n" +
|
||||
" ;\n" +
|
||||
"ID : 'a'..'z'+ ;\n" +
|
||||
"INT : '0'..'9'+;\n" +
|
||||
"WS : (' '|'\\n') {skip();} ;\n";
|
||||
|
||||
String found = execParser("T.g", grammar, "TParser", "TLexer", "s",
|
||||
"x x y", false);
|
||||
String expecting =
|
||||
"i=1\n" +
|
||||
"alt 2\n" +
|
||||
"i=2\n" +
|
||||
"alt 1\n" +
|
||||
"i=3\n" +
|
||||
"alt 2\n";
|
||||
assertEquals(expecting, found);
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,102 @@
|
|||
package org.antlr.v4.test;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
/** */
|
||||
public class TestSymbolIssues extends BaseTest {
|
||||
static String[] A = {
|
||||
// INPUT
|
||||
"grammar A;\n" +
|
||||
"options { opt='sss'; k=3; }\n" +
|
||||
"tokens { X; Y='y'; X='x'; Y='q'; Z; Z; }\n" +
|
||||
"scope Blort { int x; }\n" +
|
||||
"\n" +
|
||||
"@members {foo}\n" +
|
||||
"@members {bar}\n" +
|
||||
"@lexer::header {package jj;}\n" +
|
||||
"@lexer::header {package kk;}\n" +
|
||||
"\n" +
|
||||
"scope Blort { int x; }\n" +
|
||||
"\n" +
|
||||
"a[int i] returns [foo f] : X ID a[3] b[34] q ;\n" +
|
||||
"b returns [int g] : Y 'y' 'if' a ;\n" +
|
||||
"a : FJKD ;\n" +
|
||||
"\n" +
|
||||
"ID : 'a'..'z'+ ID ;",
|
||||
// YIELDS
|
||||
"error(49): A.g:2:10: illegal option opt\n" +
|
||||
"error(59): A.g:11:6: scope Blort redefinition\n" +
|
||||
"error(18): A.g:15:0: rule a redefinition\n" +
|
||||
"error(58): A.g:7:1: redefinition of members action\n" +
|
||||
"error(58): A.g:9:1: redefinition of header action\n" +
|
||||
"error(72): A.g:3:19: cannot alias X; token name already defined\n" +
|
||||
"error(72): A.g:3:26: cannot alias Y; token name already assigned to 'y'\n" +
|
||||
"error(72): A.g:3:36: cannot alias Z; token name already defined\n" +
|
||||
"error(46): A.g:13:37: rule b has no defined parameters\n" +
|
||||
"error(23): A.g:13:43: reference to undefined rule: q\n" +
|
||||
"error(45): A.g:14:31: missing parameter(s) on rule reference: a\n"
|
||||
};
|
||||
|
||||
static String[] B = {
|
||||
// INPUT
|
||||
"parser grammar B;\n" +
|
||||
"tokens { X='x'; Y; }\n" +
|
||||
"scope s { int i; }\n" +
|
||||
"\n" +
|
||||
"a : s=ID b+=ID X=ID '.' ;\n" +
|
||||
"\n" +
|
||||
"b : x=ID x+=ID ;\n" +
|
||||
"\n" +
|
||||
"s : FOO ;",
|
||||
// YIELDS
|
||||
"error(25): B.g:2:9: can't assign string value to token name X in non-combined grammar\n" +
|
||||
"error(34): B.g:9:0: symbol s conflicts with global dynamic scope with same name\n" +
|
||||
"error(35): B.g:5:9: label b conflicts with rule with same name\n" +
|
||||
"error(34): B.g:5:4: symbol s conflicts with global dynamic scope with same name\n" +
|
||||
"error(36): B.g:5:15: label X conflicts with token with same name\n" +
|
||||
"error(41): B.g:7:9: label x type mismatch with previous definition: TOKEN_LIST_LABEL!=TOKEN_LABEL\n"
|
||||
};
|
||||
|
||||
static String[] C = {
|
||||
// INPUT
|
||||
"grammar C;\n"+
|
||||
"options {output=AST;}\n"+
|
||||
"a : A x=b y+=Z 'hi' -> ID A r $foo $x b $y+ 'hi' 'eh?'\n"+
|
||||
" | ID -> $x A ID // shouldn't see these refs from other alt ('cept ID)\n"+
|
||||
" ;\n"+
|
||||
"b : B ;\n"+
|
||||
"A : 'a';",
|
||||
// YIELDS
|
||||
"error(51): C.g:3:28: reference to rewrite element r not found to left of ->\n" +
|
||||
"error(51): C.g:3:30: reference to rewrite element foo not found to left of ->\n" +
|
||||
"error(51): C.g:3:49: reference to rewrite element 'eh?' not found to left of ->\n" +
|
||||
"error(51): C.g:4:10: reference to rewrite element x not found to left of ->\n"
|
||||
};
|
||||
|
||||
static String[] D = {
|
||||
// INPUT
|
||||
"parser grammar D;\n" +
|
||||
"a[int j] \n" +
|
||||
"scope { int i; }\n" +
|
||||
" : i=ID j=ID ;\n" +
|
||||
"\n" +
|
||||
"b[int i] returns [int i] : ID ;\n" +
|
||||
"\n" +
|
||||
"c[int i] returns [String k]\n" +
|
||||
"scope { int i; int c; int k; }\n" +
|
||||
" : ID ;",
|
||||
|
||||
// YIELDS
|
||||
"error(38): D.g:4:21: label j conflicts with rule a's return value or parameter with same name\n" +
|
||||
"error(37): D.g:4:16: label i conflicts with rule a's dynamically-scoped attribute with same name\n" +
|
||||
"error(42): D.g:6:0: rule b's argument i conflicts a return value with same name\n" +
|
||||
"error(40): D.g:9:6: rule c's dynamically-scoped attribute i conflicts with c's return value or parameter with same name\n" +
|
||||
"error(39): D.g:9:6: rule c's dynamically-scoped attribute c conflicts with the rule name\n" +
|
||||
"error(40): D.g:9:6: rule c's dynamically-scoped attribute k conflicts with c's return value or parameter with same name\n"
|
||||
};
|
||||
|
||||
@Test public void testA() { super.testErrors(A, false); }
|
||||
@Test public void testB() { super.testErrors(B, false); }
|
||||
@Test public void testC() { super.testErrors(C, false); }
|
||||
@Test public void testD() { super.testErrors(D, false); }
|
||||
}
|
|
@@ -0,0 +1,143 @@
|
|||
package org.antlr.v4.test;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestSyntaxErrors extends BaseTest {
|
||||
static String[] A = {
|
||||
// INPUT
|
||||
"grammar A;\n" +
|
||||
"",
|
||||
// YIELDS
|
||||
"error(63): A.g::: grammar A has no rules\n",
|
||||
|
||||
"A;",
|
||||
"error(17): <string>:1:0: 'A' came as a complete surprise to me\n",
|
||||
|
||||
"grammar ;",
|
||||
"error(17): <string>:1:8: ';' came as a complete surprise to me while looking for an identifier\n",
|
||||
|
||||
"grammar A\n" +
|
||||
"a : ID ;\n",
|
||||
"error(17): <string>:2:0: missing SEMI at 'a'\n",
|
||||
|
||||
"grammar A;\n" +
|
||||
"a : ID ;;\n"+
|
||||
"b : B ;",
|
||||
"error(17): A.g:2:8: ';' came as a complete surprise to me\n",
|
||||
|
||||
"grammar A;;\n" +
|
||||
"a : ID ;\n",
|
||||
"error(17): A;.g:1:10: ';' came as a complete surprise to me\n",
|
||||
|
||||
"grammar A;\n" +
|
||||
"a @init : ID ;\n",
|
||||
"error(17): A.g:2:8: mismatched input ':' expecting ACTION while matching rule preamble\n",
|
||||
|
||||
"grammar A;\n" +
|
||||
"a ( A | B ) D ;\n" +
|
||||
"b : B ;",
|
||||
"error(17): A.g:2:3: '(' came as a complete surprise to me while matching rule preamble\n" +
|
||||
"error(17): A.g:2:11: mismatched input ')' expecting SEMI while matching a rule\n" +
|
||||
"error(17): A.g:2:15: ';' came as a complete surprise to me while matching rule preamble\n",
|
||||
};
|
||||
|
||||
@Test public void testA() { super.testErrors(A, true); }
|
||||
|
||||
@Test public void testExtraColon() {
|
||||
String[] pair = new String[] {
|
||||
"grammar A;\n" +
|
||||
"a : : A ;\n" +
|
||||
"b : B ;",
|
||||
"error(17): A.g:2:4: ':' came as a complete surprise to me while matching alternative\n",
|
||||
};
|
||||
super.testErrors(pair, true);
|
||||
}
|
||||
|
||||
@Test public void testMissingRuleSemi() {
|
||||
String[] pair = new String[] {
|
||||
"grammar A;\n" +
|
||||
"a : A \n" +
|
||||
"b : B ;",
|
||||
"error(17): A.g:3:0: unterminated rule (missing ';') detected at 'b :' while looking for rule element\n",
|
||||
};
|
||||
super.testErrors(pair, true);
|
||||
}
|
||||
|
||||
@Test public void testMissingRuleSemi2() {
|
||||
String[] pair = new String[] {
|
||||
"lexer grammar A;\n" +
|
||||
"A : 'a' \n" +
|
||||
"B : 'b' ;",
|
||||
"error(17): A.g:3:0: unterminated rule (missing ';') detected at 'B :' while looking for rule element\n",
|
||||
};
|
||||
super.testErrors(pair, true);
|
||||
}
|
||||
|
||||
@Test public void testMissingRuleSemi3() {
|
||||
String[] pair = new String[] {
|
||||
"grammar A;\n" +
|
||||
"a : A \n" +
|
||||
"b[int i] returns [int y] : B ;",
|
||||
"error(17): A.g:3:9: unterminated rule (missing ';') detected at 'returns int y' while looking for rule element\n"
|
||||
};
|
||||
super.testErrors(pair, true);
|
||||
}
|
||||
|
||||
@Test public void testMissingRuleSemi4() {
|
||||
String[] pair = new String[] {
|
||||
"grammar A;\n" +
|
||||
"a : b \n" +
|
||||
" catch [Exception e] {...}\n" +
|
||||
"b : B ;\n",
|
||||
|
||||
"error(17): A.g:2:4: unterminated rule (missing ';') detected at 'b catch' while looking for rule element\n"
|
||||
};
|
||||
super.testErrors(pair, true);
|
||||
}
|
||||
|
||||
@Test public void testMissingRuleSemi5() {
|
||||
String[] pair = new String[] {
|
||||
"grammar A;\n" +
|
||||
"a : A \n" +
|
||||
" catch [Exception e] {...}\n",
|
||||
|
||||
"error(17): A.g:2:4: unterminated rule (missing ';') detected at 'A catch' while looking for rule element\n"
|
||||
};
|
||||
super.testErrors(pair, true);
|
||||
}
|
||||
|
||||
@Test public void testBadRulePrequelStart() {
|
||||
String[] pair = new String[] {
|
||||
"grammar A;\n" +
|
||||
"a @ options {k=1;} : A ;\n" +
|
||||
"b : B ;",
|
||||
|
||||
"error(17): A.g:2:4: 'options {' came as a complete surprise to me while looking for an identifier\n"
|
||||
};
|
||||
super.testErrors(pair, true);
|
||||
}
|
||||
|
||||
@Test public void testBadRulePrequelStart2() {
|
||||
String[] pair = new String[] {
|
||||
"grammar A;\n" +
|
||||
"a } : A ;\n" +
|
||||
"b : B ;",
|
||||
|
||||
"error(17): A.g:2:2: '}' came as a complete surprise to me while matching rule preamble\n"
|
||||
};
|
||||
super.testErrors(pair, true);
|
||||
}
|
||||
|
||||
@Test public void testModeInParser() {
|
||||
String[] pair = new String[] {
|
||||
"grammar A;\n" +
|
||||
"a : A ;\n" +
|
||||
"mode foo;\n" +
|
||||
"b : B ;",
|
||||
|
||||
"error(87): A.g:3:5: lexical modes are only allowed in lexer grammars\n"
|
||||
};
|
||||
super.testErrors(pair, true);
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,207 @@
|
|||
package org.antlr.v4.test;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.tool.*;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
public class TestTokenTypeAssignment extends BaseTest {
|
||||
|
||||
@Test
|
||||
public void testParserSimpleTokens() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar t;\n"+
|
||||
"a : A | B;\n" +
|
||||
"b : C ;");
|
||||
String rules = "a, b";
|
||||
String tokenNames = "A, B, C";
|
||||
checkSymbols(g, rules, tokenNames);
|
||||
}
|
||||
|
||||
@Test public void testParserTokensSection() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"parser grammar t;\n" +
|
||||
"tokens {\n" +
|
||||
" C;\n" +
|
||||
" D;" +
|
||||
"}\n"+
|
||||
"a : A | B;\n" +
|
||||
"b : C ;");
|
||||
String rules = "a, b";
|
||||
String tokenNames = "A, B, C, D";
|
||||
checkSymbols(g, rules, tokenNames);
|
||||
}
|
||||
|
||||
@Test public void testLexerTokensSection() throws Exception {
|
||||
LexerGrammar g = new LexerGrammar(
|
||||
"lexer grammar t;\n" +
|
||||
"tokens {\n" +
|
||||
" C;\n" +
|
||||
" D;" +
|
||||
"}\n"+
|
||||
"A : 'a';\n" +
|
||||
"C : 'c' ;");
|
||||
String rules = "A, C";
|
||||
String tokenNames = "A, C, D";
|
||||
checkSymbols(g, rules, tokenNames);
|
||||
}
|
||||
|
||||
@Test public void testTokensSectionWithAssignmentSection() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"tokens {\n" +
|
||||
" C='c';\n" +
|
||||
" D;" +
|
||||
"}\n"+
|
||||
"a : A | B;\n" +
|
||||
"b : C ;");
|
||||
String rules = "a, b";
|
||||
String tokenNames = "A, B, C, D, 'c'";
|
||||
checkSymbols(g, rules, tokenNames);
|
||||
}
|
||||
|
||||
@Test public void testCombinedGrammarLiterals() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n"+
|
||||
"a : 'begin' b 'end';\n" +
|
||||
"b : C ';' ;\n" +
|
||||
"ID : 'a' ;\n" +
|
||||
"FOO : 'foo' ;\n" + // "foo" is not a token name
|
||||
"C : 'c' ;\n"); // nor is 'c'
|
||||
String rules = "a, b";
|
||||
String tokenNames = "C, FOO, ID, 'begin', 'end', ';'";
|
||||
checkSymbols(g, rules, tokenNames);
|
||||
}
|
||||
|
||||
@Test public void testLiteralInParserAndLexer() throws Exception {
|
||||
// 'x' is token and char in lexer rule
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n" +
|
||||
"a : 'x' E ; \n" +
|
||||
"E: 'x' '0' ;\n");
|
||||
|
||||
Tool antlr = new Tool();
|
||||
antlr.process(g);
|
||||
|
||||
String literals = "['x']";
|
||||
String foundLiterals = g.stringLiteralToTypeMap.keySet().toString();
|
||||
assertEquals(literals, foundLiterals);
|
||||
|
||||
foundLiterals = g.implicitLexer.stringLiteralToTypeMap.keySet().toString();
|
||||
assertEquals("['x']", foundLiterals); // pushed in lexer from parser
|
||||
|
||||
String[] typeToTokenName = g.getTokenNames();
|
||||
Set<String> tokens = new HashSet<String>();
|
||||
for (String t : typeToTokenName) if ( t!=null ) tokens.add(t);
|
||||
assertEquals("[E]", tokens.toString());
|
||||
}
|
||||
|
||||
@Test public void testCombinedGrammarWithRefToLiteralButNoTokenIDRef() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n"+
|
||||
"a : 'a' ;\n" +
|
||||
"A : 'a' ;\n");
|
||||
String rules = "a";
|
||||
String tokenNames = "A, 'a'";
|
||||
checkSymbols(g, rules, tokenNames);
|
||||
}
|
||||
|
||||
@Test public void testSetDoesNotMissTokenAliases() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n"+
|
||||
"a : 'a'|'b' ;\n" +
|
||||
"A : 'a' ;\n" +
|
||||
"B : 'b' ;\n");
|
||||
String rules = "a";
|
||||
String tokenNames = "A, 'a', B, 'b'";
|
||||
checkSymbols(g, rules, tokenNames);
|
||||
}
|
||||
|
||||
// T E S T L I T E R A L E S C A P E S
|
||||
|
||||
@Test public void testParserCharLiteralWithEscape() throws Exception {
|
||||
Grammar g = new Grammar(
|
||||
"grammar t;\n"+
|
||||
"a : '\\n';\n");
|
||||
Tool antlr = new Tool();
|
||||
antlr.process(g);
|
||||
Set literals = g.stringLiteralToTypeMap.keySet();
|
||||
// must store literals how they appear in the antlr grammar
|
||||
assertEquals("'\\n'", literals.toArray()[0]);
|
||||
}
|
||||
|
||||
@Test public void testTokenInTokensSectionAndTokenRuleDef() throws Exception {
|
||||
// this must return A not I to the parser; calling a nonfragment rule
|
||||
// from a nonfragment rule does not set the overall token.
|
||||
String grammar =
|
||||
"grammar P;\n" +
|
||||
"tokens { B='}'; }\n"+
|
||||
"a : A B {System.out.println(state.input);} ;\n"+
|
||||
"A : 'a' ;\n" +
|
||||
"B : '}' ;\n"+
|
||||
"WS : (' '|'\\n') {skip();} ;";
|
||||
String found = execParser("P.g", grammar, "PParser", "PLexer",
|
||||
"a", "a}", false);
|
||||
assertEquals("a}\n", found);
|
||||
}
|
||||
|
||||
@Test public void testTokenInTokensSectionAndTokenRuleDef2() throws Exception {
|
||||
// this must return A not I to the parser; calling a nonfragment rule
|
||||
// from a nonfragment rule does not set the overall token.
|
||||
String grammar =
|
||||
"grammar P;\n" +
|
||||
"tokens { B='}'; }\n"+
|
||||
"a : A '}' {System.out.println(state.input);} ;\n"+
|
||||
"A : 'a' ;\n" +
|
||||
"B : '}' {/* */} ;\n"+
|
||||
"WS : (' '|'\\n') {skip();} ;";
|
||||
String found = execParser("P.g", grammar, "PParser", "PLexer",
|
||||
"a", "a}", false);
|
||||
assertEquals("a}\n", found);
|
||||
}
|
||||
|
||||
protected void checkSymbols(Grammar g,
|
||||
String rulesStr,
|
||||
String tokensStr)
|
||||
throws Exception
|
||||
{
|
||||
Tool antlr = new Tool();
|
||||
antlr.process(g);
|
||||
|
||||
String[] typeToTokenName = g.getTokenNames();
|
||||
Set<String> tokens = new HashSet<String>();
|
||||
for (String t : typeToTokenName) if ( t!=null ) tokens.add(t);
|
||||
|
||||
// make sure expected tokens are there
|
||||
StringTokenizer st = new StringTokenizer(tokensStr, ", ");
|
||||
while ( st.hasMoreTokens() ) {
|
||||
String tokenName = st.nextToken();
|
||||
assertTrue("token "+tokenName+" expected, but was undefined",
|
||||
g.getTokenType(tokenName) != Token.INVALID_TYPE);
|
||||
tokens.remove(tokenName);
|
||||
}
|
||||
// make sure there are not any others (other than <EOF> etc...)
|
||||
for (Iterator iter = tokens.iterator(); iter.hasNext();) {
|
||||
String tokenName = (String) iter.next();
|
||||
assertTrue("unexpected token name "+tokenName,
|
||||
g.getTokenType(tokenName) < Token.MIN_TOKEN_TYPE);
|
||||
}
|
||||
|
||||
// make sure all expected rules are there
|
||||
st = new StringTokenizer(rulesStr, ", ");
|
||||
int n = 0;
|
||||
while ( st.hasMoreTokens() ) {
|
||||
String ruleName = st.nextToken();
|
||||
assertNotNull("rule "+ruleName+" expected", g.getRule(ruleName));
|
||||
n++;
|
||||
}
|
||||
//System.out.println("rules="+rules);
|
||||
// make sure there are no extra rules
|
||||
assertEquals("number of rules mismatch; expecting "+n+"; found "+g.rules.size(),
|
||||
n, g.rules.size());
|
||||
|
||||
}
|
||||
|
||||
}
|