forked from jasder/antlr

commit 345bd14c00

Merge pull request #13 from antlr/intermediate-test-generation

Move to new testing mechanism
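At a glance, the commit bumps the C# target's reported version from 4.5 to 4.5.1, regenerates the C# runtime tests so that they extend the new org.antlr.v4.test.runtime.csharp.BaseTest, and adds the MSBuild project templates plus the Java-side BaseTest harness that generates, builds and runs the C# test projects. A minimal sketch of what one regenerated lexer test looks like under that scheme (the class name, grammar and expected token dump here are illustrative, not copied from any specific test in the diff):

package org.antlr.v4.test.rt.csharp;

import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

/* this file and method are generated, any edit will be overwritten by the next generation */
public class TestExampleLexer extends org.antlr.v4.test.runtime.csharp.BaseTest {

    @Test
    public void testSingleToken() throws Exception {
        StringBuilder sb = new StringBuilder();
        sb.append("lexer grammar L;\n");
        sb.append("A : 'a';\n");               // illustrative one-rule grammar
        String grammar = sb.toString();
        // execLexer(...) comes from the new BaseTest: it runs the ANTLR tool with
        // -Dlanguage=CSharp, writes a C# test project, builds it and captures its output
        String found = execLexer("L.g4", grammar, "L", "a", false);
        assertEquals("[@0,0:0='a',<1>,1:0]\n" +
            "[@1,1:0='<EOF>',<-1>,1:1]\n", found);
        assertNull(this.stderrDuringParse);
    }
}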
@@ -49,7 +49,7 @@ public class CSharpTarget extends Target {
 	@Override
 	public String getVersion() {
-		return "4.5"; // crossing fingers that it's close enough.
+		return "4.5.1"; // crossing fingers that it's close enough.
 	}

 	@Override
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestCompositeLexers extends BaseTest {
+public class TestCompositeLexers extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test

@@ -20,10 +20,10 @@ public class TestCompositeLexers extends BaseTest {
 		sb.append("WS : (' '|'\\n') -> skip ;\n");
 		String grammar = sb.toString();
 		String found = execLexer("M.g4", grammar, "M", "abc", false);
-		assertEquals("S.A\n" +
-			"[@0,0:0='a',<3>,1:0]\n" +
-			"[@1,1:1='b',<1>,1:1]\n" +
-			"[@2,2:2='c',<4>,1:2]\n" +
+		assertEquals("S.A\n" +
+			"[@0,0:0='a',<3>,1:0]\n" +
+			"[@1,1:1='b',<1>,1:1]\n" +
+			"[@2,2:2='c',<4>,1:2]\n" +
 			"[@3,3:2='<EOF>',<-1>,1:3]\n", found);
 		assertNull(this.stderrDuringParse);
 	}

@@ -44,11 +44,11 @@ public class TestCompositeLexers extends BaseTest {
 		sb.append("WS : (' '|'\\n') -> skip ;\n");
 		String grammar = sb.toString();
 		String found = execLexer("M.g4", grammar, "M", "ab", false);
-		assertEquals("M.A\n" +
-			"[@0,0:1='ab',<1>,1:0]\n" +
+		assertEquals("M.A\n" +
+			"[@0,0:1='ab',<1>,1:0]\n" +
 			"[@1,2:1='<EOF>',<-1>,1:2]\n", found);
 		assertNull(this.stderrDuringParse);
 	}


-}
+}
@@ -5,7 +5,7 @@ import org.junit.Test;
 import org.antlr.v4.test.tool.ErrorQueue;
 import org.antlr.v4.tool.Grammar;

-public class TestCompositeParsers extends BaseTest {
+public class TestCompositeParsers extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test

@@ -337,4 +337,4 @@ public class TestCompositeParsers extends BaseTest {
 	}


-}
+}
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestFullContextParsing extends BaseTest {
+public class TestFullContextParsing extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test

@@ -222,4 +222,4 @@ public class TestFullContextParsing extends BaseTest {
 	}


-}
+}
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestLeftRecursion extends BaseTest {
+public class TestLeftRecursion extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	String testSimple(String input) throws Exception {

@@ -1161,4 +1161,4 @@ public class TestLeftRecursion extends BaseTest {
 	}


-}
+}
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestLexerErrors extends BaseTest {
+public class TestLexerErrors extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test

@@ -26,7 +26,7 @@ public class TestLexerErrors extends BaseTest {
 		sb.append("WS : [ \\t\\r\\n]+ -> skip;\n");
 		String grammar = sb.toString();
 		String found = execLexer("L.g4", grammar, "L", "[\"foo\"]", false);
-		assertEquals("[@0,0:6='[\"foo\"]',<1>,1:0]\n" +
+		assertEquals("[@0,0:6='[\"foo\"]',<1>,1:0]\n" +
 			"[@1,7:6='<EOF>',<-1>,1:7]\n", found);
 		assertNull(this.stderrDuringParse);
 	}

@@ -54,7 +54,7 @@ public class TestLexerErrors extends BaseTest {
 		sb.append("WS : [ \\r\\n\\t]+ -> skip;\n");
 		String grammar = sb.toString();
 		String found = execLexer("L.g4", grammar, "L", "{ { } }", false);
-		assertEquals("[@0,0:6='{ { } }',<1>,1:0]\n" +
+		assertEquals("[@0,0:6='{ { } }',<1>,1:0]\n" +
 			"[@1,7:6='<EOF>',<-1>,1:7]\n", found);
 		assertNull(this.stderrDuringParse);
 	}

@@ -80,7 +80,7 @@ public class TestLexerErrors extends BaseTest {
 		sb.append("A : 'a' 'b' ;\n");
 		String grammar = sb.toString();
 		String found = execLexer("L.g4", grammar, "L", "abx", false);
-		assertEquals("[@0,0:1='ab',<1>,1:0]\n" +
+		assertEquals("[@0,0:1='ab',<1>,1:0]\n" +
 			"[@1,3:2='<EOF>',<-1>,1:3]\n", found);
 		assertEquals("line 1:2 token recognition error at: 'x'\n", this.stderrDuringParse);
 	}

@@ -105,7 +105,7 @@ public class TestLexerErrors extends BaseTest {
 		sb.append("A : 'a' 'b' ;\n");
 		String grammar = sb.toString();
 		String found = execLexer("L.g4", grammar, "L", "abax", false);
-		assertEquals("[@0,0:1='ab',<1>,1:0]\n" +
+		assertEquals("[@0,0:1='ab',<1>,1:0]\n" +
 			"[@1,4:3='<EOF>',<-1>,1:4]\n", found);
 		assertEquals("line 1:2 token recognition error at: 'ax'\n", this.stderrDuringParse);
 	}

@@ -119,8 +119,8 @@ public class TestLexerErrors extends BaseTest {
 		sb.append("B : 'abc' ;\n");
 		String grammar = sb.toString();
 		String found = execLexer("L.g4", grammar, "L", "ababx", false);
-		assertEquals("[@0,0:1='ab',<1>,1:0]\n" +
-			"[@1,2:3='ab',<1>,1:2]\n" +
+		assertEquals("[@0,0:1='ab',<1>,1:0]\n" +
+			"[@1,2:3='ab',<1>,1:2]\n" +
 			"[@2,5:4='<EOF>',<-1>,1:5]\n", found);
 		assertEquals("line 1:4 token recognition error at: 'x'\n", this.stderrDuringParse);
 	}

@@ -135,8 +135,8 @@ public class TestLexerErrors extends BaseTest {
 		sb.append("C : 'abcd' ;\n");
 		String grammar = sb.toString();
 		String found = execLexer("L.g4", grammar, "L", "ababcx", false);
-		assertEquals("[@0,0:1='ab',<1>,1:0]\n" +
-			"[@1,2:4='abc',<2>,1:2]\n" +
+		assertEquals("[@0,0:1='ab',<1>,1:0]\n" +
+			"[@1,2:4='abc',<2>,1:2]\n" +
 			"[@2,6:5='<EOF>',<-1>,1:6]\n", found);
 		assertEquals("line 1:5 token recognition error at: 'x'\n", this.stderrDuringParse);
 	}

@@ -164,12 +164,12 @@ public class TestLexerErrors extends BaseTest {
 		sb.append("ID : [a-z]+;\n");
 		String grammar = sb.toString();
 		String found = execLexer("L.g4", grammar, "LLexer", "x : x", false);
-		assertEquals("[@0,0:0='x',<3>,1:0]\n" +
-			"[@1,2:2=':',<1>,1:2]\n" +
-			"[@2,4:4='x',<3>,1:4]\n" +
+		assertEquals("[@0,0:0='x',<3>,1:0]\n" +
+			"[@1,2:2=':',<1>,1:2]\n" +
+			"[@2,4:4='x',<3>,1:4]\n" +
 			"[@3,5:4='<EOF>',<-1>,1:5]\n", found);
 		assertEquals("line 1:1 token recognition error at: ' '\nline 1:3 token recognition error at: ' '\n", this.stderrDuringParse);
 	}


-}
+}
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestLexerExec extends BaseTest {
+public class TestLexerExec extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test
@ -12,7 +12,7 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("QUOTE : '\"' ; // make sure this compiles\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "\"", false);
|
||||
assertEquals("[@0,0:0='\"',<1>,1:0]\n" +
|
||||
assertEquals("[@0,0:0='\"',<1>,1:0]\n" +
|
||||
"[@1,1:0='<EOF>',<-1>,1:1]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -27,9 +27,9 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n') -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "34 -21 3", false);
|
||||
assertEquals("[@0,0:1='34',<2>,1:0]\n" +
|
||||
"[@1,3:5='-21',<1>,1:3]\n" +
|
||||
"[@2,7:7='3',<2>,1:7]\n" +
|
||||
assertEquals("[@0,0:1='34',<2>,1:0]\n" +
|
||||
"[@1,3:5='-21',<1>,1:3]\n" +
|
||||
"[@2,7:7='3',<2>,1:7]\n" +
|
||||
"[@3,8:7='<EOF>',<-1>,1:8]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -46,10 +46,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\t] -> skip;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "\\ / \\/ /\\", false);
|
||||
assertEquals("[@0,0:0='\\',<1>,1:0]\n" +
|
||||
"[@1,2:2='/',<2>,1:2]\n" +
|
||||
"[@2,4:5='\\/',<3>,1:4]\n" +
|
||||
"[@3,7:8='/\\',<4>,1:7]\n" +
|
||||
assertEquals("[@0,0:0='\\',<1>,1:0]\n" +
|
||||
"[@1,2:2='/',<2>,1:2]\n" +
|
||||
"[@2,4:5='\\/',<3>,1:4]\n" +
|
||||
"[@3,7:8='/\\',<4>,1:7]\n" +
|
||||
"[@4,9:8='<EOF>',<-1>,1:9]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -67,8 +67,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("SEPARATOR: '!';\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "-.-.-!", false);
|
||||
assertEquals("[@0,0:4='-.-.-',<1>,1:0]\n" +
|
||||
"[@1,5:5='!',<3>,1:5]\n" +
|
||||
assertEquals("[@0,0:4='-.-.-',<1>,1:0]\n" +
|
||||
"[@1,5:5='!',<3>,1:5]\n" +
|
||||
"[@2,6:5='<EOF>',<-1>,1:6]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -81,8 +81,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("STRING : '\"' ('\"\"' | .)*? '\"';\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "\"hi\"\"mom\"", false);
|
||||
assertEquals("[@0,0:3='\"hi\"',<1>,1:0]\n" +
|
||||
"[@1,4:8='\"mom\"',<1>,1:4]\n" +
|
||||
assertEquals("[@0,0:3='\"hi\"',<1>,1:0]\n" +
|
||||
"[@1,4:8='\"mom\"',<1>,1:4]\n" +
|
||||
"[@2,9:8='<EOF>',<-1>,1:9]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -95,7 +95,7 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("STRING : '\"' ('\"\"' | .)+? '\"';\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "\"\"\"mom\"", false);
|
||||
assertEquals("[@0,0:6='\"\"\"mom\"',<1>,1:0]\n" +
|
||||
assertEquals("[@0,0:6='\"\"\"mom\"',<1>,1:0]\n" +
|
||||
"[@1,7:6='<EOF>',<-1>,1:7]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -109,7 +109,7 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\t')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false);
|
||||
assertEquals("[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" +
|
||||
assertEquals("[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" +
|
||||
"[@1,14:13='<EOF>',<-1>,3:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -123,8 +123,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\t')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false);
|
||||
assertEquals("[@0,0:6='//blah\\n',<1>,1:0]\n" +
|
||||
"[@1,7:13='//blah\\n',<1>,2:0]\n" +
|
||||
assertEquals("[@0,0:6='//blah\\n',<1>,1:0]\n" +
|
||||
"[@1,7:13='//blah\\n',<1>,2:0]\n" +
|
||||
"[@2,14:13='<EOF>',<-1>,3:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -138,7 +138,7 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\t')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false);
|
||||
assertEquals("[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" +
|
||||
assertEquals("[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" +
|
||||
"[@1,14:13='<EOF>',<-1>,3:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -152,8 +152,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\t')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false);
|
||||
assertEquals("[@0,0:6='//blah\\n',<1>,1:0]\n" +
|
||||
"[@1,7:13='//blah\\n',<1>,2:0]\n" +
|
||||
assertEquals("[@0,0:6='//blah\\n',<1>,1:0]\n" +
|
||||
"[@1,7:13='//blah\\n',<1>,2:0]\n" +
|
||||
"[@2,14:13='<EOF>',<-1>,3:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -167,7 +167,7 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\t')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false);
|
||||
assertEquals("[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" +
|
||||
assertEquals("[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" +
|
||||
"[@1,14:13='<EOF>',<-1>,3:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -181,8 +181,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\t')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false);
|
||||
assertEquals("[@0,0:6='//blah\\n',<1>,1:0]\n" +
|
||||
"[@1,7:13='//blah\\n',<1>,2:0]\n" +
|
||||
assertEquals("[@0,0:6='//blah\\n',<1>,1:0]\n" +
|
||||
"[@1,7:13='//blah\\n',<1>,2:0]\n" +
|
||||
"[@2,14:13='<EOF>',<-1>,3:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -196,10 +196,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "/* ick */\n/* /* */\n/* /*nested*/ */\n", false);
|
||||
assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" +
|
||||
"[@1,9:9='\\n',<2>,1:9]\n" +
|
||||
"[@2,10:34='/* /* */\\n/* /*nested*/ */',<1>,2:0]\n" +
|
||||
"[@3,35:35='\\n',<2>,3:16]\n" +
|
||||
assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" +
|
||||
"[@1,9:9='\\n',<2>,1:9]\n" +
|
||||
"[@2,10:34='/* /* */\\n/* /*nested*/ */',<1>,2:0]\n" +
|
||||
"[@3,35:35='\\n',<2>,3:16]\n" +
|
||||
"[@4,36:35='<EOF>',<-1>,4:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -213,10 +213,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "/* ick */x\n/* /* */x\n/* /*nested*/ */x\n", false);
|
||||
assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" +
|
||||
"[@1,10:10='\\n',<2>,1:10]\n" +
|
||||
"[@2,11:36='/* /* */x\\n/* /*nested*/ */',<1>,2:0]\n" +
|
||||
"[@3,38:38='\\n',<2>,3:17]\n" +
|
||||
assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" +
|
||||
"[@1,10:10='\\n',<2>,1:10]\n" +
|
||||
"[@2,11:36='/* /* */x\\n/* /*nested*/ */',<1>,2:0]\n" +
|
||||
"[@3,38:38='\\n',<2>,3:17]\n" +
|
||||
"[@4,39:38='<EOF>',<-1>,4:0]\n", found);
|
||||
assertEquals("line 1:9 token recognition error at: 'x'\nline 3:16 token recognition error at: 'x'\n", this.stderrDuringParse);
|
||||
}
|
||||
|
@ -230,10 +230,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "/* ick */\n/* /* */\n/* /*nested*/ */\n", false);
|
||||
assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" +
|
||||
"[@1,9:9='\\n',<2>,1:9]\n" +
|
||||
"[@2,10:34='/* /* */\\n/* /*nested*/ */',<1>,2:0]\n" +
|
||||
"[@3,35:35='\\n',<2>,3:16]\n" +
|
||||
assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" +
|
||||
"[@1,9:9='\\n',<2>,1:9]\n" +
|
||||
"[@2,10:34='/* /* */\\n/* /*nested*/ */',<1>,2:0]\n" +
|
||||
"[@3,35:35='\\n',<2>,3:16]\n" +
|
||||
"[@4,36:35='<EOF>',<-1>,4:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -247,10 +247,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "/* ick */x\n/* /* */x\n/* /*nested*/ */x\n", false);
|
||||
assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" +
|
||||
"[@1,10:10='\\n',<2>,1:10]\n" +
|
||||
"[@2,11:36='/* /* */x\\n/* /*nested*/ */',<1>,2:0]\n" +
|
||||
"[@3,38:38='\\n',<2>,3:17]\n" +
|
||||
assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" +
|
||||
"[@1,10:10='\\n',<2>,1:10]\n" +
|
||||
"[@2,11:36='/* /* */x\\n/* /*nested*/ */',<1>,2:0]\n" +
|
||||
"[@3,38:38='\\n',<2>,3:17]\n" +
|
||||
"[@4,39:38='<EOF>',<-1>,4:0]\n", found);
|
||||
assertEquals("line 1:9 token recognition error at: 'x'\nline 3:16 token recognition error at: 'x'\n", this.stderrDuringParse);
|
||||
}
|
||||
|
@ -269,11 +269,11 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("J : .;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "ab", false);
|
||||
assertEquals("stuff0: \n" +
|
||||
"stuff1: a\n" +
|
||||
"stuff2: ab\n" +
|
||||
"ab\n" +
|
||||
"[@0,0:1='ab',<1>,1:0]\n" +
|
||||
assertEquals("stuff0: \n" +
|
||||
"stuff1: a\n" +
|
||||
"stuff2: ab\n" +
|
||||
"ab\n" +
|
||||
"[@0,0:1='ab',<1>,1:0]\n" +
|
||||
"[@1,2:1='<EOF>',<-1>,1:2]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -288,8 +288,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("J : .;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "ab", false);
|
||||
assertEquals("ab\n" +
|
||||
"[@0,0:1='ab',<1>,1:0]\n" +
|
||||
assertEquals("ab\n" +
|
||||
"[@0,0:1='ab',<1>,1:0]\n" +
|
||||
"[@1,2:1='<EOF>',<-1>,1:2]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -304,10 +304,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("J : . {Console.WriteLine(this.Text);};\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "ab", false);
|
||||
assertEquals("a\n" +
|
||||
"b\n" +
|
||||
"[@0,0:0='a',<1>,1:0]\n" +
|
||||
"[@1,1:1='b',<3>,1:1]\n" +
|
||||
assertEquals("a\n" +
|
||||
"b\n" +
|
||||
"[@0,0:0='a',<1>,1:0]\n" +
|
||||
"[@1,1:1='b',<3>,1:1]\n" +
|
||||
"[@2,2:1='<EOF>',<-1>,1:2]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -322,13 +322,13 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "end eend ending a", false);
|
||||
assertEquals("[@0,0:2='end',<1>,1:0]\n" +
|
||||
"[@1,3:3=' ',<3>,1:3]\n" +
|
||||
"[@2,4:7='eend',<2>,1:4]\n" +
|
||||
"[@3,8:8=' ',<3>,1:8]\n" +
|
||||
"[@4,9:14='ending',<2>,1:9]\n" +
|
||||
"[@5,15:15=' ',<3>,1:15]\n" +
|
||||
"[@6,16:16='a',<2>,1:16]\n" +
|
||||
assertEquals("[@0,0:2='end',<1>,1:0]\n" +
|
||||
"[@1,3:3=' ',<3>,1:3]\n" +
|
||||
"[@2,4:7='eend',<2>,1:4]\n" +
|
||||
"[@3,8:8=' ',<3>,1:8]\n" +
|
||||
"[@4,9:14='ending',<2>,1:9]\n" +
|
||||
"[@5,15:15=' ',<3>,1:15]\n" +
|
||||
"[@6,16:16='a',<2>,1:16]\n" +
|
||||
"[@7,17:16='<EOF>',<-1>,1:17]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -347,19 +347,19 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n')+;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "x 0 1 a.b a.l", false);
|
||||
assertEquals("[@0,0:0='x',<5>,1:0]\n" +
|
||||
"[@1,1:1=' ',<6>,1:1]\n" +
|
||||
"[@2,2:2='0',<2>,1:2]\n" +
|
||||
"[@3,3:3=' ',<6>,1:3]\n" +
|
||||
"[@4,4:4='1',<2>,1:4]\n" +
|
||||
"[@5,5:5=' ',<6>,1:5]\n" +
|
||||
"[@6,6:6='a',<5>,1:6]\n" +
|
||||
"[@7,7:7='.',<4>,1:7]\n" +
|
||||
"[@8,8:8='b',<5>,1:8]\n" +
|
||||
"[@9,9:9=' ',<6>,1:9]\n" +
|
||||
"[@10,10:10='a',<5>,1:10]\n" +
|
||||
"[@11,11:11='.',<4>,1:11]\n" +
|
||||
"[@12,12:12='l',<5>,1:12]\n" +
|
||||
assertEquals("[@0,0:0='x',<5>,1:0]\n" +
|
||||
"[@1,1:1=' ',<6>,1:1]\n" +
|
||||
"[@2,2:2='0',<2>,1:2]\n" +
|
||||
"[@3,3:3=' ',<6>,1:3]\n" +
|
||||
"[@4,4:4='1',<2>,1:4]\n" +
|
||||
"[@5,5:5=' ',<6>,1:5]\n" +
|
||||
"[@6,6:6='a',<5>,1:6]\n" +
|
||||
"[@7,7:7='.',<4>,1:7]\n" +
|
||||
"[@8,8:8='b',<5>,1:8]\n" +
|
||||
"[@9,9:9=' ',<6>,1:9]\n" +
|
||||
"[@10,10:10='a',<5>,1:10]\n" +
|
||||
"[@11,11:11='.',<4>,1:11]\n" +
|
||||
"[@12,12:12='l',<5>,1:12]\n" +
|
||||
"[@13,13:12='<EOF>',<-1>,1:13]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -373,7 +373,7 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("A : 'a';\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "", false);
|
||||
assertEquals("[@0,0:-1='<EOF>',<1>,1:0]\n" +
|
||||
assertEquals("[@0,0:-1='<EOF>',<1>,1:0]\n" +
|
||||
"[@1,0:-1='<EOF>',<-1>,1:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -402,7 +402,7 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("C : 'c';\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "a", false);
|
||||
assertEquals("[@0,0:0='a',<1>,1:0]\n" +
|
||||
assertEquals("[@0,0:0='a',<1>,1:0]\n" +
|
||||
"[@1,1:0='<EOF>',<-1>,1:1]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -416,10 +416,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\n\\u000D] -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "34\n 34", false);
|
||||
assertEquals("I\n" +
|
||||
"I\n" +
|
||||
"[@0,0:1='34',<1>,1:0]\n" +
|
||||
"[@1,4:5='34',<1>,2:1]\n" +
|
||||
assertEquals("I\n" +
|
||||
"I\n" +
|
||||
"[@0,0:1='34',<1>,1:0]\n" +
|
||||
"[@1,4:5='34',<1>,2:1]\n" +
|
||||
"[@2,6:5='<EOF>',<-1>,2:3]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -433,10 +433,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\n\\u000D]+ -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "34\n 34", false);
|
||||
assertEquals("I\n" +
|
||||
"I\n" +
|
||||
"[@0,0:1='34',<1>,1:0]\n" +
|
||||
"[@1,4:5='34',<1>,2:1]\n" +
|
||||
assertEquals("I\n" +
|
||||
"I\n" +
|
||||
"[@0,0:1='34',<1>,1:0]\n" +
|
||||
"[@1,4:5='34',<1>,2:1]\n" +
|
||||
"[@2,6:5='<EOF>',<-1>,2:3]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -450,8 +450,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\n\\u000D]+ -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "xaf", false);
|
||||
assertEquals("I\n" +
|
||||
"[@0,0:2='xaf',<1>,1:0]\n" +
|
||||
assertEquals("I\n" +
|
||||
"[@0,0:2='xaf',<1>,1:0]\n" +
|
||||
"[@1,3:2='<EOF>',<-1>,1:3]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -466,10 +466,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append(" \n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "a x", false);
|
||||
assertEquals("I\n" +
|
||||
"I\n" +
|
||||
"[@0,0:0='a',<1>,1:0]\n" +
|
||||
"[@1,2:2='x',<1>,1:2]\n" +
|
||||
assertEquals("I\n" +
|
||||
"I\n" +
|
||||
"[@0,0:0='a',<1>,1:0]\n" +
|
||||
"[@1,2:2='x',<1>,1:2]\n" +
|
||||
"[@2,3:2='<EOF>',<-1>,1:3]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -484,14 +484,14 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\n\\u0009\\r]+ -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "34\n 34 a2 abc \n ", false);
|
||||
assertEquals("I\n" +
|
||||
"I\n" +
|
||||
"ID\n" +
|
||||
"ID\n" +
|
||||
"[@0,0:1='34',<1>,1:0]\n" +
|
||||
"[@1,4:5='34',<1>,2:1]\n" +
|
||||
"[@2,7:8='a2',<2>,2:4]\n" +
|
||||
"[@3,10:12='abc',<2>,2:7]\n" +
|
||||
assertEquals("I\n" +
|
||||
"I\n" +
|
||||
"ID\n" +
|
||||
"ID\n" +
|
||||
"[@0,0:1='34',<1>,1:0]\n" +
|
||||
"[@1,4:5='34',<1>,2:1]\n" +
|
||||
"[@2,7:8='a2',<2>,2:4]\n" +
|
||||
"[@3,10:12='abc',<2>,2:7]\n" +
|
||||
"[@4,18:17='<EOF>',<-1>,3:3]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -505,8 +505,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\n\\u000D]+ -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "00\n", false);
|
||||
assertEquals("I\n" +
|
||||
"[@0,0:1='00',<1>,1:0]\n" +
|
||||
assertEquals("I\n" +
|
||||
"[@0,0:1='00',<1>,1:0]\n" +
|
||||
"[@1,3:2='<EOF>',<-1>,2:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -520,8 +520,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\u]+ -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "34 ", false);
|
||||
assertEquals("I\n" +
|
||||
"[@0,0:1='34',<1>,1:0]\n" +
|
||||
assertEquals("I\n" +
|
||||
"[@0,0:1='34',<1>,1:0]\n" +
|
||||
"[@1,3:2='<EOF>',<-1>,1:3]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -535,10 +535,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\u]+ -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "- ] ", false);
|
||||
assertEquals("DASHBRACK\n" +
|
||||
"DASHBRACK\n" +
|
||||
"[@0,0:0='-',<1>,1:0]\n" +
|
||||
"[@1,2:2=']',<1>,1:2]\n" +
|
||||
assertEquals("DASHBRACK\n" +
|
||||
"DASHBRACK\n" +
|
||||
"[@0,0:0='-',<1>,1:0]\n" +
|
||||
"[@1,2:2=']',<1>,1:2]\n" +
|
||||
"[@2,4:3='<EOF>',<-1>,1:4]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -552,8 +552,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\u]+ -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "9", false);
|
||||
assertEquals("A\n" +
|
||||
"[@0,0:0='9',<1>,1:0]\n" +
|
||||
assertEquals("A\n" +
|
||||
"[@0,0:0='9',<1>,1:0]\n" +
|
||||
"[@1,1:0='<EOF>',<-1>,1:1]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -567,8 +567,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\n\\t]+ -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "b\"a", false);
|
||||
assertEquals("A\n" +
|
||||
"[@0,0:2='b\"a',<1>,1:0]\n" +
|
||||
assertEquals("A\n" +
|
||||
"[@0,0:2='b\"a',<1>,1:0]\n" +
|
||||
"[@1,3:2='<EOF>',<-1>,1:3]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -582,8 +582,8 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("WS : [ \\n\\t]+ -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "b\"\\a", false);
|
||||
assertEquals("A\n" +
|
||||
"[@0,0:3='b\"\\a',<1>,1:0]\n" +
|
||||
assertEquals("A\n" +
|
||||
"[@0,0:3='b\"\\a',<1>,1:0]\n" +
|
||||
"[@1,4:3='<EOF>',<-1>,1:4]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -702,15 +702,15 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append(" ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("PositionAdjustingLexer.g4", grammar, "PositionAdjustingLexer", "tokens\ntokens {\nnotLabel\nlabel1 =\nlabel2 +=\nnotLabel\n", false);
|
||||
assertEquals("[@0,0:5='tokens',<6>,1:0]\n" +
|
||||
"[@1,7:12='tokens',<4>,2:0]\n" +
|
||||
"[@2,14:14='{',<3>,2:7]\n" +
|
||||
"[@3,16:23='notLabel',<6>,3:0]\n" +
|
||||
"[@4,25:30='label1',<5>,4:0]\n" +
|
||||
"[@5,32:32='=',<1>,4:7]\n" +
|
||||
"[@6,34:39='label2',<5>,5:0]\n" +
|
||||
"[@7,41:42='+=',<2>,5:7]\n" +
|
||||
"[@8,44:51='notLabel',<6>,6:0]\n" +
|
||||
assertEquals("[@0,0:5='tokens',<6>,1:0]\n" +
|
||||
"[@1,7:12='tokens',<4>,2:0]\n" +
|
||||
"[@2,14:14='{',<3>,2:7]\n" +
|
||||
"[@3,16:23='notLabel',<6>,3:0]\n" +
|
||||
"[@4,25:30='label1',<5>,4:0]\n" +
|
||||
"[@5,32:32='=',<1>,4:7]\n" +
|
||||
"[@6,34:39='label2',<5>,5:0]\n" +
|
||||
"[@7,41:42='+=',<2>,5:7]\n" +
|
||||
"[@8,44:51='notLabel',<6>,6:0]\n" +
|
||||
"[@9,53:52='<EOF>',<-1>,7:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -4723,7 +4723,7 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append("KW3999 : 'KW' '3999';\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "KW400", false);
|
||||
assertEquals("[@0,0:4='KW400',<402>,1:0]\n" +
|
||||
assertEquals("[@0,0:4='KW400',<402>,1:0]\n" +
|
||||
"[@1,5:4='<EOF>',<-1>,1:5]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -4743,10 +4743,10 @@ public class TestLexerExec extends BaseTest {
|
|||
sb.append(" EndString : '\\'' -> popMode;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "'xxx'", false);
|
||||
assertEquals("[@0,0:4=''xxx'',<1>,1:0]\n" +
|
||||
assertEquals("[@0,0:4=''xxx'',<1>,1:0]\n" +
|
||||
"[@1,5:4='<EOF>',<-1>,1:5]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestListeners extends BaseTest {
+public class TestListeners extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test

@@ -233,4 +233,4 @@ public class TestListeners extends BaseTest {
 	}


-}
+}
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestParseTrees extends BaseTest {
+public class TestParseTrees extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test

@@ -167,4 +167,4 @@ public class TestParseTrees extends BaseTest {
 	}


-}
+}
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestParserErrors extends BaseTest {
+public class TestParserErrors extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test

@@ -347,4 +347,4 @@ public class TestParserErrors extends BaseTest {
 	}


-}
+}
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestParserExec extends BaseTest {
+public class TestParserExec extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test

@@ -471,4 +471,4 @@ public class TestParserExec extends BaseTest {
 	}


-}
+}
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestSemPredEvalLexer extends BaseTest {
+public class TestSemPredEvalLexer extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test
@ -15,15 +15,15 @@ public class TestSemPredEvalLexer extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n') -> skip;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "enum abc", true);
|
||||
assertEquals("[@0,0:3='enum',<2>,1:0]\n" +
|
||||
"[@1,5:7='abc',<3>,1:5]\n" +
|
||||
"[@2,8:7='<EOF>',<-1>,1:8]\n" +
|
||||
"s0-' '->:s5=>4\n" +
|
||||
"s0-'a'->:s6=>3\n" +
|
||||
"s0-'e'->:s1=>3\n" +
|
||||
":s1=>3-'n'->:s2=>3\n" +
|
||||
":s2=>3-'u'->:s3=>3\n" +
|
||||
":s6=>3-'b'->:s6=>3\n" +
|
||||
assertEquals("[@0,0:3='enum',<2>,1:0]\n" +
|
||||
"[@1,5:7='abc',<3>,1:5]\n" +
|
||||
"[@2,8:7='<EOF>',<-1>,1:8]\n" +
|
||||
"s0-' '->:s5=>4\n" +
|
||||
"s0-'a'->:s6=>3\n" +
|
||||
"s0-'e'->:s1=>3\n" +
|
||||
":s1=>3-'n'->:s2=>3\n" +
|
||||
":s2=>3-'u'->:s3=>3\n" +
|
||||
":s6=>3-'b'->:s6=>3\n" +
|
||||
":s6=>3-'c'->:s6=>3\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -38,16 +38,16 @@ public class TestSemPredEvalLexer extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n') -> skip;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "enum abc enum", true);
|
||||
assertEquals("[@0,0:3='enum',<2>,1:0]\n" +
|
||||
"[@1,5:7='abc',<2>,1:5]\n" +
|
||||
"[@2,9:12='enum',<2>,1:9]\n" +
|
||||
"[@3,13:12='<EOF>',<-1>,1:13]\n" +
|
||||
"s0-' '->:s5=>3\n" +
|
||||
"s0-'a'->:s4=>2\n" +
|
||||
"s0-'e'->:s1=>2\n" +
|
||||
":s1=>2-'n'->:s2=>2\n" +
|
||||
":s2=>2-'u'->:s3=>2\n" +
|
||||
":s4=>2-'b'->:s4=>2\n" +
|
||||
assertEquals("[@0,0:3='enum',<2>,1:0]\n" +
|
||||
"[@1,5:7='abc',<2>,1:5]\n" +
|
||||
"[@2,9:12='enum',<2>,1:9]\n" +
|
||||
"[@3,13:12='<EOF>',<-1>,1:13]\n" +
|
||||
"s0-' '->:s5=>3\n" +
|
||||
"s0-'a'->:s4=>2\n" +
|
||||
"s0-'e'->:s1=>2\n" +
|
||||
":s1=>2-'n'->:s2=>2\n" +
|
||||
":s2=>2-'u'->:s3=>2\n" +
|
||||
":s4=>2-'b'->:s4=>2\n" +
|
||||
":s4=>2-'c'->:s4=>2\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -62,10 +62,10 @@ public class TestSemPredEvalLexer extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n') -> skip;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "enum abc enum", true);
|
||||
assertEquals("[@0,0:3='enum',<2>,1:0]\n" +
|
||||
"[@1,5:7='abc',<2>,1:5]\n" +
|
||||
"[@2,9:12='enum',<2>,1:9]\n" +
|
||||
"[@3,13:12='<EOF>',<-1>,1:13]\n" +
|
||||
assertEquals("[@0,0:3='enum',<2>,1:0]\n" +
|
||||
"[@1,5:7='abc',<2>,1:5]\n" +
|
||||
"[@2,9:12='enum',<2>,1:9]\n" +
|
||||
"[@3,13:12='<EOF>',<-1>,1:13]\n" +
|
||||
"s0-' '->:s2=>3\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -80,10 +80,10 @@ public class TestSemPredEvalLexer extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n') -> skip;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "enum abc enum", true);
|
||||
assertEquals("[@0,0:3='enum',<1>,1:0]\n" +
|
||||
"[@1,5:7='abc',<2>,1:5]\n" +
|
||||
"[@2,9:12='enum',<1>,1:9]\n" +
|
||||
"[@3,13:12='<EOF>',<-1>,1:13]\n" +
|
||||
assertEquals("[@0,0:3='enum',<1>,1:0]\n" +
|
||||
"[@1,5:7='abc',<2>,1:5]\n" +
|
||||
"[@2,9:12='enum',<1>,1:9]\n" +
|
||||
"[@3,13:12='<EOF>',<-1>,1:13]\n" +
|
||||
"s0-' '->:s3=>3\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -100,21 +100,21 @@ public class TestSemPredEvalLexer extends BaseTest {
|
|||
sb.append("WS : [ \\t]+ ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "abc\n def \n", true);
|
||||
assertEquals("INDENT\n" +
|
||||
"[@0,0:2='abc',<1>,1:0]\n" +
|
||||
"[@1,3:3='\\n',<3>,1:3]\n" +
|
||||
"[@2,4:5=' ',<2>,2:0]\n" +
|
||||
"[@3,6:8='def',<1>,2:2]\n" +
|
||||
"[@4,9:10=' ',<4>,2:5]\n" +
|
||||
"[@5,11:11='\\n',<3>,2:7]\n" +
|
||||
"[@6,12:11='<EOF>',<-1>,3:0]\n" +
|
||||
"s0-'\n" +
|
||||
"'->:s2=>3\n" +
|
||||
"s0-'a'->:s1=>1\n" +
|
||||
"s0-'d'->:s1=>1\n" +
|
||||
":s1=>1-'b'->:s1=>1\n" +
|
||||
":s1=>1-'c'->:s1=>1\n" +
|
||||
":s1=>1-'e'->:s1=>1\n" +
|
||||
assertEquals("INDENT\n" +
|
||||
"[@0,0:2='abc',<1>,1:0]\n" +
|
||||
"[@1,3:3='\\n',<3>,1:3]\n" +
|
||||
"[@2,4:5=' ',<2>,2:0]\n" +
|
||||
"[@3,6:8='def',<1>,2:2]\n" +
|
||||
"[@4,9:10=' ',<4>,2:5]\n" +
|
||||
"[@5,11:11='\\n',<3>,2:7]\n" +
|
||||
"[@6,12:11='<EOF>',<-1>,3:0]\n" +
|
||||
"s0-'\n" +
|
||||
"'->:s2=>3\n" +
|
||||
"s0-'a'->:s1=>1\n" +
|
||||
"s0-'d'->:s1=>1\n" +
|
||||
":s1=>1-'b'->:s1=>1\n" +
|
||||
":s1=>1-'c'->:s1=>1\n" +
|
||||
":s1=>1-'e'->:s1=>1\n" +
|
||||
":s1=>1-'f'->:s1=>1\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -131,14 +131,14 @@ public class TestSemPredEvalLexer extends BaseTest {
|
|||
sb.append("WS : (' '|'\\n') -> skip;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "a cde\nabcde\n", true);
|
||||
assertEquals("a\n" +
|
||||
"cde\n" +
|
||||
"ab\n" +
|
||||
"cde\n" +
|
||||
"[@0,0:0='a',<1>,1:0]\n" +
|
||||
"[@1,2:4='cde',<2>,1:2]\n" +
|
||||
"[@2,6:7='ab',<1>,2:0]\n" +
|
||||
"[@3,8:10='cde',<2>,2:2]\n" +
|
||||
assertEquals("a\n" +
|
||||
"cde\n" +
|
||||
"ab\n" +
|
||||
"cde\n" +
|
||||
"[@0,0:0='a',<1>,1:0]\n" +
|
||||
"[@1,2:4='cde',<2>,1:2]\n" +
|
||||
"[@2,6:7='ab',<1>,2:0]\n" +
|
||||
"[@3,8:10='cde',<2>,2:2]\n" +
|
||||
"[@4,12:11='<EOF>',<-1>,3:0]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
@ -153,15 +153,15 @@ public class TestSemPredEvalLexer extends BaseTest {
|
|||
sb.append("WS : [ \\n] -> skip ;\n");
|
||||
String grammar = sb.toString();
|
||||
String found = execLexer("L.g4", grammar, "L", "enum enu a", false);
|
||||
assertEquals("enum!\n" +
|
||||
"ID enu\n" +
|
||||
"ID a\n" +
|
||||
"[@0,0:3='enum',<1>,1:0]\n" +
|
||||
"[@1,5:7='enu',<2>,1:5]\n" +
|
||||
"[@2,9:9='a',<2>,1:9]\n" +
|
||||
assertEquals("enum!\n" +
|
||||
"ID enu\n" +
|
||||
"ID a\n" +
|
||||
"[@0,0:3='enum',<1>,1:0]\n" +
|
||||
"[@1,5:7='enu',<2>,1:5]\n" +
|
||||
"[@2,9:9='a',<2>,1:9]\n" +
|
||||
"[@3,10:9='<EOF>',<-1>,1:10]\n", found);
|
||||
assertNull(this.stderrDuringParse);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestSemPredEvalParser extends BaseTest {
+public class TestSemPredEvalParser extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test

@@ -450,4 +450,4 @@ public class TestSemPredEvalParser extends BaseTest {
 	}


-}
+}
@@ -2,7 +2,7 @@ package org.antlr.v4.test.rt.csharp;

 import org.junit.Test;

-public class TestSets extends BaseTest {
+public class TestSets extends org.antlr.v4.test.runtime.csharp.BaseTest {

 	/* this file and method are generated, any edit will be overwritten by the next generation */
 	@Test

@@ -257,4 +257,4 @@ public class TestSets extends BaseTest {
 	}


-}
+}
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?><Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003" DefaultTargets="Build" ToolsVersion="4.0">
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProjectGuid>{EDC70A11-C4C1-4209-93A6-CCE2B19E8E95}</ProjectGuid>
+    <OutputType>Exe</OutputType>
+    <RootNamespace>Antlr4.Test.mono</RootNamespace>
+    <AssemblyName>Test</AssemblyName>
+    <StartupObject>Test</StartupObject>
+    <TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>bin\Debug</OutputPath>
+    <DefineConstants>DEBUG;</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <Externalconsole>true</Externalconsole>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <Optimize>true</Optimize>
+    <OutputPath>bin\Release</OutputPath>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <Externalconsole>true</Externalconsole>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="System"/>
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="AssemblyInfo.cs"/>
+    <Compile Include="Test.cs"/>
+    <Compile Include="L.cs"/>
+  </ItemGroup>
+  <Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets"/>
+  <ItemGroup>
+    <ProjectReference Include="Antlr4.Runtime.mono.csproj">
+      <Project>{E1A46D9D-66CB-46E8-93B0-7FC87299ABEF}</Project>
+      <Name>Antlr4.Runtime.mono</Name>
+    </ProjectReference>
+  </ItemGroup>
+</Project>
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?><Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003" DefaultTargets="Build" ToolsVersion="4.0">
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProjectGuid>{EDC70A11-C4C1-4209-93A6-CCE2B19E8E95}</ProjectGuid>
+    <OutputType>Exe</OutputType>
+    <RootNamespace>Antlr4.Test.mono</RootNamespace>
+    <AssemblyName>Test</AssemblyName>
+    <StartupObject>Test</StartupObject>
+    <TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>bin\Debug</OutputPath>
+    <DefineConstants>DEBUG;</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <Externalconsole>true</Externalconsole>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <Optimize>true</Optimize>
+    <OutputPath>bin\Release</OutputPath>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <Externalconsole>true</Externalconsole>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="System"/>
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="AssemblyInfo.cs"/>
+    <Compile Include="Test.cs"/>
+    <Compile Include="L.cs"/>
+  </ItemGroup>
+  <Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets"/>
+  <ItemGroup>
+    <ProjectReference Include="Antlr4.Runtime.vs2013.csproj">
+      <Project>{E1A46D9D-66CB-46E8-93B0-7FC87299ABEF}</Project>
+      <Name>Antlr4.Runtime.vs2013</Name>
+    </ProjectReference>
+  </ItemGroup>
+</Project>
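The two project templates above are identical apart from the runtime project they reference (Antlr4.Runtime.mono vs. Antlr4.Runtime.vs2013). For orientation, here is a hypothetical standalone sketch, in the same spirit as BaseTest.compile()/buildProject() further down, of how a generated test project gets built: the class name BuildGeneratedTestProject is made up for illustration, while the MSBuild path, the xbuild fallback and the Release configuration are the ones hard-coded in this commit.

import java.io.File;

// Hypothetical driver mirroring the build step of the new C# BaseTest harness:
// invoke MSBuild on Windows or Mono's xbuild elsewhere, against the generated csproj.
public class BuildGeneratedTestProject {
    public static void main(String[] args) throws Exception {
        File tmpdir = new File(args[0]);   // directory where the C# sources and csproj were written
        boolean windows = System.getProperty("os.name").toLowerCase().contains("windows");
        String msbuild = windows
                ? "C:\\Program Files (x86)\\MSBuild\\12.0\\Bin\\MSBuild.exe"
                : "/usr/bin/xbuild";       // BaseTest probes /usr/bin and /usr/local/bin for xbuild
        String csproj = new File(tmpdir, "Antlr4.Test.mono.csproj").getAbsolutePath();
        Process build = Runtime.getRuntime()
                .exec(new String[] { msbuild, "/p:Configuration=Release", csproj }, null, tmpdir);
        if (build.waitFor() != 0) throw new RuntimeException("build failed");
        // BaseTest then runs the resulting bin/Release/Test.exe and captures its stdout/stderr.
    }
}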
@@ -0,0 +1,28 @@
+using System;
+using System.Reflection;
+using System.Runtime.CompilerServices;
+
+// Information about this assembly is defined by the following attributes.
+// Change them to the values specific to your project.
+
+[assembly: AssemblyTitle ("Antlr4.Test.mono")]
+[assembly: AssemblyDescription ("")]
+[assembly: AssemblyConfiguration ("")]
+[assembly: AssemblyCompany ("")]
+[assembly: AssemblyProduct ("")]
+[assembly: AssemblyCopyright ("ericvergnaud")]
+[assembly: AssemblyTrademark ("")]
+[assembly: AssemblyCulture ("")]
+[assembly: CLSCompliant (true)]
+// The assembly version has the format "{Major}.{Minor}.{Build}.{Revision}".
+// The form "{Major}.{Minor}.*" will automatically update the build and revision,
+// and "{Major}.{Minor}.{Build}.*" will update just the revision.
+
+[assembly: AssemblyVersion ("1.0.*")]
+
+// The following attributes are used to specify the signing key for the assembly,
+// if desired. See the Mono documentation for more information about signing.
+
+//[assembly: AssemblyDelaySign(false)]
+//[assembly: AssemblyKeyFile("")]
@@ -0,0 +1,898 @@
+/*
+ * [The "BSD license"]
+ * Copyright (c) 2012 Terence Parr
+ * Copyright (c) 2012 Sam Harwell
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ *    derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.antlr.v4.test.runtime.csharp;
+
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.runtime.CommonTokenStream;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.TokenSource;
|
||||
import org.antlr.v4.runtime.WritableToken;
|
||||
import org.antlr.v4.runtime.misc.Utils;
|
||||
import org.antlr.v4.test.tool.ErrorQueue;
|
||||
import org.antlr.v4.tool.ANTLRMessage;
|
||||
import org.antlr.v4.tool.DefaultToolListener;
|
||||
import org.antlr.v4.tool.GrammarSemanticsMessage;
|
||||
import org.junit.Before;
|
||||
import org.junit.rules.TestRule;
|
||||
import org.junit.rules.TestWatcher;
|
||||
import org.junit.runner.Description;
|
||||
import org.stringtemplate.v4.ST;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.transform.OutputKeys;
|
||||
import javax.xml.transform.Transformer;
|
||||
import javax.xml.transform.TransformerFactory;
|
||||
import javax.xml.transform.dom.DOMSource;
|
||||
import javax.xml.transform.stream.StreamResult;
|
||||
import javax.xml.xpath.XPathConstants;
|
||||
import javax.xml.xpath.XPathExpression;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.OutputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
public abstract class BaseTest {
|
||||
public static final String newline = System.getProperty("line.separator");
|
||||
public static final String pathSep = System.getProperty("path.separator");
|
||||
|
||||
/**
|
||||
* When the {@code antlr.preserve-test-dir} runtime property is set to
|
||||
* {@code true}, the temporary directories created by the test run will not
|
||||
* be removed at the end of the test run, even for tests that completed
|
||||
* successfully.
|
||||
*
|
||||
* <p>
|
||||
* The default behavior (used in all other cases) is removing the temporary
|
||||
* directories for all tests which completed successfully, and preserving
|
||||
* the directories for tests which failed.</p>
|
||||
*/
|
||||
public static final boolean PRESERVE_TEST_DIR = Boolean.parseBoolean(System.getProperty("antlr-preserve-csharp-test-dir"));
|
||||
|
||||
/**
|
||||
* The base test directory is the directory where generated files get placed
|
||||
* during unit test execution.
|
||||
*
|
||||
* <p>
|
||||
* The default value for this property is the {@code java.io.tmpdir} system
|
||||
* property, and can be overridden by setting the
|
||||
* {@code antlr.java-test-dir} property to a custom location. Note that the
|
||||
* {@code antlr.java-test-dir} property directly affects the
|
||||
* {@link #CREATE_PER_TEST_DIRECTORIES} value as well.</p>
|
||||
*/
|
||||
public static final String BASE_TEST_DIR;
|
||||
|
||||
/**
|
||||
* When {@code true}, a temporary directory will be created for each test
|
||||
* executed during the test run.
|
||||
*
|
||||
* <p>
|
||||
* This value is {@code true} when the {@code antlr.java-test-dir} system
|
||||
* property is set, and otherwise {@code false}.</p>
|
||||
*/
|
||||
public static final boolean CREATE_PER_TEST_DIRECTORIES;
|
||||
|
||||
static {
|
||||
String baseTestDir = System.getProperty("antlr-csharp-test-dir");
|
||||
boolean perTestDirectories = false;
|
||||
if (baseTestDir == null || baseTestDir.isEmpty()) {
|
||||
baseTestDir = System.getProperty("java.io.tmpdir");
|
||||
perTestDirectories = true;
|
||||
}
|
||||
|
||||
if (!new File(baseTestDir).isDirectory()) {
|
||||
throw new UnsupportedOperationException("The specified base test directory does not exist: " + baseTestDir);
|
||||
}
|
||||
|
||||
BASE_TEST_DIR = baseTestDir;
|
||||
CREATE_PER_TEST_DIRECTORIES = perTestDirectories;
|
||||
}
|
||||
|
||||
public String tmpdir = null;
|
||||
|
||||
/** If error during parser execution, store stderr here; can't return
|
||||
* stdout and stderr. This doesn't trap errors from running antlr.
|
||||
*/
|
||||
protected String stderrDuringParse;
|
||||
|
||||
@org.junit.Rule
|
||||
public final TestRule testWatcher = new TestWatcher() {
|
||||
|
||||
@Override
|
||||
protected void succeeded(Description description) {
|
||||
// remove tmpdir if no error.
|
||||
if (!PRESERVE_TEST_DIR) {
|
||||
eraseTempDir();
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
if (CREATE_PER_TEST_DIRECTORIES) {
|
||||
// new output dir for each test
|
||||
String testDirectory = getClass().getSimpleName() + "-" + System.currentTimeMillis();
|
||||
tmpdir = new File(BASE_TEST_DIR, testDirectory).getAbsolutePath();
|
||||
}
|
||||
else {
|
||||
tmpdir = new File(BASE_TEST_DIR).getAbsolutePath();
|
||||
if (!PRESERVE_TEST_DIR && new File(tmpdir).exists()) {
|
||||
eraseFiles();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected org.antlr.v4.Tool newTool(String[] args) {
|
||||
Tool tool = new Tool(args);
|
||||
return tool;
|
||||
}
|
||||
|
||||
protected Tool newTool() {
|
||||
org.antlr.v4.Tool tool = new Tool(new String[] {"-o", tmpdir});
|
||||
return tool;
|
||||
}
|
||||
|
||||
protected String load(String fileName, String encoding)
|
||||
throws IOException
|
||||
{
|
||||
if ( fileName==null ) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String fullFileName = getClass().getPackage().getName().replace('.', '/') + '/' + fileName;
|
||||
int size = 65000;
|
||||
InputStreamReader isr;
|
||||
InputStream fis = getClass().getClassLoader().getResourceAsStream(fullFileName);
|
||||
if ( encoding!=null ) {
|
||||
isr = new InputStreamReader(fis, encoding);
|
||||
}
|
||||
else {
|
||||
isr = new InputStreamReader(fis);
|
||||
}
|
||||
try {
|
||||
char[] data = new char[size];
|
||||
int n = isr.read(data);
|
||||
return new String(data, 0, n);
|
||||
}
|
||||
finally {
|
||||
isr.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
protected ErrorQueue antlr(String grammarFileName, boolean defaultListener, String... extraOptions) {
|
||||
final List<String> options = new ArrayList<String>();
|
||||
Collections.addAll(options, extraOptions);
|
||||
options.add("-Dlanguage=CSharp");
|
||||
if ( !options.contains("-o") ) {
|
||||
options.add("-o");
|
||||
options.add(tmpdir);
|
||||
}
|
||||
if ( !options.contains("-lib") ) {
|
||||
options.add("-lib");
|
||||
options.add(tmpdir);
|
||||
}
|
||||
if ( !options.contains("-encoding") ) {
|
||||
options.add("-encoding");
|
||||
options.add("UTF-8");
|
||||
}
|
||||
options.add(new File(tmpdir,grammarFileName).toString());
|
||||
|
||||
final String[] optionsA = new String[options.size()];
|
||||
options.toArray(optionsA);
|
||||
Tool antlr = newTool(optionsA);
|
||||
ErrorQueue equeue = new ErrorQueue(antlr);
|
||||
antlr.addListener(equeue);
|
||||
if (defaultListener) {
|
||||
antlr.addListener(new DefaultToolListener(antlr));
|
||||
}
|
||||
antlr.processGrammarsOnCommandLine();
|
||||
|
||||
if ( !defaultListener && !equeue.errors.isEmpty() ) {
|
||||
System.err.println("antlr reports errors from "+options);
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage msg = equeue.errors.get(i);
|
||||
System.err.println(msg);
|
||||
}
|
||||
System.out.println("!!!\ngrammar:");
|
||||
try {
|
||||
System.out.println(new String(Utils.readFile(tmpdir+"/"+grammarFileName)));
|
||||
}
|
||||
catch (IOException ioe) {
|
||||
System.err.println(ioe.toString());
|
||||
}
|
||||
System.out.println("###");
|
||||
}
|
||||
if ( !defaultListener && !equeue.warnings.isEmpty() ) {
|
||||
System.err.println("antlr reports warnings from "+options);
|
||||
for (int i = 0; i < equeue.warnings.size(); i++) {
|
||||
ANTLRMessage msg = equeue.warnings.get(i);
|
||||
System.err.println(msg);
|
||||
}
|
||||
}
|
||||
|
||||
return equeue;
|
||||
}
|
||||
|
||||
protected ErrorQueue antlr(String grammarFileName, String grammarStr, boolean defaultListener, String... extraOptions) {
|
||||
System.out.println("dir "+tmpdir);
|
||||
mkdir(tmpdir);
|
||||
writeFile(tmpdir, grammarFileName, grammarStr);
|
||||
return antlr(grammarFileName, defaultListener, extraOptions);
|
||||
}
|
||||
|
||||
protected String execLexer(String grammarFileName,
|
||||
String grammarStr,
|
||||
String lexerName,
|
||||
String input)
|
||||
{
|
||||
return execLexer(grammarFileName, grammarStr, lexerName, input, false);
|
||||
}
|
||||
|
||||
protected String execLexer(String grammarFileName,
|
||||
String grammarStr,
|
||||
String lexerName,
|
||||
String input,
|
||||
boolean showDFA)
|
||||
{
|
||||
boolean success = rawGenerateRecognizer(grammarFileName,
|
||||
grammarStr,
|
||||
null,
|
||||
lexerName);
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeLexerTestFile(lexerName, showDFA);
|
||||
addSourceFiles("Test.cs");
|
||||
compile();
|
||||
String output = execTest();
|
||||
if ( stderrDuringParse!=null && stderrDuringParse.length()>0 ) {
|
||||
System.err.println(stderrDuringParse);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
	Set<String> sourceFiles = new HashSet<String>();

	private void addSourceFiles(String ... files) {
		for(String file : files)
			this.sourceFiles.add(file);
	}

	protected String execParser(String grammarFileName,
			String grammarStr,
			String parserName,
			String lexerName,
			String startRuleName,
			String input, boolean debug)
	{
		boolean success = rawGenerateRecognizer(grammarFileName,
				grammarStr,
				parserName,
				lexerName,
				"-visitor");
		assertTrue(success);
		writeFile(tmpdir, "input", input);
		return rawExecRecognizer(parserName,
				lexerName,
				startRuleName,
				debug);
	}

	/** Return true if all is well */
	protected boolean rawGenerateRecognizer(String grammarFileName,
			String grammarStr,
			String parserName,
			String lexerName,
			String... extraOptions)
	{
		return rawGenerateRecognizer(grammarFileName, grammarStr, parserName, lexerName, false, extraOptions);
	}

	/** Return true if all is well */
	protected boolean rawGenerateRecognizer(String grammarFileName,
			String grammarStr,
			String parserName,
			String lexerName,
			boolean defaultListener,
			String... extraOptions)
	{
		ErrorQueue equeue = antlr(grammarFileName, grammarStr, defaultListener, extraOptions);
		if (!equeue.errors.isEmpty()) {
			return false;
		}

		List<String> files = new ArrayList<String>();
		if ( lexerName!=null ) {
			files.add(lexerName+".cs");
		}
		if ( parserName!=null ) {
			files.add(parserName+".cs");
			Set<String> optionsSet = new HashSet<String>(Arrays.asList(extraOptions));
			String grammarName = grammarFileName.substring(0, grammarFileName.lastIndexOf('.'));
			if (!optionsSet.contains("-no-listener")) {
				files.add(grammarName+"Listener.cs");
				files.add(grammarName+"BaseListener.cs");
			}
			if (optionsSet.contains("-visitor")) {
				files.add(grammarName+"Visitor.cs");
				files.add(grammarName+"BaseVisitor.cs");
			}
		}
		addSourceFiles(files.toArray(new String[files.size()]));
		return true;
	}
	protected String rawExecRecognizer(String parserName,
			String lexerName,
			String parserStartRuleName,
			boolean debug)
	{
		this.stderrDuringParse = null;
		if ( parserName==null ) {
			writeLexerTestFile(lexerName, false);
		}
		else {
			writeParserTestFile(parserName,
					lexerName,
					parserStartRuleName,
					debug);
		}

		addSourceFiles("Test.cs");
		return execRecognizer();
	}

	public String execRecognizer() {
		compile();
		return execTest();
	}

	public boolean compile() {
		try {
			if(!createProject())
				return false;
			if(!buildProject())
				return false;
			return true;
		} catch(Exception e) {
			return false;
		}
	}

	private File getTestProjectFile() {
		return new File(tmpdir, "Antlr4.Test.mono.csproj");
	}

	private boolean buildProject() throws Exception {
		String msbuild = locateMSBuild();
		String[] args = {
				msbuild,
				"/p:Configuration=Release",
				getTestProjectFile().getAbsolutePath()
		};
		System.err.println("Starting build "+Utils.join(args, " "));
		Process process =
				Runtime.getRuntime().exec(args, null, new File(tmpdir));
		StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
		StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
		stdoutVacuum.start();
		stderrVacuum.start();
		process.waitFor();
		stdoutVacuum.join();
		stderrVacuum.join();
		if ( stderrVacuum.toString().length()>0 ) {
			this.stderrDuringParse = stderrVacuum.toString();
			System.err.println("buildProject stderrVacuum: "+ stderrVacuum);
		}
		return process.exitValue()==0;
	}
	private String locateMSBuild() {
		if(isWindows())
			return "\"C:\\Program Files (x86)\\MSBuild\\12.0\\Bin\\MSBuild.exe\"";
		else
			return locateTool("xbuild");
	}

	private boolean isWindows() {
		return System.getProperty("os.name").toLowerCase().contains("windows");
	}

	private String locateExec() {
		return new File(tmpdir, "bin/Release/Test.exe").getAbsolutePath();
	}

	private String locateTool(String tool) {
		String[] roots = { "/usr/bin/", "/usr/local/bin/" };
		for(String root : roots) {
			if(new File(root + tool).exists())
				return root + tool;
		}
		throw new RuntimeException("Could not locate " + tool);
	}

	public boolean createProject() {
		try {
			String pack = this.getClass().getPackage().getName().replace(".", "/") + "/";
			System.out.println("create project "+pack);
			// save AssemblyInfo
			InputStream input = Thread.currentThread().getContextClassLoader().getResourceAsStream(pack + "AssemblyInfo.cs");
			if ( input==null ) {
				System.err.println("Can't find " + pack + "AssemblyInfo.cs as resource");
				return false;
			}
			OutputStream output = new FileOutputStream(new File(tmpdir, "AssemblyInfo.cs").getAbsolutePath());
			while(input.available()>0) {
				output.write(input.read());
			}
			output.close();
			input.close();
			// update project
			String projectName = isWindows() ? "Antlr4.Test.vs2013.csproj" : "Antlr4.Test.mono.csproj";
			input = Thread.currentThread().getContextClassLoader().getResourceAsStream(pack + projectName);
			Document prjXml = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(input);
			// update runtime project reference
			String runtimePath = System.getProperty("antlr-csharp-runtime-project");
			String runtimeName = isWindows() ? "Antlr4.Runtime.vs2013.csproj" : "Antlr4.Runtime.mono.csproj";
			if(runtimePath==null)
				runtimePath = "../../antlr4-csharp/runtime/CSharp/Antlr4.Runtime/" + runtimeName;
			File projFile = new File(runtimePath);
			if(!projFile.exists())
				throw new RuntimeException("C# runtime project file not found at:" + projFile.getAbsolutePath());
			runtimePath = projFile.getAbsolutePath();
			XPathExpression exp = XPathFactory.newInstance().newXPath().compile("/Project/ItemGroup/ProjectReference[@Include='" + runtimeName + "']");
			Element node = (Element)exp.evaluate(prjXml, XPathConstants.NODE);
			node.setAttribute("Include", runtimePath.replace("/", "\\"));
			// update project file list
			exp = XPathFactory.newInstance().newXPath().compile("/Project/ItemGroup[Compile/@Include='AssemblyInfo.cs']");
			Element group = (Element)exp.evaluate(prjXml, XPathConstants.NODE);
			if(group==null)
				return false;
			// remove existing children
			while(group.hasChildNodes())
				group.removeChild(group.getFirstChild());
			// add AssemblyInfo.cs, not a generated source
			sourceFiles.add("AssemblyInfo.cs");
			// add files to compile
			for(String file : sourceFiles) {
				Element elem = group.getOwnerDocument().createElement("Compile");
				elem.setAttribute("Include", file);
				group.appendChild(elem);
			}
			// save project
			File prjFile = getTestProjectFile();
			Transformer transformer = TransformerFactory.newInstance().newTransformer();
			transformer.setOutputProperty(OutputKeys.INDENT, "yes");
			transformer.transform(new DOMSource(prjXml), new StreamResult(prjFile));
			return true;
		} catch(Exception e) {
			e.printStackTrace(System.err);
			return false;
		}
	}
	public String execTest() {
		try {
			String exec = locateExec();
			String[] args = isWindows() ?
					new String[] { exec, new File(tmpdir, "input").getAbsolutePath() } :
					new String[] { "mono", "--runtime=v4.0.30319", exec, new File(tmpdir, "input").getAbsolutePath() };
			ProcessBuilder pb = new ProcessBuilder(args);
			pb.directory(new File(tmpdir));
			Process p = pb.start();
			StreamVacuum stdoutVacuum = new StreamVacuum(p.getInputStream());
			StreamVacuum stderrVacuum = new StreamVacuum(p.getErrorStream());
			stdoutVacuum.start();
			stderrVacuum.start();
			p.waitFor();
			stdoutVacuum.join();
			stderrVacuum.join();
			String output = stdoutVacuum.toString();
			if ( stderrVacuum.toString().length()>0 ) {
				this.stderrDuringParse = stderrVacuum.toString();
				System.err.println("exec stderrVacuum: "+ stderrVacuum);
			}
			return output;
		}
		catch (Exception e) {
			System.err.println("can't exec recognizer");
			e.printStackTrace(System.err);
		}
		return null;
	}

	public void testErrors(String[] pairs, boolean printTree) {
		for (int i = 0; i < pairs.length; i+=2) {
			String input = pairs[i];
			String expect = pairs[i+1];

			String[] lines = input.split("\n");
			String fileName = getFilenameFromFirstLineOfGrammar(lines[0]);
			ErrorQueue equeue = antlr(fileName, input, false);

			String actual = equeue.toString(true);
			actual = actual.replace(tmpdir + File.separator, "");
			System.err.println(actual);
			String msg = input;
			msg = msg.replace("\n","\\n");
			msg = msg.replace("\r","\\r");
			msg = msg.replace("\t","\\t");

			org.junit.Assert.assertEquals("error in: "+msg,expect,actual);
		}
	}

	public String getFilenameFromFirstLineOfGrammar(String line) {
		String fileName = "A" + Tool.GRAMMAR_EXTENSION;
		int grIndex = line.lastIndexOf("grammar");
		int semi = line.lastIndexOf(';');
		if ( grIndex>=0 && semi>=0 ) {
			int space = line.indexOf(' ', grIndex);
			fileName = line.substring(space+1, semi)+Tool.GRAMMAR_EXTENSION;
		}
		if ( fileName.length()==Tool.GRAMMAR_EXTENSION.length() ) fileName = "A" + Tool.GRAMMAR_EXTENSION;
		return fileName;
	}
	List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class<? extends ANTLRMessage> c) {
		List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
		for (ANTLRMessage m : msgs) {
			if ( m.getClass() == c ) filtered.add(m);
		}
		return filtered;
	}

	public static class StreamVacuum implements Runnable {
		StringBuilder buf = new StringBuilder();
		BufferedReader in;
		Thread sucker;
		public StreamVacuum(InputStream in) {
			this.in = new BufferedReader( new InputStreamReader(in) );
		}
		public void start() {
			sucker = new Thread(this);
			sucker.start();
		}
		@Override
		public void run() {
			try {
				String line = in.readLine();
				while (line!=null) {
					buf.append(line);
					buf.append('\n');
					line = in.readLine();
				}
			}
			catch (IOException ioe) {
				System.err.println("can't read output from process");
			}
		}
		/** wait for the thread to finish */
		public void join() throws InterruptedException {
			sucker.join();
		}
		@Override
		public String toString() {
			return buf.toString();
		}
	}
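	StreamVacuum exists to drain a child process's stdout and stderr on background threads; if either pipe buffer fills up before waitFor() returns, the child can block and the build or test run hangs. buildProject() and execTest() above both follow the same start/waitFor/join ordering. A minimal sketch of that pattern (the helper name runAndCapture and its command argument are placeholders, not part of this change):

	static String runAndCapture(String... command) throws Exception {
		Process p = Runtime.getRuntime().exec(command);
		StreamVacuum out = new StreamVacuum(p.getInputStream());
		StreamVacuum err = new StreamVacuum(p.getErrorStream());
		out.start();   // begin draining before waiting, so the child never blocks on a full pipe
		err.start();
		p.waitFor();
		out.join();    // make sure both buffers are complete before reading them
		err.join();
		if ( err.toString().length()>0 ) System.err.println(err);
		return out.toString();
	}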
	protected void checkGrammarSemanticsError(ErrorQueue equeue,
			GrammarSemanticsMessage expectedMessage)
		throws Exception
	{
		ANTLRMessage foundMsg = null;
		for (int i = 0; i < equeue.errors.size(); i++) {
			ANTLRMessage m = equeue.errors.get(i);
			if (m.getErrorType()==expectedMessage.getErrorType() ) {
				foundMsg = m;
			}
		}
		assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
		assertTrue("error is not a GrammarSemanticsMessage",
				foundMsg instanceof GrammarSemanticsMessage);
		assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
		if ( equeue.size()!=1 ) {
			System.err.println(equeue);
		}
	}

	public static class FilteringTokenStream extends CommonTokenStream {
		public FilteringTokenStream(TokenSource src) { super(src); }
		Set<Integer> hide = new HashSet<Integer>();
		@Override
		protected boolean sync(int i) {
			if (!super.sync(i)) {
				return false;
			}

			Token t = get(i);
			if ( hide.contains(t.getType()) ) {
				((WritableToken)t).setChannel(Token.HIDDEN_CHANNEL);
			}

			return true;
		}
		public void setTokenTypeChannel(int ttype, int channel) {
			hide.add(ttype);
		}
	}

	public static void writeFile(String dir, String fileName, String content) {
		try {
			Utils.writeFile(dir+"/"+fileName, content, "UTF-8");
		}
		catch (IOException ioe) {
			System.err.println("can't write file");
			ioe.printStackTrace(System.err);
		}
	}

	protected void mkdir(String dir) {
		File f = new File(dir);
		f.mkdirs();
	}
	protected void writeParserTestFile(String parserName,
			String lexerName,
			String parserStartRuleName,
			boolean debug)
	{
		ST outputFileST = new ST(
			"using System;\n" +
			"using Antlr4.Runtime;\n" +
			"using Antlr4.Runtime.Tree;\n" +
			"\n" +
			"public class Test {\n" +
			" public static void Main(string[] args) {\n" +
			" ICharStream input = new AntlrFileStream(args[0]);\n" +
			" <lexerName> lex = new <lexerName>(input);\n" +
			" CommonTokenStream tokens = new CommonTokenStream(lex);\n" +
			" <createParser>\n"+
			" parser.BuildParseTree = true;\n" +
			" ParserRuleContext tree = parser.<parserStartRuleName>();\n" +
			" ParseTreeWalker.Default.Walk(new TreeShapeListener(), tree);\n" +
			" }\n" +
			"}\n" +
			"\n" +
			"class TreeShapeListener : IParseTreeListener {\n" +
			" public void VisitTerminal(ITerminalNode node) { }\n" +
			" public void VisitErrorNode(IErrorNode node) { }\n" +
			" public void ExitEveryRule(ParserRuleContext ctx) { }\n" +
			"\n" +
			" public void EnterEveryRule(ParserRuleContext ctx) {\n" +
			" for (int i = 0; i \\< ctx.ChildCount; i++) {\n" +
			" IParseTree parent = ctx.GetChild(i).Parent;\n" +
			" if (!(parent is IRuleNode) || ((IRuleNode)parent).RuleContext != ctx) {\n" +
			" throw new Exception(\"Invalid parse tree shape detected.\");\n" +
			" }\n" +
			" }\n" +
			" }\n" +
			"}"
		);
		ST createParserST = new ST(" <parserName> parser = new <parserName>(tokens);\n");
		if ( debug ) {
			createParserST =
				new ST(
					" <parserName> parser = new <parserName>(tokens);\n" +
					" parser.AddErrorListener(new DiagnosticErrorListener());\n");
		}
		outputFileST.add("createParser", createParserST);
		outputFileST.add("parserName", parserName);
		outputFileST.add("lexerName", lexerName);
		outputFileST.add("parserStartRuleName", parserStartRuleName);
		writeFile(tmpdir, "Test.cs", outputFileST.render());
	}

	protected void writeLexerTestFile(String lexerName, boolean showDFA) {
		ST outputFileST = new ST(
			"using System;\n" +
			"using Antlr4.Runtime;\n" +
			"\n" +
			"public class Test {\n" +
			" public static void Main(string[] args) {\n" +
			" ICharStream input = new AntlrFileStream(args[0]);\n" +
			" <lexerName> lex = new <lexerName>(input);\n" +
			" CommonTokenStream tokens = new CommonTokenStream(lex);\n" +
			" tokens.Fill();\n" +
			" foreach (object t in tokens.GetTokens())\n" +
			" Console.WriteLine(t);\n" +
			(showDFA?"Console.Write(lex.Interpreter.GetDFA(Lexer.DefaultMode).ToLexerString());\n":"")+
			" }\n" +
			"}"
		);

		outputFileST.add("lexerName", lexerName);
		writeFile(tmpdir, "Test.cs", outputFileST.render());
	}
	public void writeRecognizerAndCompile(String parserName, String lexerName,
			String parserStartRuleName,
			boolean debug) {
		if ( parserName==null ) {
			writeLexerTestFile(lexerName, debug);
		}
		else {
			writeParserTestFile(parserName,
					lexerName,
					parserStartRuleName,
					debug);
		}

		addSourceFiles("Test.cs");
	}

	protected void eraseFiles(final String filesEndingWith) {
		File tmpdirF = new File(tmpdir);
		String[] files = tmpdirF.list();
		for(int i = 0; files!=null && i < files.length; i++) {
			if ( files[i].endsWith(filesEndingWith) ) {
				new File(tmpdir+"/"+files[i]).delete();
			}
		}
	}

	protected void eraseFiles() {
		if (tmpdir == null) {
			return;
		}

		File tmpdirF = new File(tmpdir);
		String[] files = tmpdirF.list();
		if(files!=null) for(String file : files) {
			new File(tmpdir+"/"+file).delete();
		}
	}

	protected void eraseTempDir() {
		File tmpdirF = new File(tmpdir);
		if ( tmpdirF.exists() ) {
			eraseFiles();
			tmpdirF.delete();
		}
	}

	public String getFirstLineOfException() {
		if ( this.stderrDuringParse ==null ) {
			return null;
		}
		String[] lines = this.stderrDuringParse.split("\n");
		String prefix="Exception in thread \"main\" ";
		return lines[0].substring(prefix.length(),lines[0].length());
	}
	public List<String> realElements(List<String> elements) {
		return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
	}

	public void assertNotNullOrEmpty(String message, String text) {
		assertNotNull(message, text);
		assertFalse(message, text.isEmpty());
	}

	public void assertNotNullOrEmpty(String text) {
		assertNotNull(text);
		assertFalse(text.isEmpty());
	}

	/** Return map sorted by key */
	public <K extends Comparable<? super K>,V> LinkedHashMap<K,V> sort(Map<K,V> data) {
		LinkedHashMap<K,V> dup = new LinkedHashMap<K, V>();
		List<K> keys = new ArrayList<K>();
		keys.addAll(data.keySet());
		Collections.sort(keys);
		for (K k : keys) {
			dup.put(k, data.get(k));
		}
		return dup;
	}

	protected static void assertEquals(String msg, int a, int b) {
		org.junit.Assert.assertEquals(msg, a, b);
	}

	protected static void assertEquals(String a, String b) {
		a = absorbExpectedDifferences(a);
		b = absorbActualDifferences(b);
		org.junit.Assert.assertEquals(a, b);
	}

	protected static void assertNull(String a) {
		a = absorbActualDifferences(a);
		org.junit.Assert.assertNull(a);
	}

	private static String absorbExpectedDifferences(String a) {
		if(a==null)
			return a;
		// work around the lack of requiresFullContext field in DFAState
		if(a.startsWith("Decision"))
			a = a.replaceAll("\\^", "");
		// work around the algo difference for full context
		a = stripOutUnwantedLinesWith(a, "reportAttemptingFullContext","reportContextSensitivity", "reportAmbiguity");
		if(a.isEmpty())
			a = null;
		return a;
	}

	private static String absorbActualDifferences(String a) {
		if(a==null)
			return a;
		// work around the algo difference for full context
		// work around the algo difference for semantic predicates
		a = stripOutUnwantedLinesWith(a, "reportContextSensitivity","eval=false");
		if(a.isEmpty())
			a = null;
		return a;
	}

	private static String stripOutUnwantedLinesWith(String a, String ... unwanteds) {
		String[] lines = a.split("\n");
		StringBuilder sb = new StringBuilder();
		for(String line : lines) {
			boolean wanted = true;
			for(String unwanted : unwanteds) {
				if(line.contains(unwanted) ) {
					wanted = false;
					break;
				}
			}
			if(!wanted)
				continue;
			sb.append(line);
			sb.append("\n");
		}
		return sb.toString();
	}

}
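The assertEquals and assertNull overloads above deliberately shadow the JUnit versions: both sides are normalized before comparison, stripping the '^' requiresFullContext markers from "Decision ..." DFA dumps and dropping lines that mention reportAttemptingFullContext, reportContextSensitivity, reportAmbiguity, or eval=false, since the C# runtime does not reproduce those exactly as the Java reference output does. A rough illustration, using made-up strings:

	// Hypothetical values, for illustration only.
	String expected = "Decision 0:\ns0-ID->:s1^=>1\n";   // reference output marks requiresFullContext with '^'
	String actual   = "Decision 0:\ns0-ID->:s1=>1\n";    // C# runtime output has no such marker
	assertEquals(expected, actual);                       // passes: absorbExpectedDifferences() removes the '^'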
@@ -0,0 +1,395 @@
TestFile(file) ::= <<
package org.antlr.v4.test.runtime.csharp;

import org.antlr.v4.test.runtime.csharp.BaseTest;
import org.junit.Test;

<if(file.Options.("ImportErrorQueue"))>
import org.antlr.v4.test.tool.ErrorQueue;
<endif>
<if(file.Options.("ImportGrammar"))>
import org.antlr.v4.tool.Grammar;
<endif>

import static org.junit.Assert.*;

public class Test<file.name> extends BaseTest {

<file.tests:{test | <test>}; separator="\n", wrap, anchor>

}<\n>
>>

LexerTestMethod(test) ::= <<
/* this file and method are generated, any edit will be overwritten by the next generation */
@Test public void test<test.name>() throws Exception {
mkdir(tmpdir);
<test.SlaveGrammars:{grammar |
String slave_<grammar> =<writeStringLiteral(test.SlaveGrammars.(grammar))>;
writeFile(tmpdir, "<grammar>.g4", slave_<grammar>);
}; separator="\n">

<test.Grammar:{grammar |
<buildStringLiteral(test.Grammar.(grammar), "grammar")>
<test.afterGrammar>
String input =<writeStringLiteral(test.Input)>;
String found = execLexer("<grammar>.g4", grammar, "<grammar><if(test.Options.("CombinedGrammar"))>Lexer<endif>", input, <writeBoolean(test.Options.("ShowDFA"))>);
assertEquals(<writeStringLiteral(test.Output)>, found);
<if(!isEmpty.(test.Errors))>
assertEquals(<writeStringLiteral(test.Errors)>, this.stderrDuringParse);
<else>
assertNull(this.stderrDuringParse);
<endif>
}>
}
>>
CompositeLexerTestMethod(test) ::= <<
<LexerTestMethod(test)>
>>

ParserTestMethod(test) ::= <<
/* this file and method are generated, any edit will be overwritten by the next generation */
@Test public void test<test.name>() throws Exception {
mkdir(tmpdir);
<test.SlaveGrammars:{grammar |
String slave_<grammar> =<writeStringLiteral(test.SlaveGrammars.(grammar))>;
<if(test.Options.("SlaveIsLexer"))>
rawGenerateAndBuildRecognizer("<grammar>.g4", slave_<grammar>, null, "<grammar>");
<else>
writeFile(tmpdir, "<grammar>.g4", slave_<grammar>);
<endif>
}; separator="\n">
<test.Grammar:{grammar |
<buildStringLiteral(test.Grammar.(grammar), "grammar")>
<test.afterGrammar>
String input =<writeStringLiteral(test.Input)>;
String found = execParser("<grammar>.g4", grammar, "<grammar>Parser", "<grammar>Lexer", "<test.Rule>", input, <writeBoolean(test.Options.("Debug"))>);
assertEquals(<writeStringLiteral(test.Output)>, found);
<if(!isEmpty.(test.Errors))>
assertEquals(<writeStringLiteral(test.Errors)>, this.stderrDuringParse);
<else>
assertNull(this.stderrDuringParse);
<endif>
}>
}
>>

CompositeParserTestMethod(test) ::= <<
<ParserTestMethod(test)>
>>

AbstractParserTestMethod(test) ::= <<
/* this file and method are generated, any edit will be overwritten by the next generation */
String test<test.name>(String input) throws Exception {
String grammar = <test.grammar.lines:{ line | "<line>};separator="\\n\" +\n", wrap, anchor>";
return execParser("<test.grammar.grammarName>.g4", grammar, "<test.grammar.grammarName>Parser", "<test.grammar.grammarName>Lexer", "<test.startRule>", input, <test.debug>);
}

>>
ConcreteParserTestMethod(test) ::= <<
/* this file and method are generated, any edit will be overwritten by the next generation */
@Test
public void test<test.name>() throws Exception {
String found = test<test.baseName>("<test.input>");
assertEquals("<test.expectedOutput>", found);
<if(test.expectedErrors)>
assertEquals("<test.expectedErrors>", this.stderrDuringParse);
<else>
assertNull(this.stderrDuringParse);
<endif>
}

>>

string(text) ::= <<
"<escape.(text)>"
>>

writeBoolean(o) ::= "<if(o && !isEmpty.(o))>true<else>false<endif>"

buildStringLiteral(text, variable) ::= <<
StringBuilder <variable>Builder = new StringBuilder(<strlen.(text)>);
<lines.(text):{line|<variable>Builder.append("<escape.(line)>");}; separator="\n">
String <variable> = <variable>Builder.toString();
>>

writeStringLiteral(text) ::= <%
<if(isEmpty.(text))>
""
<else>
<writeLines(lines.(text))>
<endif>
%>

writeLines(textLines) ::= <%
<if(rest(textLines))>
<textLines:{line|
<\n> "<escape.(line)>}; separator="\" +">"
<else>
"<escape.(first(textLines))>"
<endif>
%>
writeln(s) ::= <<Console.WriteLine(<s>);>>

write(s) ::= <<Console.Write(<s>);>>

False() ::= "false"

True() ::= "true"

Not(v) ::= "!<v>"

Assert(s) ::= <<Debug.Assert(<s>);>>

Cast(t,v) ::= "((<t>)<v>)"

Append(a,b) ::= "<a> + <b>"

Concat(a,b) ::= "<a><b>"

DeclareLocal(s,v) ::= "Object <s> = <v>;"

DeclareListLocal(s,v) ::= "List <s> = <v>;"

AssignLocal(s,v) ::= "<s> = <v>;"

InitIntMember(n,v) ::= <%int <n> = <v>;%>

InitBooleanMember(n,v) ::= <%bool <n> = <v>;%>

GetMember(n) ::= <%this.<n>%>

SetMember(n,v) ::= <%this.<n> = <v>;%>

AddMember(n,v) ::= <%this.<n> += <v>;%>

PlusMember(v,n) ::= <%<v> + this.<n>%>

MemberEquals(n,v) ::= <%this.<n> == <v>%>

ModMemberEquals(n,m,v) ::= <%this.<n> % <m> == <v>%>

ModMemberNotEquals(n,m,v) ::= <%this.<n> % <m> != <v>%>

DumpDFA() ::= "this.DumpDFA();"

Pass() ::= ""

StringList() ::= "List\<String>"

BuildParseTrees() ::= "this.BuildParseTree = true;"

BailErrorStrategy() ::= <%ErrorHandler = new BailErrorStrategy();%>

ToStringTree(s) ::= <%<s>.ToStringTree(this)%>

Column() ::= "this.Column"

Text() ::= "this.Text"

ValEquals(a,b) ::= <%<a>==<b>%>

TextEquals(a) ::= <%this.Text.Equals("<a>")%>

PlusText(a) ::= <%"<a>" + this.Text%>

InputText() ::= "this.TokenStream.GetText()"

LTEquals(i, v) ::= <%this.TokenStream.Lt(<i>).Text.Equals(<v>)%>

LANotEquals(i, v) ::= <%this.InputStream.La(<i>)!=<v>%>

TokenStartColumnEquals(i) ::= <%this.TokenStartColumn==<i>%>

ImportListener(X) ::= ""

GetExpectedTokenNames() ::= "this.GetExpectedTokens().ToString(this.Vocabulary)"

RuleInvocationStack() ::= "GetRuleInvocationStackAsString()"

LL_EXACT_AMBIG_DETECTION() ::= <<Interpreter.PredictionMode = PredictionMode.LlExactAmbigDetection;>>

ParserPropertyMember() ::= <<
@members {
bool Property() {
return true;
}
}
>>
PositionAdjustingLexer() ::= <<

public override IToken NextToken() {
if (!(Interpreter is PositionAdjustingLexerATNSimulator)) {
Interpreter = new PositionAdjustingLexerATNSimulator(this, _ATN);
}

return base.NextToken();
}

public override IToken Emit() {
switch (Type) {
case TOKENS:
HandleAcceptPositionForKeyword("tokens");
break;

case LABEL:
HandleAcceptPositionForIdentifier();
break;

default:
break;
}

return base.Emit();
}

private bool HandleAcceptPositionForIdentifier() {
string tokenText = this.Text;
int identifierLength = 0;
while (identifierLength \< tokenText.Length && IsIdentifierChar(tokenText[identifierLength])) {
identifierLength++;
}

if (InputStream.Index > TokenStartCharIndex + identifierLength) {
int offset = identifierLength - 1;
getInterpreter().ResetAcceptPosition((ICharStream)InputStream, TokenStartCharIndex + offset, TokenStartLine, TokenStartColumn + offset);
return true;
}

return false;
}

private bool HandleAcceptPositionForKeyword(string keyword) {
if (InputStream.Index > TokenStartCharIndex + keyword.Length) {
int offset = keyword.Length - 1;
getInterpreter().ResetAcceptPosition((ICharStream)InputStream, TokenStartCharIndex + offset, TokenStartLine, TokenStartColumn + offset);
return true;
}

return false;
}

public PositionAdjustingLexerATNSimulator getInterpreter() {
return (PositionAdjustingLexerATNSimulator)base.Interpreter;
}

private static bool IsIdentifierChar(char c) {
return Char.IsLetterOrDigit(c) || c == '_';
}

public class PositionAdjustingLexerATNSimulator : LexerATNSimulator {

public PositionAdjustingLexerATNSimulator(Lexer recog, ATN atn)
: base(recog, atn)
{
}

public void ResetAcceptPosition(ICharStream input, int index, int line, int column) {
input.Seek(index);
this.Line = line;
this.Column = column;
Consume(input);
}

}

>>
BasicListener(X) ::= <<
public class LeafListener : TBaseListener {
public override void VisitTerminal(ITerminalNode node) {
Console.WriteLine(node.Symbol.Text);
}
}
>>

WalkListener(s) ::= <<
ParseTreeWalker walker = new ParseTreeWalker();
walker.Walk(new LeafListener(), <s>);
>>

TokenGetterListener(X) ::= <<
public class LeafListener : TBaseListener {
public override void ExitA(TParser.AContext ctx) {
if (ctx.ChildCount==2)
{
StringBuilder sb = new StringBuilder ("[");
foreach (ITerminalNode node in ctx.INT ()) {
sb.Append (node.ToString ());
sb.Append (", ");
}
sb.Length = sb.Length - 2;
sb.Append ("]");
Console.Write ("{0} {1} {2}", ctx.INT (0).Symbol.Text,
ctx.INT (1).Symbol.Text, sb.ToString());
}
else
Console.WriteLine(ctx.ID().Symbol);
}
}
>>

RuleGetterListener(X) ::= <<
public class LeafListener : TBaseListener {
public override void ExitA(TParser.AContext ctx) {
if (ctx.ChildCount==2) {
Console.Write("{0} {1} {2}",ctx.b(0).Start.Text,
ctx.b(1).Start.Text,ctx.b()[0].Start.Text);
} else
Console.WriteLine(ctx.b(0).Start.Text);
}
}
>>
LRListener(X) ::= <<
public class LeafListener : TBaseListener {
public override void ExitE(TParser.EContext ctx) {
if (ctx.ChildCount==3) {
Console.Write("{0} {1} {2}\n",ctx.e(0).Start.Text,
ctx.e(1).Start.Text, ctx.e()[0].Start.Text);
} else
Console.WriteLine(ctx.INT().Symbol.Text);
}
}
>>

LRWithLabelsListener(X) ::= <<
public class LeafListener : TBaseListener {
public override void ExitCall(TParser.CallContext ctx) {
Console.Write("{0} {1}",ctx.e().Start.Text,ctx.eList());
}
public override void ExitInt(TParser.IntContext ctx) {
Console.WriteLine(ctx.INT().Symbol.Text);
}
}
>>

DeclareContextListGettersFunction() ::= <<
void foo() {
SContext s = null;
AContext[] a = s.a();
BContext[] b = s.b();
}
>>

Declare_foo() ::= <<public void foo() {Console.WriteLine("foo");}>>

Invoke_foo() ::= "this.foo();"

Declare_pred() ::= <<bool pred(bool v) {
Console.WriteLine("eval="+v.ToString().ToLower());
return v;
}
>>

Invoke_pred(v) ::= <<this.pred(<v>)>>

isEmpty ::= [
"": true,
default: false
]
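The group above is what the intermediate test-generation step renders into the generated Test<Name> classes. A minimal sketch of how such a group file can be loaded and rendered with StringTemplate 4 (the file name CSharp.test.stg and the shape of fileDescriptor are assumptions; the generator itself is not part of this diff):

	import org.stringtemplate.v4.ST;
	import org.stringtemplate.v4.STGroup;
	import org.stringtemplate.v4.STGroupFile;

	class TestFileRenderer {
		static String renderTestFile(Object fileDescriptor) {
			STGroup group = new STGroupFile("CSharp.test.stg"); // assumed group-file name
			ST st = group.getInstanceOf("TestFile");
			st.add("file", fileDescriptor);                      // must expose the name, tests and Options attributes
			return st.render();                                   // Java source for one generated Test<file.name> class
		}
	}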