forked from jasder/antlr
got token aliases checks done
[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 8893]
parent c6904bb21b
commit 66933e36a2
@@ -303,10 +303,10 @@ public class Tool {
         return g;
     }
 
-    public GrammarAST loadGrammar(String fileName) {
+    public GrammarRootAST loadGrammar(String fileName) {
         try {
             ANTLRFileStream in = new ANTLRFileStream(fileName);
-            GrammarAST t = load(in);
+            GrammarRootAST t = load(in);
             return t;
         }
         catch (IOException ioe) {
@@ -316,7 +316,7 @@ public class Tool {
     }
 
     /** Try current dir then dir of g then lib dir */
-    public GrammarAST loadImportedGrammar(Grammar g, String fileName) throws IOException {
+    public GrammarRootAST loadImportedGrammar(Grammar g, String fileName) throws IOException {
         System.out.println("loadImportedGrammar "+fileName+" from "+g.fileName);
         File importedFile = new File(fileName);
         if ( !importedFile.exists() ) {
@@ -335,11 +335,11 @@ public class Tool {
         return load(in);
     }
 
-    public GrammarAST loadFromString(String grammar) {
+    public GrammarRootAST loadFromString(String grammar) {
         return load(new ANTLRStringStream(grammar));
     }
 
-    public GrammarAST load(CharStream in) {
+    public GrammarRootAST load(CharStream in) {
         try {
             GrammarASTAdaptor adaptor = new GrammarASTAdaptor(in);
             ANTLRLexer lexer = new ANTLRLexer(in);
@@ -347,7 +347,7 @@ public class Tool {
             ToolANTLRParser p = new ToolANTLRParser(tokens, this);
             p.setTreeAdaptor(adaptor);
             ParserRuleReturnScope r = p.grammarSpec();
-            GrammarAST root = (GrammarAST) r.getTree();
+            GrammarRootAST root = (GrammarRootAST)r.getTree();
             if ( root instanceof GrammarRootAST ) {
                 ((GrammarRootAST)root).hasErrors = p.getNumberOfSyntaxErrors()>0;
             }
@@ -376,7 +376,8 @@ public class Tool {
      */
    public void mergeImportedGrammars(Grammar rootGrammar) {
        GrammarAST root = rootGrammar.ast;
-       GrammarASTAdaptor adaptor = new GrammarASTAdaptor(root.token.getInputStream());
+       GrammarAST id = (GrammarAST) root.getChild(0);
+       GrammarASTAdaptor adaptor = new GrammarASTAdaptor(id.token.getInputStream());
 
        GrammarAST tokensRoot = (GrammarAST)root.getFirstChildWithType(ANTLRParser.TOKENS);
 
@@ -391,7 +392,8 @@ public class Tool {
            root.addChild(RULES);
        }
        else {
-           List<GrammarAST> rootRules = root.getNodesWithType(ANTLRParser.RULE);
+           // make list of rules we have in root grammar
+           List<GrammarAST> rootRules = RULES.getNodesWithType(ANTLRParser.RULE);
            for (GrammarAST r : rootRules) rootRuleNames.add(r.getChild(0).getText());
        }
 
@@ -471,7 +473,11 @@ public class Tool {
            for (GrammarAST r : rules) {
                System.out.println("imported rule: "+r.toStringTree());
                String name = r.getChild(0).getText();
-               if ( !rootRuleNames.contains(name) ) RULES.addChild(r); // if not overridden
+               boolean rootAlreadyHasRule = rootRuleNames.contains(name);
+               if ( !rootAlreadyHasRule ) {
+                   RULES.addChild(r); // merge in if not overridden
+                   rootRuleNames.add(name);
+               }
            }
        }
    }
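For illustration, the merge rule in the hunk above can be read against a pair of hypothetical grammars written in the style of the tests later in this change (S, M, and the rule names here are invented for the example): a rule the root grammar already defines is skipped; anything else is added under the root's RULES node and its name recorded so a later import cannot re-add it.

    // Hypothetical example grammars (not part of this commit).
    String slave =
        "parser grammar S;\n" +
        "a : 'a' ;\n" +     // overridden by M.a below, so not merged
        "x : 'x' ;\n";      // not defined in M, so merged into M's RULES
    String master =
        "grammar M;\n" +
        "import S;\n" +
        "a : 'A' ;\n";      // root definition wins over S.a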
@@ -588,6 +594,7 @@ public class Tool {
        }
 
        lexerRulesRoot.freshenParentAndChildIndexesDeeply();
+       combinedRulesRoot.freshenParentAndChildIndexesDeeply();
 
        System.out.println("after ="+combinedAST.toStringTree());
        System.out.println("lexer ="+lexerAST.toStringTree());
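Taken together, the Tool hunks above switch the grammar-loading entry points from GrammarAST to GrammarRootAST. The updated tests further down drive the new API roughly like this (newTool, tmpdir, and ErrorQueue are BaseTest helpers; this is a sketch of the call sequence, not an additional change):

    Tool antlr = newTool(new String[] {"-lib", tmpdir});
    ErrorQueue equeue = new ErrorQueue();
    antlr.addListener(equeue);                               // collect tool messages
    GrammarRootAST root = antlr.loadGrammar(tmpdir+"/M.g");  // parse file into a GrammarRootAST
    Grammar g = antlr.createGrammar(root);                   // wrap the AST in a Grammar
    g.fileName = "M.g";
    antlr.process(g);                                        // run the remaining analysis on g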
@@ -87,7 +87,9 @@ public class SemanticPipeline {
        // hmm...we don't get missing arg errors and such if we bail out here
 
        // STORE RULES/ACTIONS/SCOPES IN GRAMMAR
-       for (Rule r : collector.rules) g.defineRule(r);
+       for (Rule r : collector.rules) {
+           g.defineRule(r);
+       }
        for (GrammarAST a : collector.namedActions) {
            g.defineAction((GrammarAST)a.getParent());
        }
@@ -91,16 +91,15 @@ public class SymbolChecks {
            if ( prevRule==null ) {
                nameToRuleMap.put(r.name, r);
            }
-           else if ( r.g == prevRule.g ) {
-               // only generate warning if rules in same grammar
+           else {
                GrammarAST idNode = (GrammarAST)r.ast.getChild(0);
                errMgr.grammarError(ErrorType.RULE_REDEFINITION,
-                                   g.fileName, idNode.token, r.name);
+                                   r.g.fileName, idNode.token, r.name);
            }
            if ( globalScopeNames.contains(r.name) ) {
                GrammarAST idNode = (GrammarAST)r.ast.getChild(0);
                errMgr.grammarError(ErrorType.SYMBOL_CONFLICTS_WITH_GLOBAL_SCOPE,
-                                   g.fileName, idNode.token, r.name);
+                                   r.g.fileName, idNode.token, r.name);
            }
        }
    }
@@ -114,39 +113,59 @@ public class SymbolChecks {
                globalScopeNames.add(s.getName());
            }
            else {
-               Token idNode = ((GrammarAST) s.ast.getParent().getChild(0)).token;
+               GrammarAST idNode = (GrammarAST) s.ast.getParent().getChild(0);
                errMgr.grammarError(ErrorType.SCOPE_REDEFINITION,
-                                   g.fileName, idNode, s.getName());
+                                   idNode.g.fileName, idNode.token, s.getName());
            }
        }
    }
 
    /** Catch:
        tokens { A='a'; A; } can't redefine token type if has alias
        tokens { A; A='a'; }
        tokens { A='a'; A='b'; } can't have two aliases for single token type
        tokens { A='a'; B='a'; } can't have to token types for same string alias
     */
    public void checkTokenAliasRedefinitions(List<GrammarAST> aliases) {
        if ( aliases==null ) return;
        // map names, strings to root of A or (= A 'a')
        Map<String, GrammarAST> aliasTokenNames = new HashMap<String, GrammarAST>();
-       for (int i=0; i< aliases.size(); i++) {
+       Map<String, GrammarAST> aliasStringValues = new HashMap<String, GrammarAST>();
+       for (int i=0; i<aliases.size(); i++) {
            GrammarAST a = aliases.get(i);
-           GrammarAST idNode = a;
-           if ( a.getType()== ANTLRParser.ASSIGN ) {
+           GrammarAST idNode = a;
+           GrammarAST prevToken = aliasTokenNames.get(idNode.getText());
+           GrammarAST stringNode = null;
+           if ( a.getChildCount()>0 ) stringNode = (GrammarAST)a.getChild(1);
+           GrammarAST prevString = null;
+           if ( stringNode!=null ) prevString = aliasStringValues.get(stringNode.getText());
+           if ( a.getType() == ANTLRParser.ASSIGN ) { // A='a'
                idNode = (GrammarAST)a.getChild(0);
-               if ( g!=g.getOutermostGrammar() ) {
-                   errMgr.grammarError(ErrorType.TOKEN_ALIAS_IN_DELEGATE,
-                                       g.fileName, idNode.token, idNode.getText(), g.name);
+               if ( prevString==null ) { // not seen string before
+                   if ( stringNode!=null ) aliasStringValues.put(stringNode.getText(), a);
                }
            }
-           GrammarAST prev = aliasTokenNames.get(idNode.getText());
-           if ( prev==null ) {
-               aliasTokenNames.put(idNode.getText(), a);
-           }
-           else {
-               GrammarAST value = (GrammarAST)prev.getChild(1);
-               String valueText = null;
-               if ( value!=null ) valueText = value.getText();
-               errMgr.grammarError(ErrorType.TOKEN_ALIAS_REASSIGNMENT,
-                                   g.fileName, idNode.token, idNode.getText(), valueText);
-           }
-       }
-   }
+           if ( prevToken==null ) { // not seen before, define it
+               aliasTokenNames.put(idNode.getText(), a);
+           }
+
+           // we've defined token names and strings at this point if not seen previously.
+           // now, look for trouble.
+           if ( prevToken!=null ) {
+               if ( !(prevToken.getChildCount()==0 && a.getChildCount()==0) ) {
+                   // one or both have strings; disallow
+                   errMgr.grammarError(ErrorType.TOKEN_NAME_REASSIGNMENT,
+                                       a.g.fileName, idNode.token, idNode.getText());
+               }
+           }
+           if ( prevString!=null ) {
+               errMgr.grammarError(ErrorType.TOKEN_STRING_REASSIGNMENT,
+                                   a.g.fileName, idNode.token, idNode.getText()+"="+stringNode.getText(),
+                                   prevString.getChild(0).getText());
+           }
+       }
+   }
 
    public void checkForTokenConflicts(List<GrammarAST> tokenIDRefs) {
        for (GrammarAST a : tokenIDRefs) {
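The tokens-block cases listed in the Javadoc above map onto the two (renamed) error types roughly as follows; the mapping is read off the code in this hunk, so treat it as an illustration rather than documentation:

    tokens { A='a'; A; }      // TOKEN_NAME_REASSIGNMENT: A appears twice and a string is involved
    tokens { A; A='a'; }      // TOKEN_NAME_REASSIGNMENT: same check, other order
    tokens { A='a'; A='b'; }  // TOKEN_NAME_REASSIGNMENT: A cannot be given a second alias
    tokens { A='a'; B='a'; }  // TOKEN_STRING_REASSIGNMENT: 'a' is already assigned to A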
@@ -82,9 +82,6 @@ public class SymbolCollector extends GrammarTreeVisitor {
 
    @Override
    public void globalNamedAction(GrammarAST scope, GrammarAST ID, ActionAST action) {
-       // String scopeName = g.getDefaultActionScope();
-       // if ( scope!=null ) scopeName = scope.getText();
-       // namedActions.put(scopeName, ID.getText(), action);
        namedActions.add(ID);
        action.resolver = g;
    }
@@ -94,9 +94,6 @@ public class ErrorManager {
 
        if ( msg.args!=null ) { // fill in arg1, arg2, ...
            for (int i=0; i<msg.args.length; i++) {
-               if ( i==(msg.args.length-1) && msg.args[i]==null ) { // don't set last if null
-                   continue;
-               }
                String attr = "arg";
                if ( i>0 ) attr += i + 1;
                messageST.add(attr, msg.args[i]);
@@ -127,7 +124,7 @@ public class ErrorManager {
        if (locationValid) reportST.add("location", locationST);
        reportST.add("message", messageFormatST);
        //((DebugST)reportST).inspect();
-       //reportST.impl.dump();
+       // reportST.impl.dump();
        return reportST;
    }
@@ -49,7 +49,7 @@ public enum ErrorType {
    ERROR_READING_TOKENS_FILE("", ErrorSeverity.ERROR),
    DIR_NOT_FOUND("directory not found: <arg>", ErrorSeverity.ERROR),
    OUTPUT_DIR_IS_FILE("output directory is a file: <arg>", ErrorSeverity.ERROR),
-   CANNOT_OPEN_FILE("cannot find or open file: <arg><if(arg2)>; reason: <arg2><endif>", ErrorSeverity.ERROR),
+   CANNOT_OPEN_FILE("cannot find or open file: <arg><if(exception)>; reason: <exception><endif>", ErrorSeverity.ERROR),
    FILE_AND_GRAMMAR_NAME_DIFFER("grammar name <arg> and file name <arg2> differ", ErrorSeverity.ERROR),
    // FILENAME_EXTENSION_ERROR("", ErrorSeverity.ERROR),
@@ -123,8 +123,8 @@ public enum ErrorType {
    HETERO_ILLEGAL_IN_REWRITE_ALT("alts with rewrites can't use heterogeneous types left of ->", ErrorSeverity.ERROR),
    NO_SUCH_GRAMMAR_SCOPE("reference to undefined grammar in rule reference: <arg>.<arg2>", ErrorSeverity.ERROR),
    NO_SUCH_RULE_IN_SCOPE("rule <arg2> is not defined in grammar <arg>", ErrorSeverity.ERROR),
-   TOKEN_ALIAS_CONFLICT("cannot alias <arg>; string already assigned to <arg2>", ErrorSeverity.ERROR),
-   TOKEN_ALIAS_REASSIGNMENT("cannot alias <arg>; token name already <if(arg2)>assigned to <arg2><else>defined<endif>", ErrorSeverity.ERROR),
+   TOKEN_STRING_REASSIGNMENT("cannot alias <arg>; string already assigned to <arg2>", ErrorSeverity.ERROR),
+   TOKEN_NAME_REASSIGNMENT("cannot redefine <arg>; token name already <if(arg2)>assigned to <arg2><else>defined<endif>", ErrorSeverity.ERROR),
    TOKEN_VOCAB_IN_DELEGATE("tokenVocab option ignored in imported grammar <arg>", ErrorSeverity.ERROR),
    TOKEN_ALIAS_IN_DELEGATE("can't assign string to token name <arg> to string in imported grammar <arg2>", ErrorSeverity.ERROR),
    CANNOT_FIND_IMPORTED_FILE("can't find or load grammar <arg>", ErrorSeverity.ERROR),
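For reference, with <arg> and <arg2> filled in the way testSameStringTwoNames below fills them, the renamed templates render along these lines (the second line uses hypothetical arguments):

    cannot alias X='a'; string already assigned to A         // TOKEN_STRING_REASSIGNMENT
    cannot redefine A; token name already assigned to 'a'    // TOKEN_NAME_REASSIGNMENT, hypothetical args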
@@ -786,6 +786,32 @@ public abstract class BaseTest {
        }
    }
 
+   protected void checkGrammarSemanticsError(ErrorQueue equeue,
+                                             GrammarSemanticsMessage expectedMessage)
+       throws Exception
+   {
+       /*
+       System.out.println(equeue.infos);
+       System.out.println(equeue.warnings);
+       System.out.println(equeue.errors);
+       assertTrue("number of errors mismatch", n, equeue.errors.size());
+       */
+       ANTLRMessage foundMsg = null;
+       for (int i = 0; i < equeue.errors.size(); i++) {
+           ANTLRMessage m = (ANTLRMessage)equeue.errors.get(i);
+           if (m.errorType==expectedMessage.errorType ) {
+               foundMsg = m;
+           }
+       }
+       assertNotNull("no error; "+expectedMessage.errorType+" expected", foundMsg);
+       assertTrue("error is not a GrammarSemanticsMessage",
+                  foundMsg instanceof GrammarSemanticsMessage);
+       assertEquals(Arrays.toString(expectedMessage.args), Arrays.toString(foundMsg.args));
+       if ( equeue.size()!=1 ) {
+           System.err.println(equeue);
+       }
+   }
+
    protected void checkError(ErrorQueue equeue,
                              ANTLRMessage expectedMessage)
        throws Exception
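The new checkGrammarSemanticsError helper is exercised by testSameStringTwoNames further down; the call pattern there looks like this (g and equeue come from that test):

    Object expectedArg = "X='a'";
    Object expectedArg2 = "A";
    ErrorType expectedMsgID = ErrorType.TOKEN_STRING_REASSIGNMENT;
    GrammarSemanticsMessage expectedMessage =
        new GrammarSemanticsMessage(expectedMsgID, g.fileName, null, expectedArg, expectedArg2);
    checkGrammarSemanticsError(equeue, expectedMessage);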
@@ -1160,6 +1186,10 @@ public abstract class BaseTest {
        return nset.toString();
    }
 
+   public List<String> realElements(Vector elements) {
+       return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
+   }
+
    // override to track errors
 
    public void assertEquals(String msg, Object a, Object b) { try {Assert.assertEquals(msg,a,b);} catch (Error e) {lastTestFailed=true; throw e;} }
@@ -30,8 +30,9 @@
 package org.antlr.v4.test;
 
 import org.antlr.v4.Tool;
-import org.antlr.v4.tool.Grammar;
+import org.antlr.v4.tool.*;
 import org.junit.Test;
+import org.stringtemplate.v4.ST;
 
 public class TestCompositeGrammars extends BaseTest {
    protected boolean debug = false;
@@ -67,7 +68,6 @@ public class TestCompositeGrammars extends BaseTest {
        assertEquals("S.a\n", found);
    }
 
-/*
    @Test public void testDelegatorInvokesDelegateRuleWithArgs() throws Exception {
        // must generate something like:
        // public int a(int x) throws RecognitionException { return gS.a(x); }
@@ -120,7 +120,7 @@ public class TestCompositeGrammars extends BaseTest {
        String master =
            "grammar M;\n" +             // uses no rules from the import
            "import S;\n" +
-           "s : 'b' {gS.foo();} ;\n" +  // gS is import pointer
+           "s : 'b' {foo();} ;\n" +     // gS is import pointer
            "WS : (' '|'\\n') {skip();} ;\n" ;
        String found = execParser("M.g", master, "MParser", "MLexer",
                                  "s", "b", debug);
@@ -188,8 +188,7 @@ public class TestCompositeGrammars extends BaseTest {
    }
 
    @Test public void testDelegatesSeeSameTokenType2() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
-       ErrorManager.setErrorListener(equeue);
+       ErrorQueue equeue = new ErrorQueue();
        String slave =
            "parser grammar S;\n" +      // A, B, C token type order
            "tokens { A; B; C; }\n" +
@@ -213,29 +212,31 @@ public class TestCompositeGrammars extends BaseTest {
            "WS : (' '|'\\n') {skip();} ;\n" ;
        writeFile(tmpdir, "M.g", master);
        Tool antlr = newTool(new String[] {"-lib", tmpdir});
-       CompositeGrammar composite = new CompositeGrammar();
-       Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite);
-       composite.setDelegationRoot(g);
-       g.parseAndBuildAST();
-       g.composite.assignTokenTypes();
+       antlr.addListener(equeue);
+       GrammarRootAST root = antlr.loadGrammar(tmpdir+"/M.g");
+       Grammar g = antlr.createGrammar(root);
+       g.fileName = "M.g";
+       antlr.process(g);
 
-       String expectedTokenIDToTypeMap = "[A=4, B=5, C=6, WS=7]";
-       String expectedStringLiteralToTypeMap = "{}";
-       String expectedTypeToTokenList = "[A, B, C, WS]";
+       String expectedTokenIDToTypeMap = "{EOF=-1, B=3, A=4, C=5, WS=6}";
+       String expectedStringLiteralToTypeMap = "{'c'=5, 'a'=4, 'b'=3}";
+       String expectedTypeToTokenList = "[B, A, C, WS]";
 
-       assertEquals(expectedTokenIDToTypeMap,
-                    realElements(g.composite.tokenIDToTypeMap).toString());
-       assertEquals(expectedStringLiteralToTypeMap, g.composite.stringLiteralToTypeMap.toString());
-       assertEquals(expectedTypeToTokenList,
-                    realElements(g.composite.typeToTokenList).toString());
+       assertEquals(expectedTokenIDToTypeMap, g.tokenNameToTypeMap.toString());
+       assertEquals(expectedStringLiteralToTypeMap, g.stringLiteralToTypeMap.toString());
+       assertEquals(expectedTypeToTokenList, realElements(g.typeToTokenList).toString());
 
        assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size());
 
        String found = execParser("M.g", master, "MParser", "MLexer",
                                  "s", "aa", debug);
        assertEquals("S.x\n" +
                     "T.y\n", found);
    }
 
    @Test public void testCombinedImportsCombined() throws Exception {
        // for now, we don't allow combined to import combined
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
-       ErrorManager.setErrorListener(equeue);
+       ErrorQueue equeue = new ErrorQueue();
        String slave =
            "grammar S;\n" +
            "tokens { A; B; C; }\n" +
@@ -251,20 +252,21 @@ public class TestCompositeGrammars extends BaseTest {
            "s : x INT ;\n";
        writeFile(tmpdir, "M.g", master);
        Tool antlr = newTool(new String[] {"-lib", tmpdir});
-       CompositeGrammar composite = new CompositeGrammar();
-       Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite);
-       composite.setDelegationRoot(g);
-       g.parseAndBuildAST();
-       g.composite.assignTokenTypes();
+       antlr.addListener(equeue);
+       GrammarRootAST root = antlr.loadGrammar(tmpdir+"/M.g");
+       Grammar g = antlr.createGrammar(root);
+       g.fileName = "M.g";
+       antlr.process(g);
 
-       assertEquals("unexpected errors: "+equeue, 1, equeue.errors.size());
-       String expectedError = "error(161): "+tmpdir.toString().replaceFirst("\\-[0-9]+","")+"/M.g:2:8: combined grammar M cannot import combined grammar S";
-       assertEquals("unexpected errors: "+equeue, expectedError, equeue.errors.get(0).toString().replaceFirst("\\-[0-9]+",""));
+       assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size());
 
        String found = execParser("M.g", master, "MParser", "MLexer",
                                  "s", "x 34 9", debug);
        assertEquals("S.x\n", found);
    }
 
    @Test public void testSameStringTwoNames() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
-       ErrorManager.setErrorListener(equeue);
+       ErrorQueue equeue = new ErrorQueue();
        String slave =
            "parser grammar S;\n" +
            "tokens { A='a'; }\n" +
@@ -285,38 +287,39 @@ public class TestCompositeGrammars extends BaseTest {
            "WS : (' '|'\\n') {skip();} ;\n" ;
        writeFile(tmpdir, "M.g", master);
        Tool antlr = newTool(new String[] {"-lib", tmpdir});
-       CompositeGrammar composite = new CompositeGrammar();
-       Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite);
-       composite.setDelegationRoot(g);
-       g.parseAndBuildAST();
-       g.composite.assignTokenTypes();
+       antlr.addListener(equeue);
+       GrammarRootAST root = antlr.loadGrammar(tmpdir+"/M.g");
+       Grammar g = antlr.createGrammar(root);
+       g.fileName = "M.g";
+       antlr.process(g);
 
-       String expectedTokenIDToTypeMap = "[A=4, WS=5, X=6]";
-       String expectedStringLiteralToTypeMap = "{'a'=4}";
-       String expectedTypeToTokenList = "[A, WS, X]";
+       String expectedTokenIDToTypeMap = "{EOF=-1, T__0=3, WS=4, A=5, X=6}";
+       String expectedStringLiteralToTypeMap = "{'a'=6}";
+       String expectedTypeToTokenList = "[T__0, WS, A, X]";
 
-       assertEquals(expectedTokenIDToTypeMap,
-                    realElements(g.composite.tokenIDToTypeMap).toString());
-       assertEquals(expectedStringLiteralToTypeMap, g.composite.stringLiteralToTypeMap.toString());
-       assertEquals(expectedTypeToTokenList,
-                    realElements(g.composite.typeToTokenList).toString());
+       assertEquals(expectedTokenIDToTypeMap, g.tokenNameToTypeMap.toString());
+       assertEquals(expectedStringLiteralToTypeMap, g.stringLiteralToTypeMap.toString());
+       assertEquals(expectedTypeToTokenList, realElements(g.typeToTokenList).toString());
 
        Object expectedArg = "X='a'";
        Object expectedArg2 = "A";
-       int expectedMsgID = ErrorManager.MSG_TOKEN_ALIAS_CONFLICT;
+       ErrorType expectedMsgID = ErrorType.TOKEN_STRING_REASSIGNMENT;
        GrammarSemanticsMessage expectedMessage =
-           new GrammarSemanticsMessage(expectedMsgID, g, null, expectedArg, expectedArg2);
+           new GrammarSemanticsMessage(expectedMsgID, g.fileName, null, expectedArg, expectedArg2);
        checkGrammarSemanticsError(equeue, expectedMessage);
 
        assertEquals("unexpected errors: "+equeue, 1, equeue.errors.size());
 
        String expectedError =
-           "error(158): T.g:2:10: cannot alias X='a'; string already assigned to A";
-       assertEquals(expectedError, equeue.errors.get(0).toString());
+           "error(73): T.g:2:9: cannot alias X='a'; string already assigned to A";
+       ST msgST = antlr.errMgr.getMessageTemplate(equeue.errors.get(0));
+       String foundError = msgST.render();
+       assertEquals(expectedError, foundError);
    }
+/*
 
    @Test public void testSameNameTwoStrings() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String slave =
            "parser grammar S;\n" +
@@ -369,7 +372,7 @@ public class TestCompositeGrammars extends BaseTest {
    }
 
    @Test public void testImportedTokenVocabIgnoredWithWarning() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String slave =
            "parser grammar S;\n" +
@@ -407,7 +410,7 @@ public class TestCompositeGrammars extends BaseTest {
    }
 
    @Test public void testImportedTokenVocabWorksInRoot() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String slave =
            "parser grammar S;\n" +
@@ -448,7 +451,7 @@ public class TestCompositeGrammars extends BaseTest {
    }
 
    @Test public void testSyntaxErrorsInImportsNotThrownOut() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String slave =
            "parser grammar S;\n" +
@@ -474,7 +477,7 @@ public class TestCompositeGrammars extends BaseTest {
    }
 
    @Test public void testSyntaxErrorsInImportsNotThrownOut2() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String slave =
            "parser grammar S;\n" +
@@ -617,7 +620,7 @@ public class TestCompositeGrammars extends BaseTest {
            "WS : (' '|'\\n') {skip();} ;\n" ;
        writeFile(tmpdir, "/M.g", master);
 
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        Tool antlr = newTool(new String[] {"-lib", tmpdir});
        CompositeGrammar composite = new CompositeGrammar();
@@ -657,7 +660,7 @@ public class TestCompositeGrammars extends BaseTest {
            "a : A ;";
        writeFile(tmpdir, "/M.g", master);
 
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        Tool antlr = newTool(new String[] {"-lib", tmpdir});
        CompositeGrammar composite = new CompositeGrammar();
@@ -698,7 +701,7 @@ public class TestCompositeGrammars extends BaseTest {
    }
 
    @Test public void testKeywordVSIDGivesNoWarning() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String slave =
            "lexer grammar S;\n" +
@@ -721,7 +724,7 @@ public class TestCompositeGrammars extends BaseTest {
    }
 
    @Test public void testWarningForUndefinedToken() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String slave =
            "lexer grammar S;\n" +
@@ -748,7 +751,7 @@ public class TestCompositeGrammars extends BaseTest {
 
    // Make sure that M can import S that imports T.
    @Test public void test3LevelImport() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String slave =
            "parser grammar T;\n" +
@@ -794,7 +797,7 @@ public class TestCompositeGrammars extends BaseTest {
    }
 
    @Test public void testBigTreeOfImports() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String slave =
            "parser grammar T;\n" +
@@ -857,7 +860,7 @@ public class TestCompositeGrammars extends BaseTest {
    }
 
    @Test public void testRulesVisibleThroughMultilevelImport() throws Exception {
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String slave =
            "parser grammar T;\n" +
@@ -899,7 +902,7 @@ public class TestCompositeGrammars extends BaseTest {
 
    @Test public void testNestedComposite() throws Exception {
        // Wasn't compiling. http://www.antlr.org/jira/browse/ANTLR-438
-       org.antlr.test.ErrorQueue equeue = new org.antlr.test.ErrorQueue();
+       ErrorQueue equeue = new ErrorQueue();
        ErrorManager.setErrorListener(equeue);
        String gstr =
            "lexer grammar L;\n" +
@@ -955,7 +958,7 @@ public class TestCompositeGrammars extends BaseTest {
        boolean expecting = true; // should be ok
        assertEquals(expecting, ok);
    }
-
+*/
    @Test public void testHeadersPropogatedCorrectlyToImportedGrammars() throws Exception {
        String slave =
            "parser grammar S;\n" +
@@ -974,5 +977,4 @@ public class TestCompositeGrammars extends BaseTest {
        boolean expecting = true; // should be ok
        assertEquals(expecting, ok);
    }
-*/
 }