got invalid import error done
[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 6657]
commit b50c813cc5
parent f3fb19b921
@@ -45,7 +45,7 @@ problem reading token vocabulary file <arg>: <exception>
 >>
 DIR_NOT_FOUND(arg,exception,stackTrace) ::= "directory not found: <arg>"
 OUTPUT_DIR_IS_FILE(arg,arg2) ::= "output directory is a file: <arg>"
-CANNOT_OPEN_FILE(arg,exception) ::= "cannot find or open file: <arg><if(exception)>; reason: <exception><endif>"
+CANNOT_OPEN_FILE(arg,arg2) ::= "cannot find or open file: <arg><if(arg2)>; reason: <arg2><endif>"
 CIRCULAR_DEPENDENCY() ::= "your grammars contain a circular dependency and cannot be sorted into a valid build order."

 INTERNAL_ERROR(arg,arg2,exception,stackTrace) ::= <<
@@ -222,15 +222,16 @@ TOKEN_ALIAS_REASSIGNMENT(arg,arg2) ::=
 TOKEN_VOCAB_IN_DELEGATE(arg,arg2) ::=
 "tokenVocab option ignored in imported grammar <arg>"
 INVALID_IMPORT(arg,arg2) ::=
-"<arg.grammarTypeString> grammar <arg.name> cannot import <arg2.grammarTypeString> grammar <arg2.name>"
+"<arg.typeString> grammar <arg.name> cannot import <arg2.typeString> grammar <arg2.name>"
 IMPORTED_TOKENS_RULE_EMPTY(arg,arg2) ::=
 "no lexer rules contributed to <arg> from imported grammar <arg2>"
 IMPORT_NAME_CLASH(arg,arg2) ::=
-"combined grammar <arg.name> and imported <arg2.grammarTypeString> grammar <arg2.name> both generate <arg2.recognizerName>; import ignored"
+"<arg.typeString> grammar <arg.name> and imported <arg2.typeString> grammar <arg2.name> both generate <arg2.recognizerName>"
 AST_OP_IN_ALT_WITH_REWRITE(arg,arg2) ::=
 "rule <arg> alt <arg2> uses rewrite syntax and also an AST operator"
 WILDCARD_AS_ROOT(arg) ::= "Wildcard invalid as root; wildcard can itself be a tree"
-CONFLICTING_OPTION_IN_TREE_FILTER(arg,arg2) ::= "option <arg>=<arg2> conflicts with tree grammar filter mode"
+CONFLICTING_OPTION_IN_TREE_FILTER(arg,arg2) ::=
+"option <arg>=<arg2> conflicts with tree grammar filter mode"

 // GRAMMAR WARNINGS

@@ -300,7 +300,7 @@ public class Tool {

 p.setTreeAdaptor(new GrammarASTAdaptor(in));
 ParserRuleReturnScope r = p.grammarSpec();
-GrammarAST t = (GrammarAST) r.getTree();
+GrammarRootAST t = (GrammarRootAST) r.getTree();
 if ( internalOption_PrintGrammarTree ) System.out.println(t.toStringTree());
 g = new Grammar(this, t);
 g.fileName = fileName;
@@ -1,5 +1,6 @@
 package org.antlr.v4.semantics;

+import org.antlr.misc.MultiMap;
 import org.antlr.runtime.Token;
 import org.antlr.v4.misc.Utils;
 import org.antlr.v4.parse.ANTLRParser;
@@ -64,12 +65,32 @@ public class BasicSemanticChecks {
 public static final Set legalTokenOptions =
 new HashSet() {
 {
-add(defaultTokenOption);
+add(TerminalAST.defaultTokenOption);
 add("associativity");
 }
 };

-public static final String defaultTokenOption = "node";
+/** Set of valid imports. E.g., can only import a tree parser into
+ * another tree parser. Maps delegate to set of delegator grammar types.
+ * validDelegations.get(LEXER) gives list of the kinds of delegators
+ * that can import lexers.
+ */
+public static MultiMap<Integer,Integer> validImportTypes =
+new MultiMap<Integer,Integer>() {
+{
+map(ANTLRParser.LEXER, ANTLRParser.LEXER);
+map(ANTLRParser.LEXER, ANTLRParser.PARSER);
+map(ANTLRParser.LEXER, ANTLRParser.GRAMMAR);
+
+map(ANTLRParser.PARSER, ANTLRParser.PARSER);
+map(ANTLRParser.PARSER, ANTLRParser.GRAMMAR);
+
+map(ANTLRParser.TREE, ANTLRParser.TREE);
+
+// TODO: allow COMBINED
+// map(ANTLRParser.GRAMMAR, ANTLRParser.GRAMMAR);
+}
+};

 // TODO: track errors?

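Sketch (not part of the commit): the validImportTypes table added above maps a delegate grammar type to the grammar types that are allowed to import it. A minimal stand-alone illustration of that lookup, using stand-in integer constants rather than the real ANTLRParser token types:

import java.util.*;

// Stand-alone sketch of the delegate -> valid-delegator lookup.
// LEXER/PARSER/TREE/COMBINED are hypothetical stand-ins.
public class ImportTableSketch {
    static final int LEXER = 1, PARSER = 2, TREE = 3, COMBINED = 4;

    // delegate type -> grammar types that may import it
    static final Map<Integer, List<Integer>> validImportTypes = new HashMap<>();
    static {
        validImportTypes.put(LEXER, Arrays.asList(LEXER, PARSER, COMBINED));
        validImportTypes.put(PARSER, Arrays.asList(PARSER, COMBINED));
        validImportTypes.put(TREE, Arrays.asList(TREE));
    }

    // Mirrors the shape of checkImport: an unknown delegate type is not flagged.
    static boolean isValidImport(int delegatorType, int delegateType) {
        List<Integer> validDelegators = validImportTypes.get(delegateType);
        return validDelegators == null || validDelegators.contains(delegatorType);
    }

    public static void main(String[] args) {
        System.out.println(isValidImport(COMBINED, LEXER)); // true
        System.out.println(isValidImport(LEXER, TREE));     // false -> INVALID_IMPORT
    }
}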
@@ -288,6 +309,7 @@ public class BasicSemanticChecks {
 protected static void checkTreeFilterOptions(int gtype, GrammarRootAST root,
 Map<String, String> options)
 {
+if ( options==null ) return;
 String fileName = root.token.getInputStream().getSourceName();
 String filter = options.get("filter");
 if ( gtype==ANTLRParser.TREE && filter!=null && filter.equals("true") ) {
@@ -327,7 +349,30 @@ public class BasicSemanticChecks {
 wild);
 }

+protected static void checkImport(Grammar g, Token importID) {
+Grammar delegate = g.getImportedGrammar(importID.getText());
+if ( delegate==null ) return;
+List<Integer> validDelegators = validImportTypes.get(delegate.getType());
+if ( validDelegators!=null && !validDelegators.contains(g.getType()) ) {
+ErrorManager.grammarError(ErrorType.INVALID_IMPORT,
+g.fileName,
+importID,
+g, delegate);
+}
+if ( g.getType()==ANTLRParser.GRAMMAR &&
+(delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.LEXER))||
+delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.PARSER))) )
+{
+ErrorManager.grammarError(ErrorType.IMPORT_NAME_CLASH,
+g.fileName,
+importID,
+g, delegate);
+}
+
+
+}
+
 protected static void checkFOO(int gtype, Token ID) {

 }
 }
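Sketch (not part of the commit): the IMPORT_NAME_CLASH branch in checkImport flags a combined grammar that imports a grammar whose name collides with the combined grammar's own generated recognizer names. A stand-alone illustration of that comparison with hypothetical grammar names:

// Stand-alone sketch of the name-clash test; the names are hypothetical.
public class NameClashSketch {
    static String fileNameSuffix(String kind) {
        // same LEXER -> "Lexer", PARSER -> "Parser" mapping used by
        // Grammar.getGrammarTypeToFileNameSuffix in this commit
        return kind.equals("LEXER") ? "Lexer" : "Parser";
    }

    static boolean clashes(String combinedName, String delegateName) {
        return delegateName.equals(combinedName + fileNameSuffix("LEXER"))
            || delegateName.equals(combinedName + fileNameSuffix("PARSER"));
    }

    public static void main(String[] args) {
        // combined grammar T already generates TParser and TLexer, so importing
        // a grammar literally named "TLexer" would clash
        System.out.println(clashes("T", "TLexer")); // true -> IMPORT_NAME_CLASH
        System.out.println(clashes("T", "Common")); // false
    }
}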
@@ -73,13 +73,11 @@ import org.antlr.v4.tool.*;
 // that is, rewrite tree? maybe all passes are filters until code gen, which needs
 // tree grammar. 'course we won't try codegen if errors.
 public String name;
-public String fileName;
 GrammarASTWithOptions root;
-protected int gtype;
-//Grammar g; // which grammar are we checking
-public BasicSemanticTriggers(TreeNodeStream input, String fileName) {
+Grammar g; // which grammar are we checking
+public BasicSemanticTriggers(TreeNodeStream input, Grammar g) {
 this(input);
-this.fileName = fileName;
+this.g = g;
 }
 }

@@ -102,18 +100,19 @@ bottomup // do these "inside to outside" of expressions.
 ;

 grammarSpec
-: ^(GRAMMAR ID DOC_COMMENT? prequelConstructs ^(RULES .*))
+: ^( GRAMMAR ID DOC_COMMENT?
 {
 name = $ID.text;
 BasicSemanticChecks.checkGrammarName($ID.token);
-gtype = ((GrammarRootAST)$start).grammarType;
 root = (GrammarRootAST)$start;
 }
+prequelConstructs ^(RULES .*)
+)
 ;

 checkGrammarOptions // when we get back to root
 : GRAMMAR
-{BasicSemanticChecks.checkTreeFilterOptions(gtype, (GrammarRootAST)$GRAMMAR,
+{BasicSemanticChecks.checkTreeFilterOptions(g.getType(), (GrammarRootAST)$GRAMMAR,
 root.getOptions());}
 ;

@@ -125,21 +124,28 @@ grammarType
 */

 prequelConstructs
-: ( ^(o+=OPTIONS .*)
-| ^(i+=IMPORT .*)
-| ^(t+=TOKENS .*)
+: ( ^(o+=OPTIONS .+)
+| ^(i+=IMPORT delegateGrammar+)
+| ^(t+=TOKENS .+)
 )*
-{BasicSemanticChecks.checkNumPrequels(gtype, $o, $i, $t);}
+{BasicSemanticChecks.checkNumPrequels(g.getType(), $o, $i, $t);}
 ;

-rules : RULES {BasicSemanticChecks.checkNumRules(gtype, fileName, $RULES);} ;
+delegateGrammar
+: ( ^(ASSIGN ID id=ID)
+| id=ID
+)
+{BasicSemanticChecks.checkImport(g, $id.token);}
+;
+
+rules : RULES {BasicSemanticChecks.checkNumRules(g.getType(), g.fileName, $RULES);} ;

 option // TODO: put in grammar, or rule, or block
 : {inContext("OPTIONS")}? ^(ASSIGN o=ID optionValue)
 {
 GrammarAST parent = (GrammarAST)$start.getParent(); // OPTION
 GrammarAST parentWithOptionKind = (GrammarAST)parent.getParent();
-boolean ok = BasicSemanticChecks.checkOptions(gtype, parentWithOptionKind,
+boolean ok = BasicSemanticChecks.checkOptions(g.getType(), parentWithOptionKind,
 $ID.token, $optionValue.v);
 // store options into XXX_GRAMMAR, RULE, BLOCK nodes
 if ( ok ) {
@@ -156,21 +162,21 @@ optionValue returns [String v]
 | STAR
 ;

-rule: ^( RULE r=ID .*) {BasicSemanticChecks.checkInvalidRuleDef(gtype, $r.token);}
+rule: ^( RULE r=ID .*) {BasicSemanticChecks.checkInvalidRuleDef(g.getType(), $r.token);}
 ;

 ruleref
-: RULE_REF {BasicSemanticChecks.checkInvalidRuleRef(gtype, $RULE_REF.token);}
+: RULE_REF {BasicSemanticChecks.checkInvalidRuleRef(g.getType(), $RULE_REF.token);}
 ;

 tokenAlias
 : {inContext("TOKENS")}? ^(ASSIGN ID STRING_LITERAL)
-{BasicSemanticChecks.checkTokenAlias(gtype, $ID.token);}
+{BasicSemanticChecks.checkTokenAlias(g.getType(), $ID.token);}
 ;

 tokenRefWithArgs
 : ^(TOKEN_REF ARG_ACTION)
-{BasicSemanticChecks.checkTokenArgs(gtype, $TOKEN_REF.token);}
+{BasicSemanticChecks.checkTokenArgs(g.getType(), $TOKEN_REF.token);}
 ;

 elementOption
@@ -181,7 +187,7 @@ elementOption
 )
 )
 {
-boolean ok = BasicSemanticChecks.checkTokenOptions(gtype, (GrammarAST)$o.getParent(),
+boolean ok = BasicSemanticChecks.checkTokenOptions(g.getType(), (GrammarAST)$o.getParent(),
 $o.token, $value.text);
 if ( ok ) {
 if ( value!=null ) {
@@ -204,7 +210,7 @@ multiElementAltInTreeGrammar
 {
 int altNum = $start.getParent().getChildIndex() + 1; // alts are 1..n
 GrammarAST firstNode = (GrammarAST)$start.getChild(0);
-BasicSemanticChecks.checkRewriteForMultiRootAltInTreeGrammar(gtype,root.getOptions(),
+BasicSemanticChecks.checkRewriteForMultiRootAltInTreeGrammar(g.getType(),root.getOptions(),
 firstNode.token,
 altNum);
 }
@@ -212,10 +218,11 @@ multiElementAltInTreeGrammar

 // Check stuff like (^ A) (! r)
 astOps
-: ^(ROOT el=.) {BasicSemanticChecks.checkASTOps(gtype, root.getOptions(), $start, $el);}
-| ^(BANG el=.) {BasicSemanticChecks.checkASTOps(gtype, root.getOptions(), $start, $el);}
+: ^(ROOT el=.) {BasicSemanticChecks.checkASTOps(g.getType(), root.getOptions(), $start, $el);}
+| ^(BANG el=.) {BasicSemanticChecks.checkASTOps(g.getType(), root.getOptions(), $start, $el);}
 ;

 wildcardRoot
-: ^(TREE_BEGIN WILDCARD .*) {BasicSemanticChecks.checkWildcardRoot(gtype, $WILDCARD.token);}
+: ^(TREE_BEGIN WILDCARD .*)
+{BasicSemanticChecks.checkWildcardRoot(g.getType(), $WILDCARD.token);}
 ;
[File diff suppressed because it is too large]
@@ -32,9 +32,17 @@ public class SemanticsPipeline {

 // DO BASIC / EASY SEMANTIC CHECKS
 nodes.reset();
-BasicSemanticTriggers basics = new BasicSemanticTriggers(nodes,g.fileName);
+BasicSemanticTriggers basics = new BasicSemanticTriggers(nodes,g);
 basics.downup(g.ast);

+// NO DO DELEGATES
+if ( g.getImportedGrammars()!=null ) {
+for (Grammar d : g.getImportedGrammars()) {
+process(d);
+}
+}
+
+/* dump options
 TreeVisitor v = new TreeVisitor(adaptor);
 v.visit(g.ast,
 new TreeVisitorAction() {
@@ -49,5 +57,6 @@ public class SemanticsPipeline {
 }
 public Object post(Object t) { return t; }
 });
+*/
 }
 }
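Sketch (not part of the commit): the pipeline above now recurses into imported grammars and runs the same checks on each delegate. A stand-alone illustration of that walk over a hypothetical grammar tree (GrammarNode is a stand-in, not the ANTLR API):

import java.util.*;

// Stand-alone sketch of recursing into delegate grammars the way process() now does.
public class DelegateWalkSketch {
    static class GrammarNode {
        String name;
        List<GrammarNode> imports = new ArrayList<>();
        GrammarNode(String name) { this.name = name; }
    }

    static void process(GrammarNode g) {
        System.out.println("checking " + g.name);   // run semantic checks on g
        for (GrammarNode d : g.imports) process(d); // then on each delegate, preorder
    }

    public static void main(String[] args) {
        GrammarNode root = new GrammarNode("T");
        root.imports.add(new GrammarNode("CommonLexer"));
        process(root); // checking T, checking CommonLexer
    }
}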
@@ -1,7 +1,6 @@
 package org.antlr.v4.tool;

 import org.antlr.runtime.*;
-import org.antlr.runtime.tree.CommonTree;
 import org.antlr.v4.Tool;
 import org.antlr.v4.parse.ANTLRLexer;
 import org.antlr.v4.parse.ANTLRParser;
@@ -19,19 +18,26 @@ public class Grammar {

 public Tool tool;
 public String name;
-public GrammarAST ast;
+public GrammarRootAST ast;
 public String text; // testing only
 public String fileName;

+/** Was this created from a COMBINED grammar? */
+public boolean implicitLexer;
+
+/** If we're imported, who imported us? If null, implies grammar is root */
+public Grammar parent;
 protected List<Grammar> importedGrammars;
 protected Map<String, Rule> rules = new HashMap<String, Rule>();

 /** A list of options specified at the grammar level such as language=Java. */
 protected Map<String, String> options;

-public Grammar(Tool tool, GrammarAST ast) {
+public Grammar(Tool tool, GrammarRootAST ast) {
+if ( ast==null ) throw new IllegalArgumentException("can't pass null tree");
 this.tool = tool;
 this.ast = ast;
+this.name = ((GrammarAST)ast.getChild(0)).getText();
 }

 /** For testing */
@@ -42,16 +48,16 @@ public class Grammar {
 CommonTokenStream tokens = new CommonTokenStream(lexer);
 ANTLRParser p = new ANTLRParser(tokens);
 ParserRuleReturnScope r = p.grammarSpec();
-ast = (GrammarAST) r.getTree();
+ast = (GrammarRootAST) r.getTree();
 System.out.println(ast.toStringTree());
 }

 public void loadImportedGrammars() {
-CommonTree i = (CommonTree)ast.getFirstChildWithType(ANTLRParser.IMPORT);
+GrammarAST i = (GrammarAST)ast.getFirstChildWithType(ANTLRParser.IMPORT);
 if ( i==null ) return;
 importedGrammars = new ArrayList<Grammar>();
 for (Object c : i.getChildren()) {
-CommonTree t = (CommonTree)c;
+GrammarAST t = (GrammarAST)c;
 String importedGrammarName = null;
 if ( t.getType()==ANTLRParser.ASSIGN ) {
 importedGrammarName = t.getChild(1).getText();
@@ -63,6 +69,7 @@ public class Grammar {
 }
 try {
 Grammar g = tool.load(importedGrammarName+".g");
+g.parent = this;
 importedGrammars.add(g);
 }
 catch (Exception e) {
@@ -74,4 +81,115 @@ public class Grammar {
 public Rule getRule(String name) {
 return null;
 }
+
+/** Get list of all delegates from all grammars in the delegate subtree of g.
+ * The grammars are in delegation tree preorder. Don't include ourselves
+ * in list as we're not a delegate of ourselves.
+ */
+public List<Grammar> getDelegates() {
+if ( importedGrammars==null ) return null;
+List<Grammar> delegates = new ArrayList<Grammar>();
+for (int i = 0; i < importedGrammars.size(); i++) {
+Grammar d = importedGrammars.get(i);
+delegates.add(d);
+List<Grammar> ds = d.getDelegates();
+if ( ds!=null ) delegates.addAll( ds );
+}
+return delegates;
+}
+
+public List<Grammar> getImportedGrammars() { return importedGrammars; }
+
+/** Get delegates below direct delegates of g
+public List<Grammar> getIndirectDelegates(Grammar g) {
+List<Grammar> direct = getDirectDelegates(g);
+List<Grammar> delegates = getDelegates(g);
+delegates.removeAll(direct);
+return delegates;
+}
+*/
+
+/** Return list of delegate grammars from root down to our parent.
+ * Order is [root, ..., this.parent]. (us not included).
+ */
+public List<Grammar> getDelegationAncestors() {
+Grammar root = getOutermostGrammar();
+if ( this==root ) return null;
+List<Grammar> grammars = new ArrayList<Grammar>();
+// walk backwards to root, collecting grammars
+Grammar p = this.parent;
+while ( p!=null ) {
+grammars.add(0, p); // add to head so in order later
+p = p.parent;
+}
+return grammars;
+}
+
+/** Return the grammar that imported us and our parents. Return this
+ * if we're root.
+ */
+public Grammar getOutermostGrammar() {
+if ( parent==null ) return this;
+return parent.getOutermostGrammar();
+}
+
+/** Get the name of the generated recognizer; may or may not be same
+ * as grammar name.
+ * Recognizer is TParser and TLexer from T if combined, else
+ * just use T regardless of grammar type.
+ */
+public String getRecognizerName() {
+String suffix = "";
+List<Grammar> grammarsFromRootToMe = getOutermostGrammar().getDelegationAncestors();
+System.out.println("grammarsFromRootToMe for "+name+"="+grammarsFromRootToMe);
+String qualifiedName = name;
+if ( grammarsFromRootToMe!=null ) {
+StringBuffer buf = new StringBuffer();
+for (Grammar g : grammarsFromRootToMe) {
+buf.append(g.name);
+buf.append('_');
+}
+buf.append(name);
+qualifiedName = buf.toString();
+}
+if ( getType()==ANTLRParser.GRAMMAR ||
+(getType()==ANTLRParser.LEXER && implicitLexer) )
+{
+suffix = Grammar.getGrammarTypeToFileNameSuffix(getType());
+}
+return qualifiedName+suffix;
+}
+
+/** Return grammar directly imported by this grammar */
+public Grammar getImportedGrammar(String name) {
+for (int i = 0; i < importedGrammars.size(); i++) {
+Grammar g = importedGrammars.get(i);
+if ( g.name.equals(name) ) return g;
+}
+return null;
+}
+
+public int getType() {
+if ( ast!=null ) return ast.grammarType;
+return 0;
+}
+
+public String getTypeString() {
+if ( ast==null ) return null;
+if ( getType()==ANTLRParser.GRAMMAR ) return "combined";
+return ANTLRParser.tokenNames[getType()].toLowerCase();
+}
+
+public static String getGrammarTypeToFileNameSuffix(int type) {
+switch ( type ) {
+case ANTLRParser.LEXER : return "Lexer";
+case ANTLRParser.PARSER : return "Parser";
+case ANTLRParser.TREE : return "";
+// if combined grammar, gen Parser and Lexer will be done later
+case ANTLRParser.GRAMMAR : return "Parser";
+default :
+return "<invalid>";
+}
+}
+
 }
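Sketch (not part of the commit): getTypeString() added above is what the renamed <arg.typeString> placeholder in the message templates resolves to. A stand-alone illustration of how the updated INVALID_IMPORT text would read for hypothetical grammars L and T:

// Stand-alone sketch; the type names and grammar names are hypothetical.
public class TypeStringSketch {
    static String typeString(String grammarKind) {
        // mirrors getTypeString(): combined grammars report "combined",
        // everything else is the lower-cased token name
        return grammarKind.equals("GRAMMAR") ? "combined" : grammarKind.toLowerCase();
    }

    public static void main(String[] args) {
        String msg = typeString("LEXER") + " grammar L cannot import "
                   + typeString("TREE") + " grammar T";
        System.out.println(msg); // lexer grammar L cannot import tree grammar T
    }
}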