got rule props in for current rule; fixed yet another freakin bug in implicit lexer extraction.
[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 8895]
parent 6dab71e160
commit 36f3590f6c
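For orientation: the first hunk below adds "this rule" property templates so that $start, $stop, $tree, $text and $st inside an action of the current rule resolve against _localctx itself rather than a labeled subrule. A rough, hypothetical sketch of what those templates expand to inside a generated rule method; the wrapper class, stub fields and imports here are assumptions for illustration, not part of this commit:

    import org.antlr.runtime.Token;        // assumed runtime types; the generated code
    import org.antlr.runtime.TokenStream;  // of this era casts input to TokenStream

    class CurrentRulePropertySketch {
        static class RuleContextStub { Token start; Token stop; Object tree; Object st; }

        RuleContextStub _localctx = new RuleContextStub(); // stand-in for the generated rule context
        TokenStream input;                                 // stand-in for the parser's input stream

        void embeddedActionSketch() {
            // $start / $stop / $tree / $st on the current rule (ThisRulePropertyRef_* templates)
            Token start = _localctx.start;
            Token stop  = _localctx.stop;
            Object tree = _localctx.tree;
            Object st   = _localctx.st;
            // $text on the current rule: everything this rule has matched so far
            String text = ((TokenStream)input).toString(_localctx.start, input.LT(-1));
        }
    }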
@@ -341,6 +341,12 @@ RulePropertyRef_tree(r) ::= "(_localctx.<r.label>!=null?((<file.ASTLabelType>)_
 RulePropertyRef_text(r) ::= "(_localctx.<r.label>!=null?((TokenStream)input).toString(_localctx.<r.label>.start,_localctx.<r.label>.stop):null)"
 RulePropertyRef_st(r) ::= "(_localctx.<r.label>!=null?_localctx.<r.label>.st:null)"
 
+ThisRulePropertyRef_start(r) ::= "_localctx.start"
+ThisRulePropertyRef_stop(r) ::= "_localctx.stop"
+ThisRulePropertyRef_tree(r) ::= "_localctx.tree"
+ThisRulePropertyRef_text(r) ::= "((TokenStream)input).toString(_localctx.start, input.LT(-1))"
+ThisRulePropertyRef_st(r) ::= "_localctx.st"
+
 DynScopeRef(s) ::= "<s.scope>"
 DynScopeAttrRef(s) ::= "<s.scope>.peek().<s.attr>"
 DynScopeAttrRef_negIndex(s, indexChunks) ::=
@@ -225,7 +225,7 @@ public class Tool {
     public void process(Grammar g) {
         g.loadImportedGrammars();
 
-        mergeImportedGrammars(g);
+        integrateImportedGrammars(g);
 
         GrammarTransformPipeline transform = new GrammarTransformPipeline();
         transform.process(g.ast);
@@ -374,7 +374,7 @@ public class Tool {
      *  The goal is a complete combined grammar so we can ignore subordinate
      *  grammars.
      */
-    public void mergeImportedGrammars(Grammar rootGrammar) {
+    public void integrateImportedGrammars(Grammar rootGrammar) {
         List<Grammar> imports = rootGrammar.getAllImportedGrammars();
         if ( imports==null ) return;
 
@@ -401,6 +401,7 @@ public class Tool {
         }
 
         for (Grammar imp : imports) {
+            // COPY TOKENS
             GrammarAST imp_tokensRoot = (GrammarAST)imp.ast.getFirstChildWithType(ANTLRParser.TOKENS);
             if ( imp_tokensRoot!=null ) {
                 System.out.println("imported tokens: "+imp_tokensRoot.getChildren());
@@ -417,6 +418,7 @@ public class Tool {
             if ( actionRoots!=null ) all_actionRoots.addAll(actionRoots);
             all_actionRoots.addAll(imp_actionRoots);
 
+            // COPY ACTIONS
             if ( imp_actionRoots!=null ) {
                 DoubleKeyMap<String, String, GrammarAST> namedActions =
                     new DoubleKeyMap<String, String, GrammarAST>();
@@ -468,6 +470,7 @@ public class Tool {
                 }
             }
 
+            // COPY RULES
             List<GrammarAST> rules = imp.ast.getNodesWithType(ANTLRParser.RULE);
             if ( rules!=null ) {
                 for (GrammarAST r : rules) {
@@ -480,6 +483,12 @@ public class Tool {
                     }
                 }
             }
+
+            GrammarAST optionsRoot = (GrammarAST)imp.ast.getFirstChildWithType(ANTLRParser.OPTIONS);
+            if ( optionsRoot!=null ) {
+                errMgr.grammarError(ErrorType.OPTIONS_IN_DELEGATE,
+                                    optionsRoot.g.fileName, optionsRoot.token, imp.name);
+            }
         }
         System.out.println("Grammar: "+rootGrammar.ast.toStringTree());
     }
@@ -560,7 +569,9 @@ public class Tool {
             }
         }
         int nLexicalRules = rulesWeMoved.size();
-        rules.removeAll(rulesWeMoved);
+        for (GrammarAST r : rulesWeMoved) {
+            combinedRulesRoot.deleteChild( r );
+        }
 
         // Will track 'if' from IF : 'if' ; rules to avoid defining new token for 'if'
         Map<String,String> litAliases =
@@ -576,6 +587,7 @@ public class Tool {
         Set<String> stringLiterals = combinedGrammar.getStringLiterals();
         // add strings from combined grammar (and imported grammars) into lexer
         // put them first as they are keywords; must resolve ambigs to these rules
+//        System.out.println("strings from parser: "+stringLiterals);
         for (String lit : stringLiterals) {
             if ( litAliases!=null && litAliases.containsKey(lit) ) continue; // already has rule
             // create for each literal: (RULE <uniquename> (BLOCK (ALT <lit>))
@@ -591,10 +603,15 @@ public class Tool {
             litRule.addChild(new TerminalAST(idToken));
             litRule.addChild(blk);
             lexerRulesRoot.getChildren().add(0, litRule); // add first
+            lexerRulesRoot.freshenParentAndChildIndexes(); // reset indexes and set litRule parent
         }
 
-        lexerAST.freshenParentAndChildIndexesDeeply();
-        combinedAST.freshenParentAndChildIndexesDeeply();
+        lexerAST.sanityCheckParentAndChildIndexes();
+        combinedAST.sanityCheckParentAndChildIndexes();
+//        System.out.println(combinedAST.toTokenString());
+
+//        lexerAST.freshenParentAndChildIndexesDeeply();
+//        combinedAST.freshenParentAndChildIndexesDeeply();
 
         System.out.println("after extract implicit lexer ="+combinedAST.toStringTree());
         System.out.println("lexer ="+lexerAST.toStringTree());
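The implicit-lexer fix above comes down to a bookkeeping detail: the generated literal rule is spliced into the lexer's RULES subtree by mutating the children list directly, which leaves parent pointers and child indexes stale until freshenParentAndChildIndexes() runs. A small self-contained illustration of that pattern using the ANTLR 3 runtime tree classes; the token types and names here are made up for the sketch and are not taken from the commit:

    import org.antlr.runtime.CommonToken;
    import org.antlr.runtime.tree.CommonTree;

    public class FreshenIndexesSketch {
        public static void main(String[] args) {
            // A fake RULES root with one existing child.
            CommonTree lexerRulesRoot = new CommonTree(new CommonToken(1, "RULES"));
            lexerRulesRoot.addChild(new CommonTree(new CommonToken(2, "WS")));

            // Splice a new rule in at position 0 by editing the children list directly;
            // this bypasses addChild(), so parent/childIndex bookkeeping is not done.
            CommonTree litRule = new CommonTree(new CommonToken(3, "T__0"));
            lexerRulesRoot.getChildren().add(0, litRule);

            // Without this call litRule has no parent and the child indexes are stale,
            // which is what sanityCheckParentAndChildIndexes() flags.
            lexerRulesRoot.freshenParentAndChildIndexes();
            lexerRulesRoot.sanityCheckParentAndChildIndexes();
        }
    }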
@@ -40,6 +40,14 @@ import java.util.*;
 
 /** */
 public class ActionTranslator implements ActionSplitterListener {
+    public static final Map<String, Class> thisRulePropToModelMap = new HashMap<String, Class>() {{
+        put("start", ThisRulePropertyRef_start.class);
+        put("stop", ThisRulePropertyRef_stop.class);
+        put("tree", ThisRulePropertyRef_tree.class);
+        put("text", ThisRulePropertyRef_text.class);
+        put("st", ThisRulePropertyRef_st.class);
+    }};
+
     public static final Map<String, Class> rulePropToModelMap = new HashMap<String, Class>() {{
         put("start", RulePropertyRef_start.class);
         put("stop", RulePropertyRef_stop.class);
@@ -123,7 +131,7 @@ public class ActionTranslator implements ActionSplitterListener {
         switch ( a.dict.type ) {
             case ARG: chunks.add(new ArgRef(x.getText())); break;
             case RET: chunks.add(new RetValueRef(x.getText())); break;
-//            case PREDEFINED_RULE: chunks.add(new RetValueRef(x.getText())); break;
+            case PREDEFINED_RULE: chunks.add(getRulePropertyRef(x)); break;
 //            case PREDEFINED_TREE_RULE: chunks.add(new RetValueRef(x.getText())); break;
         }
     }
@@ -175,7 +183,14 @@ public class ActionTranslator implements ActionSplitterListener {
                 else {
                     chunks.add(new QRetValueRef(getRuleLabel(x.getText()), y.getText())); break;
                 }
-            case PREDEFINED_RULE: chunks.add(getRulePropertyRef(x, y)); break;
+            case PREDEFINED_RULE:
+                if ( a.dict == Rule.predefinedRulePropertiesDict ) {
+                    chunks.add(getRulePropertyRef(y));
+                }
+                else {
+                    chunks.add(getRulePropertyRef(x, y));
+                }
+                break;
             case TOKEN: chunks.add(getTokenPropertyRef(x, y)); break;
 //            case PREDEFINED_LEXER_RULE: chunks.add(new RetValueRef(x.getText())); break;
 //            case PREDEFINED_TREE_RULE: chunks.add(new RetValueRef(x.getText())); break;
@@ -259,9 +274,24 @@ public class ActionTranslator implements ActionSplitterListener {
         return null;
     }
 
-    RulePropertyRef getRulePropertyRef(Token x, Token y) {
+    // $text
+    RulePropertyRef getRulePropertyRef(Token prop) {
         try {
-            Class c = rulePropToModelMap.get(y.getText());
+            Class c = thisRulePropToModelMap.get(prop.getText());
+            Constructor ctor = c.getConstructor(new Class[] {String.class});
+            RulePropertyRef ref =
+                (RulePropertyRef)ctor.newInstance(getRuleLabel(prop.getText()));
+            return ref;
+        }
+        catch (Exception e) {
+            factory.getGrammar().tool.errMgr.toolError(ErrorType.INTERNAL_ERROR, e);
+        }
+        return null;
+    }
+
+    RulePropertyRef getRulePropertyRef(Token x, Token prop) {
+        try {
+            Class c = rulePropToModelMap.get(prop.getText());
             Constructor ctor = c.getConstructor(new Class[] {String.class});
             RulePropertyRef ref =
                 (RulePropertyRef)ctor.newInstance(getRuleLabel(x.getText()));
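The two getRulePropertyRef overloads above share one pattern: look the property name up in a Map<String, Class>, then reflectively invoke the model class's single String constructor. A stripped-down, hypothetical version of that dispatch, with the codegen model classes reduced to stubs so the snippet stands alone; only the shape of the lookup is taken from the commit:

    import java.lang.reflect.Constructor;
    import java.util.HashMap;
    import java.util.Map;

    public class PropertyRefDispatchSketch {
        // Stub model hierarchy standing in for the real codegen model classes.
        static class RulePropertyRef { final String label; public RulePropertyRef(String label) { this.label = label; } }
        static class ThisRulePropertyRef_text extends RulePropertyRef { public ThisRulePropertyRef_text(String label) { super(label); } }
        static class ThisRulePropertyRef_start extends RulePropertyRef { public ThisRulePropertyRef_start(String label) { super(label); } }

        static final Map<String, Class> thisRulePropToModelMap = new HashMap<String, Class>() {{
            put("text",  ThisRulePropertyRef_text.class);
            put("start", ThisRulePropertyRef_start.class);
        }};

        // Same shape as getRulePropertyRef(Token prop): map lookup + reflective construction.
        static RulePropertyRef getRulePropertyRef(String propName, String ruleLabel) {
            try {
                Class c = thisRulePropToModelMap.get(propName);
                Constructor ctor = c.getConstructor(new Class[] {String.class});
                return (RulePropertyRef) ctor.newInstance(ruleLabel);
            }
            catch (Exception e) {
                e.printStackTrace(); // the real code routes this through ErrorType.INTERNAL_ERROR
            }
            return null;
        }

        public static void main(String[] args) {
            // Prints "ThisRulePropertyRef_text": $text on the current rule picks that model class.
            System.out.println(getRulePropertyRef("text", "r").getClass().getSimpleName());
        }
    }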
@@ -0,0 +1,37 @@
+/*
+ [The "BSD license"]
+ Copyright (c) 2011 Terence Parr
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+ 3. The name of the author may not be used to endorse or promote products
+    derived from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+package org.antlr.v4.codegen.model.actions;
+
+/** */
+public class ThisRulePropertyRef_st extends RulePropertyRef {
+    public ThisRulePropertyRef_st(String label) {
+        super(label);
+    }
+}
@@ -0,0 +1,37 @@
+/* [The "BSD license"] Copyright (c) 2011 Terence Parr; license text identical to the ThisRulePropertyRef_st file above */
+
+package org.antlr.v4.codegen.model.actions;
+
+/** */
+public class ThisRulePropertyRef_start extends RulePropertyRef {
+    public ThisRulePropertyRef_start(String label) {
+        super(label);
+    }
+}
@@ -0,0 +1,37 @@
+/* [The "BSD license"] Copyright (c) 2011 Terence Parr; license text identical to the ThisRulePropertyRef_st file above */
+
+package org.antlr.v4.codegen.model.actions;
+
+/** */
+public class ThisRulePropertyRef_stop extends RulePropertyRef {
+    public ThisRulePropertyRef_stop(String label) {
+        super(label);
+    }
+}
@@ -0,0 +1,37 @@
+/* [The "BSD license"] Copyright (c) 2011 Terence Parr; license text identical to the ThisRulePropertyRef_st file above */
+
+package org.antlr.v4.codegen.model.actions;
+
+/** */
+public class ThisRulePropertyRef_text extends RulePropertyRef {
+    public ThisRulePropertyRef_text(String label) {
+        super(label);
+    }
+}
@@ -0,0 +1,37 @@
+/* [The "BSD license"] Copyright (c) 2011 Terence Parr; license text identical to the ThisRulePropertyRef_st file above */
+
+package org.antlr.v4.codegen.model.actions;
+
+/** */
+public class ThisRulePropertyRef_tree extends RulePropertyRef {
+    public ThisRulePropertyRef_tree(String label) {
+        super(label);
+    }
+}
@@ -85,7 +85,7 @@ public String grammarName;
 public GrammarAST currentRuleAST;
 public String currentModeName = LexerGrammar.DEFAULT_MODE_NAME;
 public String currentRuleName;
-public GrammarAST currentRuleBlock;
+//public GrammarAST currentRuleBlock;
 public GrammarAST currentOuterAltRoot;
 public int currentOuterAltNumber = 1; // 1..n
 public int rewriteEBNFLevel = 0;
@@ -136,12 +136,11 @@ public void discoverRules(GrammarAST rules) { }
 public void finishRules(GrammarAST rule) { }
 public void discoverRule(RuleAST rule, GrammarAST ID, List<GrammarAST> modifiers,
                          ActionAST arg, ActionAST returns, GrammarAST thrws,
-                         GrammarAST options, List<ActionAST> actions,
+                         GrammarAST options, List<GrammarAST> actions,
                          GrammarAST block) { }
 public void finishRule(GrammarAST rule, GrammarAST ID, GrammarAST block) { }
 public void ruleCatch(GrammarAST arg, ActionAST action) { }
 public void finallyAction(ActionAST action) { }
-public void ruleNamedAction(GrammarAST ID, ActionAST action) { }
 /** outermost alt */
 public void discoverAlt(AltAST alt) { }
 /** outermost alt */
@@ -253,7 +252,7 @@ mode : ^( MODE ID {currentModeName=$ID.text; modeDef($MODE, $ID);} rule+ ) ;
 rule
 @init {
 List<GrammarAST> mods = new ArrayList<GrammarAST>();
-List<ActionAST> actions = new ArrayList<ActionAST>();
+List<GrammarAST> actions = new ArrayList<GrammarAST>(); // track roots
 }
     :   ^( RULE ID {currentRuleName=$ID.text; currentRuleAST=$RULE;}
             DOC_COMMENT? (^(RULEMODIFIERS (m=ruleModifier{mods.add($m.start);})+))?
@@ -262,7 +261,7 @@ List<ActionAST> actions = new ArrayList<ActionAST>();
             thr=throwsSpec?
             ( ruleScopeSpec
             | opts=optionsSpec
-            | a=ruleAction {actions.add((ActionAST)$a.start);}
+            | a=ruleAction {actions.add($a.start);}
             )*
             {discoverRule((RuleAST)$RULE, $ID, mods, (ActionAST)$ARG_ACTION,
              $ret.start!=null?(ActionAST)$ret.start.getChild(0):null,
@@ -298,7 +297,7 @@ ruleScopeSpec
     ;
 
 ruleAction
-    :   ^(AT ID ACTION) {ruleNamedAction($ID, (ActionAST)$ACTION);}
+    :   ^(AT ID ACTION)
     ;
 
 ruleModifier
@@ -207,7 +207,7 @@ public class BasicSemanticChecks extends GrammarTreeVisitor {
                              List<GrammarAST> modifiers,
                              ActionAST arg, ActionAST returns,
                              GrammarAST thrws, GrammarAST options,
-                             List<ActionAST> actions, GrammarAST block)
+                             List<GrammarAST> actions, GrammarAST block)
     {
         checkInvalidRuleDef(ID.token);
     }
@@ -363,16 +363,6 @@ public class BasicSemanticChecks extends GrammarTreeVisitor {
                          Token optionID, String value)
     {
         boolean ok = true;
-        if ( optionID.getText().equals("tokenVocab") &&
-             g.parent!=null ) // only allow tokenVocab option in root grammar
-        {
-            g.tool.errMgr.grammarError(ErrorType.TOKEN_VOCAB_IN_DELEGATE,
-                                       g.fileName,
-                                       optionID,
-                                       g.name);
-            ok = false;
-        }
-
         if ( parent.getType()==ANTLRParser.BLOCK ) {
             if ( !legalBlockOptions.contains(optionID.getText()) ) { // block
                 g.tool.errMgr.grammarError(ErrorType.ILLEGAL_OPTION,
@@ -105,7 +105,7 @@ public class SymbolCollector extends GrammarTreeVisitor {
     public void discoverRule(RuleAST rule, GrammarAST ID,
                              List<GrammarAST> modifiers, ActionAST arg,
                              ActionAST returns, GrammarAST thrws,
-                             GrammarAST options, List<ActionAST> actions,
+                             GrammarAST options, List<GrammarAST> actions,
                              GrammarAST block)
     {
         int numAlts = block.getChildCount();
@@ -127,6 +127,13 @@ public class SymbolCollector extends GrammarTreeVisitor {
             r.retvals.type = AttributeDict.DictType.RET;
             r.retvals.ast = returns;
         }
+
+        for (GrammarAST a : actions) {
+            // a = ^(AT ID ACTION)
+            ActionAST action = (ActionAST) a.getChild(1);
+            currentRule.namedActions.put(a.getChild(0).getText(), action);
+            action.resolver = currentRule;
+        }
     }
 
     @Override
@@ -137,12 +144,6 @@ public class SymbolCollector extends GrammarTreeVisitor {
         currentRule.alt[currentOuterAltNumber].ast = alt;
     }
 
-    @Override
-    public void ruleNamedAction(GrammarAST ID, ActionAST action) {
-        currentRule.namedActions.put(ID.getText(), action);
-        action.resolver = currentRule;
-    }
-
     @Override
     public void actionInAlt(ActionAST action) {
         currentRule.defineActionInAlt(currentOuterAltNumber, action);
@@ -156,12 +156,8 @@ public class ErrorManager {
                             org.antlr.runtime.RecognitionException antlrException,
                             Object... args)
     {
-        switch ( etype.severity ) {
-            case WARNING: warnings++; break;
-            case ERROR: errors++; break;
-        }
         ANTLRMessage msg = new GrammarSyntaxMessage(etype,fileName,token,antlrException,args);
-        tool.error(msg);
+        emit(etype, msg);
     }
 
     public static void fatalInternalError(String error, Throwable e) {
@@ -188,19 +184,14 @@ public class ErrorManager {
      * @param args The arguments to pass to the StringTemplate
      */
     public void toolError(ErrorType errorType, Object... args) {
-        switch ( errorType.severity ) {
-            case WARNING: warnings++; break;
-            case ERROR: errors++; break;
-        }
-        tool.error(new ToolMessage(errorType, args));
+        ToolMessage msg = new ToolMessage(errorType, args);
+        emit(errorType, msg);
+        tool.error(msg);
     }
 
     public void toolError(ErrorType errorType, Throwable e, Object... args) {
-        switch ( errorType.severity ) {
-            case WARNING: warnings++; break;
-            case ERROR: errors++; break;
-        }
-        tool.error(new ToolMessage(errorType, e, args));
+        ToolMessage msg = new ToolMessage(errorType, e, args);
+        emit(errorType, msg);
     }
 
     public void grammarError(ErrorType etype,
@@ -208,12 +199,9 @@ public class ErrorManager {
                              org.antlr.runtime.Token token,
                              Object... args)
     {
-        switch ( etype.severity ) {
-            case WARNING: warnings++; break;
-            case ERROR: errors++; break;
-        }
         ANTLRMessage msg = new GrammarSemanticsMessage(etype,fileName,token,args);
-        tool.error(msg);
+        emit(etype, msg);
+
     }
 
     public void leftRecursionCycles(String fileName, Collection cycles) {
@@ -242,6 +230,13 @@ public class ErrorManager {
 
     // S U P P O R T C O D E
 
+    public void emit(ErrorType etype, ANTLRMessage msg) {
+        switch ( etype.severity ) {
+            case WARNING: warnings++; tool.warning(msg); break;
+            case ERROR: errors++; tool.error(msg); break;
+        }
+    }
+
     /** The format gets reset either from the Tool if the user supplied a command line option to that effect
      *  Otherwise we just use the default "antlr".
      */
@@ -125,8 +125,9 @@ public enum ErrorType {
     NO_SUCH_RULE_IN_SCOPE("rule <arg2> is not defined in grammar <arg>", ErrorSeverity.ERROR),
     TOKEN_STRING_REASSIGNMENT("cannot alias <arg>; string already assigned to <arg2>", ErrorSeverity.ERROR),
     TOKEN_NAME_REASSIGNMENT("cannot redefine <arg>; token name already <if(arg2)>assigned to <arg2><else>defined<endif>", ErrorSeverity.ERROR),
-    TOKEN_VOCAB_IN_DELEGATE("tokenVocab option ignored in imported grammar <arg>", ErrorSeverity.ERROR),
-    TOKEN_ALIAS_IN_DELEGATE("can't assign string to token name <arg> to string in imported grammar <arg2>", ErrorSeverity.ERROR),
+    //TOKEN_VOCAB_IN_DELEGATE("tokenVocab option ignored in imported grammar <arg>", ErrorSeverity.ERROR),
+    OPTIONS_IN_DELEGATE("options ignored in imported grammar <arg>", ErrorSeverity.WARNING),
+//    TOKEN_ALIAS_IN_DELEGATE("can't assign string to token name <arg> to string in imported grammar <arg2>", ErrorSeverity.ERROR),
     CANNOT_FIND_IMPORTED_FILE("can't find or load grammar <arg>", ErrorSeverity.ERROR),
     INVALID_IMPORT("<arg.typeString> grammar <arg.name> cannot import <arg2.typeString> grammar <arg2.name>", ErrorSeverity.ERROR),
     IMPORTED_TOKENS_RULE_EMPTY("", ErrorSeverity.ERROR),
@@ -711,7 +711,6 @@ public class Grammar implements AttributeResolver {
 
     public Set<String> getStringLiterals() {
         // TODO: super inefficient way to get these.
-        GrammarASTAdaptor adaptor = new GrammarASTAdaptor();
         SymbolCollector collector = new SymbolCollector(this);
         collector.process(ast); // no side-effects; find strings
         return collector.strings;
@@ -31,7 +31,7 @@ package org.antlr.v4.tool;
 
 import org.antlr.runtime.*;
 import org.antlr.runtime.tree.*;
-import org.antlr.v4.parse.ANTLRParser;
+import org.antlr.v4.parse.*;
 import org.antlr.v4.runtime.atn.ATNState;
 import org.antlr.v4.runtime.misc.IntervalSet;
 
@@ -89,6 +89,14 @@ public class GrammarAST extends CommonTree {
         return null;
     }
 
+    public void deleteChild(GrammarAST t) {
+        List<GrammarAST> dup = new ArrayList<GrammarAST>();
+        dup.addAll(children);
+        for (Object c : dup) {
+            if ( c == t ) deleteChild(t.getChildIndex());
+        }
+    }
+
     // TODO: move to basetree when i settle on how runtime works
     // TODO: don't include this node!!
     // TODO: reuse other method
@@ -131,4 +139,23 @@ public class GrammarAST extends CommonTree {
     public String toString() {
         return super.toString();
     }
+
+    public String toTokenString() {
+        CharStream input = this.token.getInputStream();
+        GrammarASTAdaptor adaptor = new GrammarASTAdaptor(input);
+        CommonTreeNodeStream nodes =
+            new CommonTreeNodeStream(adaptor, this);
+        StringBuffer buf = new StringBuffer();
+        GrammarAST o = (GrammarAST)nodes.LT(1);
+        int type = adaptor.getType(o);
+        while ( type!=Token.EOF ) {
+            buf.append(" ");
+            buf.append(o.token.getText());
+            nodes.consume();
+            o = (GrammarAST)nodes.LT(1);
+            type = adaptor.getType(o);
+        }
+        return buf.toString();
+    }
+
 }
@@ -774,12 +774,6 @@ public abstract class BaseTest {
                                               GrammarSemanticsMessage expectedMessage)
         throws Exception
     {
-        /*
-                System.out.println(equeue.infos);
-                System.out.println(equeue.warnings);
-                System.out.println(equeue.errors);
-                assertTrue("number of errors mismatch", n, equeue.errors.size());
-        */
         ANTLRMessage foundMsg = null;
         for (int i = 0; i < equeue.errors.size(); i++) {
             ANTLRMessage m = (ANTLRMessage)equeue.errors.get(i);
@@ -796,6 +790,26 @@ public abstract class BaseTest {
         }
     }
 
+    protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
+                                                GrammarSemanticsMessage expectedMessage)
+        throws Exception
+    {
+        ANTLRMessage foundMsg = null;
+        for (int i = 0; i < equeue.warnings.size(); i++) {
+            ANTLRMessage m = equeue.warnings.get(i);
+            if (m.errorType==expectedMessage.errorType ) {
+                foundMsg = m;
+            }
+        }
+        assertNotNull("no error; "+expectedMessage.errorType+" expected", foundMsg);
+        assertTrue("error is not a GrammarSemanticsMessage",
+                   foundMsg instanceof GrammarSemanticsMessage);
+        assertEquals(Arrays.toString(expectedMessage.args), Arrays.toString(foundMsg.args));
+        if ( equeue.size()!=1 ) {
+            System.err.println(equeue);
+        }
+    }
+
     protected void checkError(ErrorQueue equeue,
                               ANTLRMessage expectedMessage)
         throws Exception
@@ -235,7 +235,15 @@ public class TestActionTranslation extends BaseTest {
         testActions(scopeTemplate, "inline", action, expected);
     }
 
+    @Test public void testRefToTextAttributeForCurrentRule() throws Exception {
+        String action = "$a.text; $text";
+        String expected =
+            "((TokenStream)input).toString(_localctx.start, _localctx.stop); " +
+            "((TokenStream)input).toString(_localctx.start, _localctx.stop)";
+        testActions(attributeTemplate, "init", action, expected);
+        testActions(attributeTemplate, "inline", action, expected);
+        testActions(attributeTemplate, "finally", action, expected);
+    }
+
     @Test public void testDynamicRuleScopeRefInSubrule() throws Exception {
         String action = "$a::n;";
@@ -276,9 +284,7 @@ public class TestActionTranslation extends BaseTest {
     @Test public void testRefToTemplateAttributeForCurrentRule() throws Exception {
         String action = "$st=null;";
     }
-    @Test public void testRefToTextAttributeForCurrentRule() throws Exception {
-        String action = "$text";
-    }
     @Test public void testRefToStartAttributeForCurrentRule() throws Exception {
         String action = "$start;";
     }
@@ -67,6 +67,22 @@ public class TestCompositeGrammars extends BaseTest {
         assertEquals("S.a\n", found);
     }
 
+    @Test public void testBringInLiteralsFromDelegate() throws Exception {
+        String slave =
+            "parser grammar S;\n" +
+            "a : '=' 'a' {System.out.println(\"S.a\");} ;\n";
+        mkdir(tmpdir);
+        writeFile(tmpdir, "S.g", slave);
+        String master =
+            "grammar M;\n" +
+            "import S;\n" +
+            "s : a ;\n" +
+            "WS : (' '|'\\n') {skip();} ;\n" ;
+        String found = execParser("M.g", master, "MParser", "MLexer",
+                                  "s", "=a", debug);
+        assertEquals("S.a\n", found);
+    }
+
     @Test public void testDelegatorInvokesDelegateRuleWithArgs() throws Exception {
         // must generate something like:
         // public int a(int x) throws RecognitionException { return gS.a(x); }
@@ -254,58 +270,8 @@ public class TestCompositeGrammars extends BaseTest {
         assertEquals("S.x\n", found);
     }
 
-    @Test public void testSameStringTwoNames() throws Exception {
-        ErrorQueue equeue = new ErrorQueue();
-        String slave =
-            "parser grammar S;\n" +
-            "tokens { A='a'; }\n" +
-            "x : A {System.out.println(\"S.x\");} ;\n";
-        mkdir(tmpdir);
-        writeFile(tmpdir, "S.g", slave);
-        String slave2 =
-            "parser grammar T;\n" +
-            "tokens { X='a'; }\n" +
-            "y : X {System.out.println(\"T.y\");} ;\n";
-        mkdir(tmpdir);
-        writeFile(tmpdir, "T.g", slave2);
-
-        String master =
-            "grammar M;\n" +
-            "import S,T;\n" +
-            "s : x y ;\n" +
-            "WS : (' '|'\\n') {skip();} ;\n" ;
-        writeFile(tmpdir, "M.g", master);
-
-        Grammar g = new Grammar(tmpdir+"/M.g", master, equeue);
-
-        String expectedTokenIDToTypeMap = "{EOF=-1, T__0=3, WS=4, A=5, X=6}";
-        String expectedStringLiteralToTypeMap = "{'a'=6}";
-        String expectedTypeToTokenList = "[T__0, WS, A, X]";
-
-        assertEquals(expectedTokenIDToTypeMap, g.tokenNameToTypeMap.toString());
-        assertEquals(expectedStringLiteralToTypeMap, g.stringLiteralToTypeMap.toString());
-        assertEquals(expectedTypeToTokenList, realElements(g.typeToTokenList).toString());
-
-        Object expectedArg = "X='a'";
-        Object expectedArg2 = "A";
-        ErrorType expectedMsgID = ErrorType.TOKEN_STRING_REASSIGNMENT;
-        GrammarSemanticsMessage expectedMessage =
-            new GrammarSemanticsMessage(expectedMsgID, g.fileName, null, expectedArg, expectedArg2);
-        checkGrammarSemanticsError(equeue, expectedMessage);
-
-        assertEquals("unexpected errors: "+equeue, 1, equeue.errors.size());
-
-        String expectedError =
-            "error(73): T.g:2:9: cannot alias X='a'; string already assigned to A";
-//        ST msgST = antlr.errMgr.getMessageTemplate(equeue.errors.get(0));
-//        String foundError = msgST.render();
-//        assertEquals(expectedError, foundError);
-    }
-
+/*
     @Test public void testImportedTokenVocabIgnoredWithWarning() throws Exception {
         ErrorQueue equeue = new ErrorQueue();
-        ErrorManager.setErrorListener(equeue);
         String slave =
             "parser grammar S;\n" +
             "options {tokenVocab=whatever;}\n" +
@@ -320,71 +286,20 @@ public class TestCompositeGrammars extends BaseTest {
             "s : x ;\n" +
             "WS : (' '|'\\n') {skip();} ;\n" ;
         writeFile(tmpdir, "M.g", master);
-        Tool antlr = newTool(new String[] {"-lib", tmpdir});
-        CompositeGrammar composite = new CompositeGrammar();
-        Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite);
-        composite.setDelegationRoot(g);
-        g.parseAndBuildAST();
-        g.composite.assignTokenTypes();
+        Grammar g = new Grammar(tmpdir+"/M.g", master, equeue);
 
         Object expectedArg = "S";
-        int expectedMsgID = ErrorManager.MSG_TOKEN_VOCAB_IN_DELEGATE;
+        ErrorType expectedMsgID = ErrorType.OPTIONS_IN_DELEGATE;
         GrammarSemanticsMessage expectedMessage =
-            new GrammarSemanticsMessage(expectedMsgID, g, null, expectedArg);
+            new GrammarSemanticsMessage(expectedMsgID, g.fileName, null, expectedArg);
         checkGrammarSemanticsWarning(equeue, expectedMessage);
 
         assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size());
-        assertEquals("unexpected errors: "+equeue, 1, equeue.warnings.size());
-
-        String expectedError =
-            "warning(160): S.g:2:10: tokenVocab option ignored in imported grammar S";
-        assertEquals(expectedError, equeue.warnings.get(0).toString());
-    }
-
-    @Test public void testImportedTokenVocabWorksInRoot() throws Exception {
-        ErrorQueue equeue = new ErrorQueue();
-        ErrorManager.setErrorListener(equeue);
-        String slave =
-            "parser grammar S;\n" +
-            "tokens { A='a'; }\n" +
-            "x : A {System.out.println(\"S.x\");} ;\n";
-        mkdir(tmpdir);
-        writeFile(tmpdir, "S.g", slave);
-
-        String tokens =
-            "A=99\n";
-        writeFile(tmpdir, "Test.tokens", tokens);
-
-        String master =
-            "grammar M;\n" +
-            "options {tokenVocab=Test;}\n" +
-            "import S;\n" +
-            "s : x ;\n" +
-            "WS : (' '|'\\n') {skip();} ;\n" ;
-        writeFile(tmpdir, "M.g", master);
-        Tool antlr = newTool(new String[] {"-lib", tmpdir});
-        CompositeGrammar composite = new CompositeGrammar();
-        Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite);
-        composite.setDelegationRoot(g);
-        g.parseAndBuildAST();
-        g.composite.assignTokenTypes();
-
-        String expectedTokenIDToTypeMap = "[A=99, WS=101]";
-        String expectedStringLiteralToTypeMap = "{'a'=100}";
-        String expectedTypeToTokenList = "[A, 'a', WS]";
-
-        assertEquals(expectedTokenIDToTypeMap,
-                     realElements(g.composite.tokenIDToTypeMap).toString());
-        assertEquals(expectedStringLiteralToTypeMap, g.composite.stringLiteralToTypeMap.toString());
-        assertEquals(expectedTypeToTokenList,
-                     realElements(g.composite.typeToTokenList).toString());
-
-        assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size());
+        assertEquals("unexpected warnings: "+equeue, 1, equeue.warnings.size());
     }
 
     @Test public void testSyntaxErrorsInImportsNotThrownOut() throws Exception {
         ErrorQueue equeue = new ErrorQueue();
-        ErrorManager.setErrorListener(equeue);
         String slave =
             "parser grammar S;\n" +
             "options {toke\n";
@@ -397,41 +312,9 @@ public class TestCompositeGrammars extends BaseTest {
             "s : x ;\n" +
             "WS : (' '|'\\n') {skip();} ;\n" ;
         writeFile(tmpdir, "M.g", master);
-        Tool antlr = newTool(new String[] {"-lib", tmpdir});
-        CompositeGrammar composite = new CompositeGrammar();
-        Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite);
-        composite.setDelegationRoot(g);
-        g.parseAndBuildAST();
-        g.composite.assignTokenTypes();
-
-        // whole bunch of errors from bad S.g file
-        assertEquals("unexpected errors: "+equeue, 5, equeue.errors.size());
-    }
-
-    @Test public void testSyntaxErrorsInImportsNotThrownOut2() throws Exception {
-        ErrorQueue equeue = new ErrorQueue();
-        ErrorManager.setErrorListener(equeue);
-        String slave =
-            "parser grammar S;\n" +
-            ": A {System.out.println(\"S.x\");} ;\n";
-        mkdir(tmpdir);
-        writeFile(tmpdir, "S.g", slave);
-
-        String master =
-            "grammar M;\n" +
-            "import S;\n" +
-            "s : x ;\n" +
-            "WS : (' '|'\\n') {skip();} ;\n" ;
-        writeFile(tmpdir, "M.g", master);
-        Tool antlr = newTool(new String[] {"-lib", tmpdir});
-        CompositeGrammar composite = new CompositeGrammar();
-        Grammar g = new Grammar(antlr,tmpdir+"/M.g",composite);
-        composite.setDelegationRoot(g);
-        g.parseAndBuildAST();
-        g.composite.assignTokenTypes();
-
-        // whole bunch of errors from bad S.g file
-        assertEquals("unexpected errors: "+equeue, 3, equeue.errors.size());
+        Grammar g = new Grammar(tmpdir+"/M.g", master, equeue);
+
+        assertEquals(ErrorType.SYNTAX_ERROR, equeue.errors.get(0).errorType);
     }
 
     @Test public void testDelegatorRuleOverridesDelegate() throws Exception {
@@ -456,7 +339,7 @@ public class TestCompositeGrammars extends BaseTest {
             "parser grammar JavaDecl;\n" +
             "type : 'int' ;\n" +
             "decl : type ID ';'\n" +
-            " | type ID init ';' {System.out.println(\"JavaDecl: \"+$decl.text);}\n" +
+            " | type ID init ';' {System.out.println(\"JavaDecl: \"+$text);}\n" +
             " ;\n" +
             "init : '=' INT ;\n";
         mkdir(tmpdir);
@@ -476,6 +359,7 @@ public class TestCompositeGrammars extends BaseTest {
         assertEquals("JavaDecl: floatx=3;\n", found);
     }
 
+/*
     @Test public void testDelegatorRuleOverridesDelegates() throws Exception {
         String slave =
             "parser grammar S;\n" +