got nullable kids tree parsing working. got $rule.text in tree parsing working
[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 9077]
commit f02db87c02 (parent e8a2a738cf)
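Background for the template change below: the generated tree walker only sees DOWN/UP navigation tokens when a node actually has children, so a nullable child list has to be guarded by a check for DOWN before descending. The following is a minimal, self-contained sketch of that walking pattern (illustrative only, not code from this commit; the class and method names are made up):

import java.util.Arrays;
import java.util.List;

// Walks a tree serialized as: root DOWN child* UP (see TestTreeIterator below),
// treating the child list as nullable by peeking for DOWN before descending.
public class NullableKidsSketch {
    static int walkNode(List<String> stream, int i) {
        String root = stream.get(i++);                       // match the root node
        if (i < stream.size() && "DOWN".equals(stream.get(i))) {
            i++;                                             // consume DOWN
            while (!"UP".equals(stream.get(i))) {
                i = walkNode(stream, i);                     // recurse into each kid
            }
            i++;                                             // consume UP
        }
        System.out.println("matched " + root);
        return i;
    }

    public static void main(String[] args) {
        walkNode(Arrays.asList("A", "DOWN", "B", "UP", "EOF"), 0); // "(A B)" serializes as A DOWN B UP EOF
        walkNode(Arrays.asList("A", "EOF"), 0);                    // "A" alone has no DOWN/UP
    }
}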
@@ -30,12 +30,12 @@ package org.antlr.v4.runtime;

 /**
  */
-public class MismatchedTreeNodeException extends RecognitionException {
+public class MismatchedASTNodeException extends RecognitionException {
-public MismatchedTreeNodeException() {
+public MismatchedASTNodeException() {
 }

-public MismatchedTreeNodeException(BaseRecognizer recognizer,
+public MismatchedASTNodeException(BaseRecognizer recognizer,
 IntStream input, int firstSet)
 {
 super(recognizer, input, recognizer._ctx);
 }

@@ -173,6 +173,7 @@ public class Recognizer<ATNInterpreter> {
 * so that it creates a new Java type.
 */
 public String getTokenErrorDisplay(Token t) {
+if ( t==null ) return "<no token>";
 String s = t.getText();
 if ( s==null ) {
 if ( t.getType()==Token.EOF ) {
@@ -32,6 +32,8 @@ package org.antlr.v4.runtime.atn;
 import java.util.*;

 public class ATNState {
+public static final int INITIAL_NUM_TRANSITIONS = 4;
+
 // constants for serialization
 public static final int BASIC = 1;
 public static final int RULE_START = 2;

@@ -79,15 +81,17 @@ public class ATNState {

 public int stateNumber = INVALID_STATE_NUMBER;

-// public Rule rule;
 public int ruleIndex; // at runtime, we don't have Rule objects

 /** Which ATN are we in? */
 public ATN atn = null;

-/** ATN state is associated with which node in AST? */
+//public Transition transition;
-// public GrammarAST ast;
-public Transition transition;
+/** Track the transitions emanating from this ATN state. */
+protected List<Transition> transitions =
+new ArrayList<Transition>(INITIAL_NUM_TRANSITIONS);

 /** For o-A->o type ATN tranitions, record the label that leads to this
 * state. Useful for creating rich error messages when we find
 * insufficiently (with preds) covered states.

@@ -109,31 +113,22 @@ public class ATNState {
 return String.valueOf(stateNumber);
 }

-public int getNumberOfTransitions() {
+public int getNumberOfTransitions() { return transitions.size(); }
-if ( transition!=null ) return 1;
-return 0;
-}

-public void addTransition(Transition e) {
+public void addTransition(Transition e) { transitions.add(e); }
-if ( transition!=null ) throw new IllegalArgumentException("only one transition in state type "+
-getClass().getSimpleName());
-transition = e;
-}

-public Transition transition(int i) {
+public Transition transition(int i) { return transitions.get(i); }
-if ( i>0 ) throw new IllegalArgumentException("only one transition"+
+public void setTransition(int i, Transition e) {
-return transition;
+transitions.set(i, e);
 }

 public boolean onlyHasEpsilonTransitions() {
-return transition!=null && transition.isEpsilon();
+if ( transitions==null ) return false;
-}
+for (Transition t : transitions) {
+if ( !t.isEpsilon() ) return false;
-public void setTransition(int i, Transition e) {
+}
-if ( i>0 ) throw new IllegalArgumentException("only one transition"+
+return true;
-getClass().getSimpleName());
-transition = e;
 }

 public void setRuleIndex(int ruleIndex) { this.ruleIndex = ruleIndex; }
@@ -29,35 +29,11 @@

 package org.antlr.v4.runtime.atn;

-import java.util.*;

 public class DecisionState extends ATNState {
-public static final int INITIAL_NUM_TRANSITIONS = 4;

-/** Track the transitions emanating from this ATN state. */
-public List<Transition> transitions = new ArrayList<Transition>(INITIAL_NUM_TRANSITIONS);

 public int decision = -1;

 public boolean isGreedy = true;

-@Override
-public int getNumberOfTransitions() { return transitions.size(); }

-@Override
-public void addTransition(Transition e) { transitions.add(e); }

-public void addTransitionFirst(Transition e) { transitions.add(0, e); }

-@Override
-public Transition transition(int i) { return transitions.get(i); }

 @Override
 public boolean onlyHasEpsilonTransitions() { return true; }

-@Override
-public void setTransition(int i, Transition e) {
-transitions.set(i, e);
-}

 }
@@ -30,9 +30,9 @@
 package org.antlr.v4.runtime.atn;

 import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.dfa.DFA;
+import org.antlr.v4.runtime.dfa.*;
-import org.antlr.v4.runtime.dfa.DFAState;
 import org.antlr.v4.runtime.misc.OrderedHashSet;
+import org.antlr.v4.runtime.tree.ASTNodeStream;
 import org.stringtemplate.v4.misc.MultiMap;

 import java.util.*;

@@ -81,7 +81,7 @@ public class ParserATNSimulator extends ATNSimulator {
 // System.out.println(dot.getDOT(atn.rules.get(1), parser.getRuleNames()));
 }

-public int adaptivePredict(TokenStream input, int decision, RuleContext outerContext) {
+public int adaptivePredict(IntStream input, int decision, RuleContext outerContext) {
 predict_calls++;
 DFA dfa = decisionToDFA[decision];
 if ( dfa==null || dfa.s0==null ) {

@@ -100,14 +100,14 @@ public class ParserATNSimulator extends ATNSimulator {
 }
 }

-public int predictATN(DFA dfa, TokenStream input,
+public int predictATN(DFA dfa, IntStream input,
 RuleContext outerContext,
 boolean useContext)
 {
 if ( outerContext==null ) outerContext = RuleContext.EMPTY;
 this.outerContext = outerContext;
 if ( debug ) System.out.println("ATN decision "+dfa.decision+
-" exec LA(1)=="+input.LT(1)+
+" exec LA(1)=="+ getLookaheadName(input) +
 ", outerContext="+outerContext.toString(parser));
 RuleContext ctx = RuleContext.EMPTY;
 if ( useContext ) ctx = outerContext;

@@ -133,19 +133,19 @@ public class ParserATNSimulator extends ATNSimulator {
 }

 // doesn't create DFA when matching
-public int matchATN(TokenStream input, ATNState startState) {
+public int matchATN(IntStream input, ATNState startState) {
 DFA dfa = new DFA(startState);
 RuleContext ctx = RuleContext.EMPTY;
 OrderedHashSet<ATNConfig> s0_closure = computeStartState(dfa.decision, startState, ctx);
 return execATN(input, dfa, input.index(), s0_closure, false);
 }

-public int execDFA(TokenStream input, DFA dfa, DFAState s0, RuleContext outerContext) {
+public int execDFA(IntStream input, DFA dfa, DFAState s0, RuleContext outerContext) {
 // dump(dfa);
 if ( outerContext==null ) outerContext = RuleContext.EMPTY;
 this.outerContext = outerContext;
 if ( dfa_debug ) System.out.println("DFA decision "+dfa.decision+
-" exec LA(1)=="+input.LT(1)+
+" exec LA(1)=="+ getLookaheadName(input) +
 ", outerContext="+outerContext.toString(parser));
 DFAState prevAcceptState = null;
 DFAState s = s0;

@@ -180,8 +180,9 @@ public class ParserATNSimulator extends ATNSimulator {
 if ( dfa_debug ) System.out.println("no edge for "+t);
 int alt = -1;
 if ( dfa_debug ) {
 System.out.println("ATN exec upon "+
-input.toString(start,input.index())+
+getInputString(input, start) +
 " at DFA state "+s.stateNumber);
 }
 try {

@@ -228,13 +229,23 @@ public class ParserATNSimulator extends ATNSimulator {
 return prevAcceptState.prediction;
 }

-public int execATN(TokenStream input,
+public String getInputString(IntStream input, int start) {
+if ( input instanceof TokenStream ) {
+return ((TokenStream)input).toString(start,input.index());
+}
+else if ( input instanceof ASTNodeStream) {
+return ((ASTNodeStream)input).toString(start,input.index());
+}
+return "n/a";
+}

+public int execATN(IntStream input,
 DFA dfa,
 int startIndex,
 OrderedHashSet<ATNConfig> s0,
 boolean useContext)
 {
-if ( debug ) System.out.println("execATN decision "+dfa.decision+" exec LA(1)=="+input.LT(1));
+if ( debug ) System.out.println("execATN decision "+dfa.decision+" exec LA(1)=="+ getLookaheadName(input));
 ATN_failover++;
 OrderedHashSet<ATNConfig> closure = new OrderedHashSet<ATNConfig>();

@@ -281,7 +292,7 @@ public class ParserATNSimulator extends ATNSimulator {
 String rname = "n/a";
 if ( parser !=null ) rname = parser.getRuleNames()[loc.ruleIndex];
 System.out.println("AMBIG dec "+dfa.decision+" in "+rname+" for alt "+ambigAlts+" upon "+
-input.toString(startIndex, input.index()));
+getInputString(input, startIndex));
 System.out.println("REACH="+reach);
 }
 dfa.conflict = true; // at least one DFA state is ambiguous

@@ -351,7 +362,7 @@ public class ParserATNSimulator extends ATNSimulator {
 } while ( true );

 if ( prevAccept==null ) {
-System.out.println("no viable token at input "+input.LT(1)+", index "+input.index());
+System.out.println("no viable token at input "+ getLookaheadName(input) +", index "+input.index());
 NoViableAltException nvae = new NoViableAltException(parser, input, closure, outerContext);
 nvae.startIndex = startIndex;
 throw nvae;

@@ -382,7 +393,7 @@ public class ParserATNSimulator extends ATNSimulator {
 return exitAlt;
 }

-public int retryWithContext(TokenStream input,
+public int retryWithContext(IntStream input,
 DFA dfa,
 int startIndex,
 RuleContext originalContext,

@@ -395,7 +406,7 @@ public class ParserATNSimulator extends ATNSimulator {
 retry_with_context++;
 int old_k = input.index();
 // retry using context, if any; if none, kill all but min as before
-if ( debug ) System.out.println("RETRY "+input.toString(startIndex, input.index())+
+if ( debug ) System.out.println("RETRY "+ getInputString(input, startIndex) +
 " with ctx="+ originalContext);
 int min = getMinAlt(ambigAlts);
 if ( originalContext==RuleContext.EMPTY ) {

@@ -786,6 +797,11 @@ public class ParserATNSimulator extends ATNSimulator {
 return String.valueOf(t);
 }

+public String getLookaheadName(IntStream input) {
+if ( input.LA(1)==Token.EOF ) return "EOF";
+return parser.getTokenNames()[input.LA(1)];
+}

 public void setContextSensitive(boolean ctxSensitive) {
 this.userWantsCtxSensitive = ctxSensitive;
 }
@@ -36,5 +36,4 @@ package org.antlr.v4.runtime.atn;
 */
 public class PlusBlockStartState extends BlockStartState {
 public PlusLoopbackState loopBackState;
-//public BlockEndState endState;
 }
@@ -29,33 +29,10 @@

 package org.antlr.v4.runtime.atn;

-import java.util.*;

 /** The last node in the ATN for a rule, unless that rule is the start symbol.
 * In that case, there is one transition to EOF. Later, we might encode
 * references to all calls to this rule to compute FOLLOW sets for
 * error handling.
 */
 public class RuleStopState extends ATNState {
-public static final int INITIAL_NUM_TRANSITIONS = 4;

-//public int actionIndex; // for lexer, this is right edge action in rule

-/** Track the transitions emanating from this ATN state. */
-protected List<Transition> transitions =
-new ArrayList<Transition>(INITIAL_NUM_TRANSITIONS);

-@Override
-public int getNumberOfTransitions() { return transitions.size(); }

-@Override
-public void addTransition(Transition e) { transitions.add(e); }

-@Override
-public Transition transition(int i) { return transitions.get(i); }

-@Override
-public void setTransition(int i, Transition e) {
-transitions.set(i, e);
-}
 }

@@ -31,5 +31,4 @@ package org.antlr.v4.runtime.atn;

 /** The block that begins a closure loop. */
 public class StarBlockStartState extends BlockStartState {
-// public StarLoopbackState loopBackState;
 }
@@ -39,7 +39,7 @@ import java.util.Iterator;
 *
 * Emit navigation nodes (DOWN, UP, and EOF) to let show tree structure.
 */
-public class TreeIterator implements Iterator {
+public class ASTIterator implements Iterator {
 protected ASTAdaptor adaptor;
 protected Object root;
 protected Object tree;

@@ -55,11 +55,11 @@ public class TreeIterator implements Iterator {
 */
 protected FastQueue nodes;

-public TreeIterator(Object tree) {
+public ASTIterator(Object tree) {
 this(new CommonASTAdaptor(),tree);
 }

-public TreeIterator(ASTAdaptor adaptor, Object tree) {
+public ASTIterator(ASTAdaptor adaptor, Object tree) {
 this.adaptor = adaptor;
 this.tree = tree;
 this.root = tree;

@@ -48,7 +48,7 @@ public class CommonASTNodeStream extends LookaheadStream<Object> implements ASTN
 ASTAdaptor adaptor;

 /** The tree iterator we using */
-protected TreeIterator it;
+protected ASTIterator it;

 /** Stack of indexes used for push/pop calls */
 protected Stack<Integer> calls;

@@ -66,7 +66,7 @@ public class CommonASTNodeStream extends LookaheadStream<Object> implements ASTN
 public CommonASTNodeStream(ASTAdaptor adaptor, Object tree) {
 this.root = tree;
 this.adaptor = adaptor;
-it = new TreeIterator(adaptor,root);
+it = new ASTIterator(adaptor,root);
 }

 public void reset() {

@@ -147,10 +147,19 @@ public class CommonASTNodeStream extends LookaheadStream<Object> implements ASTN
 }
 }

+/** Print the token text between start and stop nodes. If stop is an UP
+* node, then we have to walk it back until we see the first non-UP node.
+* Then, just get the token indexes and look into the token stream.
+*/
 public String toString(Object start, Object stop) {
-// we'll have to walk from start to stop in tree; we're not keeping
+if ( tokens==null ) throw new UnsupportedOperationException("can't print from null token stream in node stream");
-// a complete node stream buffer
+if ( start==null || stop==null ) return "";
-return "n/a";
+Token startToken = adaptor.getToken(start);
+Token stopToken = adaptor.getToken(stop);
+while ( stopToken.getType()==Token.UP ) {
+stopToken = adaptor.getToken(stop);
+}
+return tokens.toString(startToken.getTokenIndex(), stopToken.getTokenIndex());
 }

 /** For debugging; destructive: moves tree iterator to end. */
@@ -369,9 +369,22 @@ cases(ttypes) ::= <<
 <ttypes:{t | case <t>:}; separator="\n">
 >>

-MatchTree(t, elems) ::= <<
+MatchTree(t, root, down, leftActions, kids, rightActions, up) ::= <<
 // match tree
-<elems; separator="\n">
+<root>
+<leftActions>
+<if(t.isNullable)>
+if ( _input.LA(1)==Token.DOWN ) {
+<down>
+<kids; separator="\n">
+<up>
+}
+<else>
+<down>
+<kids; separator="\n">
+<up>
+<endif>
+<rightActions>
 >>

 MatchDOWN(m) ::= <<

@@ -444,7 +457,7 @@ TokenPropertyRef_int(t) ::= "(_localctx.<t.label>!=null?Integer.valueOf(_localct
 RulePropertyRef_start(r) ::= "(_localctx.<r.label>!=null?((<file.TokenLabelType>)_localctx.<r.label>.start):null)"
 RulePropertyRef_stop(r) ::= "(_localctx.<r.label>!=null?((<file.TokenLabelType>)_localctx.<r.label>.stop):null)"
 RulePropertyRef_tree(r) ::= "(_localctx.<r.label>!=null?((<file.ASTLabelType>)_localctx.<r.label>.tree):null)"
-RulePropertyRef_text(r) ::= "(_localctx.<r.label>!=null?((TokenStream)_input).toString(_localctx.<r.label>.start,_localctx.<r.label>.stop):null)"
+RulePropertyRef_text(r) ::= "(_localctx.<r.label>!=null?_input.toString(_localctx.<r.label>.start,_localctx.<r.label>.stop):null)"
 RulePropertyRef_st(r) ::= "(_localctx.<r.label>!=null?_localctx.<r.label>.st:null)"

 ThisRulePropertyRef_start(r) ::= "_localctx.start"

@@ -453,6 +466,15 @@ ThisRulePropertyRef_tree(r) ::= "_localctx.tree"
 ThisRulePropertyRef_text(r) ::= "((TokenStream)_input).toString(_localctx.start, _input.LT(-1))"
 ThisRulePropertyRef_st(r) ::= "_localctx.st"

+/*
+TreeRulePropertyRef_text(r) ::= <%
+(_localctx.<r.label>!=null?
+((ASTNodeStream)_input).toString(_localctx.<r.label>.start,_localctx.<r.label>.stop)
+:null
+)
+%>
+*/

 NonLocalAttrRef(s) ::= "((<s.ruleName>Context)getInvokingContext(<s.ruleIndex>)).<s.name>"
 SetNonLocalAttr(s, rhsChunks) ::=
 "((<s.ruleName>Context)getInvokingContext(<s.ruleIndex>)).<s.name> = <rhsChunks>;"
@@ -95,7 +95,7 @@ public class LexerATNFactory extends ParserATNFactory {
 ATNState right = newState(b);
 int t1 = CharSupport.getCharValueFromGrammarCharLiteral(a.getText());
 int t2 = CharSupport.getCharValueFromGrammarCharLiteral(b.getText());
-left.transition = new RangeTransition(t1, t2, right);
+left.addTransition(new RangeTransition(t1, t2, right));
 a.atnState = left;
 b.atnState = left;
 return new Handle(left, right);

@@ -119,12 +119,12 @@ public class LexerATNFactory extends ParserATNFactory {
 }
 if ( invert ) {
 IntervalSet notSet = (IntervalSet)set.complement(Token.MIN_TOKEN_TYPE, g.getMaxTokenType());
-left.transition = new NotSetTransition(set, notSet, right);
+left.addTransition(new NotSetTransition(set, notSet, right));
 }
 else {
-left.transition = new SetTransition(set, right);
+left.addTransition(new SetTransition(set, right));
 }
-right.incidentTransition = left.transition;
+right.incidentTransition = left.transition(0);
 associatedAST.atnState = left;
 return new Handle(left, right);
 }

@@ -144,7 +144,7 @@ public class LexerATNFactory extends ParserATNFactory {
 ATNState right = null;
 for (int i=0; i<n; i++) {
 right = newState(stringLiteralAST);
-prev.transition = new AtomTransition(chars.charAt(i), right);
+prev.addTransition(new AtomTransition(chars.charAt(i), right));
 prev = right;
 }
 stringLiteralAST.atnState = left;

@@ -103,8 +103,8 @@ public class ParserATNFactory implements ATNFactory {
 ATNState left = newState(node);
 ATNState right = newState(node);
 int ttype = g.getTokenType(node.getText());
-left.transition = new AtomTransition(ttype, right);
+left.addTransition(new AtomTransition(ttype, right));
-right.incidentTransition = left.transition;
+right.incidentTransition = left.transition(0);
 node.atnState = left;
 return new Handle(left, right);
 }

@@ -123,12 +123,12 @@ public class ParserATNFactory implements ATNFactory {
 }
 if ( invert ) {
 IntervalSet notSet = (IntervalSet)set.complement(Token.MIN_TOKEN_TYPE, g.getMaxTokenType());
-left.transition = new NotSetTransition(set, notSet, right);
+left.addTransition(new NotSetTransition(set, notSet, right));
 }
 else {
-left.transition = new SetTransition(set, right);
+left.addTransition(new SetTransition(set, right));
 }
-right.incidentTransition = left.transition;
+right.incidentTransition = left.transition(0);
 associatedAST.atnState = left;
 return new Handle(left, right);
 }

@@ -211,7 +211,7 @@ public class ParserATNFactory implements ATNFactory {
 p.ruleIndex = currentRule.index;
 p.predIndex = g.sempreds.get(pred);
 p.isCtxDependent = UseDefAnalyzer.actionIsContextDependent(pred);
-left.transition = p;
+left.addTransition(p);
 pred.atnState = left;
 return new Handle(left, right);
 }

@@ -226,7 +226,7 @@ public class ParserATNFactory implements ATNFactory {
 ATNState right = newState(action);
 ActionTransition a = new ActionTransition(right);
 a.ruleIndex = currentRule.index;
-left.transition = a;
+left.addTransition(a);
 action.atnState = left;
 return new Handle(left, right);
 }

@@ -300,7 +300,7 @@ public class ParserATNFactory implements ATNFactory {

 // public Handle notBlock(GrammarAST notAST, Handle set) {
 // SetTransition st = (SetTransition)set.left.transition;
-// set.left.transition = new NotSetTransition(st.label, set.right);
+// set.left.addTransition(new NotSetTransition(st.label, set.right);
 // notAST.atnState = set.left;
 // return set;
 // }

@@ -418,8 +418,8 @@ public class ParserATNFactory implements ATNFactory {
 public Handle wildcard(GrammarAST node) {
 ATNState left = newState(node);
 ATNState right = newState(node);
-left.transition = new WildcardTransition(right);
+left.addTransition(new WildcardTransition(right));
-right.incidentTransition = left.transition;
+right.incidentTransition = left.transition(0);
 node.atnState = left;
 return new Handle(left, right);
 }
@@ -29,48 +29,75 @@

 package org.antlr.v4.automata;

+import org.antlr.v4.runtime.*;
+import org.antlr.v4.runtime.atn.*;
+import org.antlr.v4.runtime.misc.IntervalSet;
 import org.antlr.v4.tool.*;

-import java.util.List;
+import java.util.*;

 /** Build ATNs for tree grammars */
 public class TreeParserATNFactory extends ParserATNFactory {
+// track stuff for ^(...) patterns in grammar to fix up nullable after ATN build
+List<TreePatternAST> treePatternRootNodes = new ArrayList<TreePatternAST>();
+List<ATNState> firstChildStates = new ArrayList<ATNState>();
+List<ATNState> downStates = new ArrayList<ATNState>();
+List<ATNState> upTargetStates = new ArrayList<ATNState>();

 public TreeParserATNFactory(Grammar g) {
 super(g);
 }

-/** x y z from ^(x y z) becomes o-x->o-DOWN->o-y->o-z->o-UP->o */
+public ATN createATN() {
-public Handle tree(GrammarAST node, List<Handle> els) {
+super.createATN();
-Handle h = elemList(els);
-return h;

-// ATNState first = h.left;
+for (int i=0; i<firstChildStates.size(); i++) {
-// ATNState last = h.right;
+ATNState firstChild = firstChildStates.get(i);
-// node.atnState = first;
+LL1Analyzer analyzer = new LL1Analyzer(atn);
-//
+IntervalSet look = analyzer.LOOK(firstChild, RuleContext.EMPTY);
-// // find root transition first side node
+TreePatternAST root = treePatternRootNodes.get(i);
-// ATNState p = first;
+System.out.println(root.toStringTree()+"==nullable? "+look.member(Token.UP));
-// while ( p.transition(0) instanceof EpsilonTransition ||
-// p.transition(0) instanceof PredicateTransition ||
+if ( look.member(Token.UP) ) {
-// p.transition(0) instanceof RangeTransition ||
+// nullable child list if we can see the UP as the next token.
-// p.transition(0) instanceof ActionTransition )
+// convert r DN kids UP to r (DN kids UP)?; leave AST
-// {
+// that drives code gen. This just affects analysis
-// p = p.transition(0).target;
+root.isNullable = true;
-// }
+epsilon(downStates.get(i), upTargetStates.get(i));
-// ATNState rootLeftNode = p;
+}
-// ATNState rootRightNode = rootLeftNode.transition(0).target;
+}
-// ATNState downLeftNode = newState(node);
-// downLeftNode.transition = new AtomTransition(Token.DOWN, rootRightNode);
+return atn;
-// rootRightNode.incidentTransition = downLeftNode.transition;
-// rootLeftNode.transition.target = downLeftNode;
-// downLeftNode.incidentTransition = rootLeftNode.transition;
-//
-// ATNState upRightNode = newState(node);
-// last.transition = new AtomTransition(Token.UP, upRightNode);
-// upRightNode.incidentTransition = last.transition;
-// last = upRightNode;
-//
-// return new Handle(first, last);
 }

+/** x y z from ^(x y z) becomes o-x->o-DOWN->o-y->o-z->o-UP->o
+* ANTLRParser.g has added DOWN_TOKEN, UP_TOKEN into AST.
+* Elems are [root, DOWN_TOKEN, x, y, UP_TOKEN]
+*/
+public Handle tree(GrammarAST node, List<Handle> els) {
+Handle h = elemList(els);

+treePatternRootNodes.add((TreePatternAST)node);
+// find DOWN node then first child
+for (Handle elh : els) {
+Transition trans = elh.left.transition(0);
+if ( !trans.isEpsilon() && trans.label().member(Token.DOWN) ) {
+ATNState downState = elh.left;
+downStates.add(downState);
+firstChildStates.add(downState.transition(0).target);
+break;
+}
+}
+// find UP node
+for (Handle elh : els) {
+Transition trans = elh.left.transition(0);
+if ( trans instanceof AtomTransition && trans.label().member(Token.UP) ) {
+ATNState upTargetState = elh.right;
+upTargetStates.add(upTargetState);
+break;
+}
+}

+return h;
+}
 }
@@ -56,6 +56,8 @@ public class ActionTranslator implements ActionSplitterListener {
 put("st", RulePropertyRef_st.class);
 }};

+public static final Map<String, Class> treeRulePropToModelMap = rulePropToModelMap;
+
 public static final Map<String, Class> tokenPropToModelMap = new HashMap<String, Class>() {{
 put("text", TokenPropertyRef_text.class);
 put("type", TokenPropertyRef_type.class);

@@ -135,7 +137,7 @@ public class ActionTranslator implements ActionSplitterListener {
 case RET: chunks.add(new RetValueRef(x.getText())); break;
 case LOCAL: chunks.add(new LocalRef(x.getText())); break;
 case PREDEFINED_RULE: chunks.add(getRulePropertyRef(x)); break;
-// case PREDEFINED_TREE_RULE: chunks.add(new RetValueRef(x.getText())); break;
+case PREDEFINED_TREE_RULE: chunks.add(getRulePropertyRef(x)); break;
 }
 }
 if ( node.resolver.resolvesToToken(x.getText(), node) ) {

@@ -150,16 +152,6 @@ public class ActionTranslator implements ActionSplitterListener {
 chunks.add(new ListLabelRef(x.getText())); // $ids for ids+=ID etc...
 return;
 }
-// switch ( a.dict.type ) {
-// case ARG: chunks.add(new ArgRef(x.getText())); break;
-// case RET: chunks.add(new RetValueRef(x.getText())); break;
-// case PREDEFINED_RULE: chunks.add(new RetValueRef(x.getText())); break;
-// case PREDEFINED_LEXER_RULE: chunks.add(new RetValueRef(x.getText())); break;
-// case PREDEFINED_TREE_RULE: chunks.add(new RetValueRef(x.getText())); break;
-// case GLOBAL_SCOPE: chunks.add(new RetValueRef(x.getText())); break;
-// case RULE_SCOPE: chunks.add(new RetValueRef(x.getText())); break;
-// case TOKEN: chunks.add(new TokenRef(x.getText())); break;
-// }
 }

 /** $x.y = expr; */

@@ -176,13 +168,16 @@ public class ActionTranslator implements ActionSplitterListener {
 switch ( a.dict.type ) {
 case ARG: chunks.add(new ArgRef(y.getText())); break; // has to be current rule
 case RET:
-if ( factory.getCurrentRuleFunction()!=null && factory.getCurrentRuleFunction().name.equals(x.getText()) ) {
+if ( factory.getCurrentRuleFunction()!=null &&
+factory.getCurrentRuleFunction().name.equals(x.getText()) )
+{
 chunks.add(new RetValueRef(y.getText())); break;
 }
 else {
 chunks.add(new QRetValueRef(getRuleLabel(x.getText()), y.getText())); break;
 }
 case PREDEFINED_RULE:
+case PREDEFINED_TREE_RULE:
 if ( factory.getCurrentRuleFunction()!=null &&
 factory.getCurrentRuleFunction().name.equals(x.getText()) )
 {

@@ -192,9 +187,13 @@ public class ActionTranslator implements ActionSplitterListener {
 chunks.add(getRulePropertyRef(x, y));
 }
 break;
-case TOKEN: chunks.add(getTokenPropertyRef(x, y)); break;
+case TOKEN:
+chunks.add(getTokenPropertyRef(x, y));
+break;
+// case PREDEFINED_TREE_RULE:
+// chunks.add(new RetValueRef(x.getText()));
+// break;
 // case PREDEFINED_LEXER_RULE: chunks.add(new RetValueRef(x.getText())); break;
-// case PREDEFINED_TREE_RULE: chunks.add(new RetValueRef(x.getText())); break;
 }
 }

@@ -272,15 +271,18 @@ public class ActionTranslator implements ActionSplitterListener {
 }

 RulePropertyRef getRulePropertyRef(Token x, Token prop) {
+Grammar g = factory.getGrammar();
 try {
-Class c = rulePropToModelMap.get(prop.getText());
+Class c = g.isTreeGrammar() ?
+treeRulePropToModelMap.get(prop.getText()) :
+rulePropToModelMap.get(prop.getText());
 Constructor ctor = c.getConstructor(new Class[] {String.class});
 RulePropertyRef ref =
 (RulePropertyRef)ctor.newInstance(getRuleLabel(x.getText()));
 return ref;
 }
 catch (Exception e) {
-factory.getGrammar().tool.errMgr.toolError(ErrorType.INTERNAL_ERROR, e);
+g.tool.errMgr.toolError(ErrorType.INTERNAL_ERROR, e);
 }
 return null;
 }
@@ -131,18 +131,18 @@ treeSpec returns [MatchTree treeMatch]
 subrule returns [List<? extends SrcOp> omos]
 : ^(astBlockSuffix block[null,null,$astBlockSuffix.start]) {$omos = $block.omos;}
 | ^(OPTIONAL block[null,$OPTIONAL,null]) {$omos = $block.omos;}
-| ^(CLOSURE block[null,null,null])
+| ( ^(op=CLOSURE b=block[null,$CLOSURE,null])
+| ^(op=POSITIVE_CLOSURE b=block[null,$POSITIVE_CLOSURE,null])
+)
 {
 List<CodeBlockForAlt> alts = new ArrayList<CodeBlockForAlt>();
-SrcOp blk = $block.omos.get(0);
+SrcOp blk = $b.omos.get(0);
 CodeBlockForAlt alt = new CodeBlockForAlt(controller.delegate);
 alt.addOp(blk);
 alts.add(alt);
-SrcOp loop = controller.getEBNFBlock($CLOSURE, alts); // "star it"
+SrcOp loop = controller.getEBNFBlock($op, alts); // "star it"
 $omos = DefaultOutputModelFactory.list(loop);
 }
-| ^(POSITIVE_CLOSURE block[null,$POSITIVE_CLOSURE,null])
-{$omos = $block.omos;}
 | block[null, null,null] {$omos = $block.omos;}
 ;

@@ -266,7 +266,7 @@ rewriteTreeAtom[boolean isRoot] returns [List<SrcOp> omos]
 ;

 rewriteTreeEbnf returns [CodeBlock op]
-: ^( (a=OPTIONAL|a=CLOSURE)
+: ^( (a=OPTIONAL|a=CLOSURE|a=POSITIVE_CLOSURE)
 ^( REWRITE_BLOCK
 {
 controller.codeBlockLevel++;
@@ -40,7 +40,7 @@ public class MatchSet extends MatchToken {

 public MatchSet(OutputModelFactory factory, GrammarAST ast) {
 super(factory, ast);
-SetTransition st = (SetTransition)ast.atnState.transition;
+SetTransition st = (SetTransition)ast.atnState.transition(0);
 expr = new TestSetInline(factory, null, st.set);
 Decl d = new TokenTypeDecl(factory, expr.varName);
 factory.getCurrentRuleFunction().addLocalDecl(d);

@@ -38,13 +38,14 @@ import java.util.*;
 /** */
 public class MatchToken extends RuleElement implements LabeledOp {
 public String name;
+public int ttype;
 public List<Decl> labels = new ArrayList<Decl>();

 public MatchToken(OutputModelFactory factory, TerminalAST ast) {
 super(factory, ast);
 Grammar g = factory.getGrammar();
 CodeGenerator gen = factory.getGenerator();
-int ttype = g.getTokenType(ast.getText());
+ttype = g.getTokenType(ast.getText());
 name = gen.target.getTokenTypeAsTargetLabel(g, ttype);
 }
@@ -30,16 +30,46 @@
 package org.antlr.v4.codegen.model;

 import org.antlr.v4.codegen.OutputModelFactory;
-import org.antlr.v4.tool.GrammarAST;
+import org.antlr.v4.misc.Utils;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.tool.*;

 import java.util.List;

 public class MatchTree extends RuleElement {
-@ModelElement public List<? extends SrcOp> elems;
+public boolean isNullable;

+@ModelElement public SrcOp root;
+@ModelElement public List<? extends SrcOp> leftActions;
+@ModelElement public SrcOp down;
+@ModelElement public List<? extends SrcOp> kids;
+@ModelElement public SrcOp up;
+@ModelElement public List<? extends SrcOp> rightActions;

 public MatchTree(OutputModelFactory factory, GrammarAST ast, List<? extends SrcOp> elems) {
 super(factory, ast);
-this.elems = elems;
+TreePatternAST rootNode = (TreePatternAST)ast;
+this.isNullable = rootNode.isNullable;
+List<? extends SrcOp> afterRoot = elems.subList(1, elems.size());
+int downIndex =
+Utils.indexOf(afterRoot, new Utils.Filter<SrcOp>() {
+public boolean select(SrcOp op) {
+return op instanceof MatchToken && ((MatchToken)op).ttype==Token.DOWN;
+}
+});
+downIndex++; // we skipped root
+down = elems.get(downIndex);
+int upIndex =
+Utils.lastIndexOf(elems, new Utils.Filter<SrcOp>() {
+public boolean select(SrcOp op) {
+return op instanceof MatchToken && ((MatchToken) op).ttype == Token.UP;
+}
+});
+up = elems.get(upIndex);
+root = elems.get(0);
+leftActions = elems.subList(1, downIndex);
+rightActions = elems.subList(upIndex+1, elems.size());
+this.kids = elems.subList(downIndex+1, upIndex);
 }

 }
@@ -37,6 +37,11 @@ import java.util.*;
 /** */
 public class Utils {
 public static final int INTEGER_POOL_MAX_VALUE = 1000;

+public interface Filter<T> {
+boolean select(T t);
+}
+
 static Integer[] ints = new Integer[INTEGER_POOL_MAX_VALUE+1];

 /** Integer objects are immutable so share all Integers with the

@@ -179,4 +184,19 @@ public class Utils {
 }
 return null;
 }

+public static <T> int indexOf(List<? extends T> elems, Filter<T> filter) {
+for (int i=0; i<elems.size(); i++) {
+if ( filter.select(elems.get(i)) ) return i;
+}
+return -1;
+}
+
+public static <T> int lastIndexOf(List<? extends T> elems, Filter<T> filter) {
+for (int i=elems.size()-1; i>=0; i--) {
+if ( filter.select(elems.get(i)) ) return i;
+}
+return -1;
+}

 }
@@ -649,21 +649,34 @@ labeledElement
 // will walk of course. Alts for trees therefore start with ^( XXX, which
 // says we will see a root node of XXX then DOWN etc
 treeSpec
-: TREE_BEGIN
+@after {
+GrammarAST down = new DownAST(DOWN_TOKEN, $begin);
+GrammarAST up = new UpAST(UP_TOKEN, $begin);
+int i = 1; // skip root element
+GrammarAST p = (GrammarAST)$tree.getChild(i);
+while ( p.getType()==ACTION || p.getType()==SEMPRED ) {
+i++;
+p = (GrammarAST)$tree.getChild(i);
+}
+$tree.insertChild(i, down); // ADD DOWN
+i = $tree.getChildCount()-1;
+p = (GrammarAST)$tree.getChild(i);
+while ( p.getType()==ACTION || p.getType()==SEMPRED ) {
+i--;
+p = (GrammarAST)$tree.getChild(i);
+}
+$tree.insertChild(i+1, up); // ADD UP
+}
+: begin=TREE_BEGIN
 // Only a subset of elements are allowed to be a root node. However
 // we allow any element to appear here and reject silly ones later
 // when we walk the AST.
 root=element
 // After the tree root we get the usual suspects,
-// all members of the element set
+// all members of the element set.
 (kids+=element)+
 RPAREN
--> ^(TREE_BEGIN
+-> ^( TREE_BEGIN<TreePatternAST> $root $kids+ )
-$root
-DOWN_TOKEN<DownAST>[$TREE_BEGIN]
-$kids+
-UP_TOKEN<UpAST>[$TREE_BEGIN]
-)
 ;

 // A block of gramamr structure optionally followed by standard EBNF

@@ -358,6 +358,8 @@ element
 | ^(BANG astOperand) {bangOp($BANG, $astOperand.start);}
 | ^(NOT blockSet)
 | ^(NOT block)
+| DOWN_TOKEN
+| UP_TOKEN
 ;

 astOperand

@@ -371,7 +373,7 @@ labeledElement
 ;

 treeSpec
-: ^(TREE_BEGIN element DOWN_TOKEN element+ UP_TOKEN )
+: ^(TREE_BEGIN element element+ ) // UP_TOKEN and DOWN_TOKEN in there somewhere
 ;

 subrule
@@ -31,8 +31,7 @@ package org.antlr.v4.semantics;

 import org.antlr.runtime.Token;
 import org.antlr.v4.misc.Utils;
-import org.antlr.v4.parse.ANTLRParser;
+import org.antlr.v4.parse.*;
-import org.antlr.v4.parse.GrammarTreeVisitor;
 import org.antlr.v4.tool.*;
 import org.stringtemplate.v4.misc.MultiMap;

@@ -534,7 +533,7 @@ public class BasicSemanticChecks extends GrammarTreeVisitor {
 }

 void checkWildcardRoot(GrammarAST wild) {
-if ( wild.getParent().getType()==ANTLRParser.TREE_BEGIN ) {
+if ( wild.getChildIndex()==0 && wild.getParent().getType()==ANTLRParser.TREE_BEGIN ) {
 String fileName = wild.token.getInputStream().getSourceName();
 g.tool.errMgr.grammarError(ErrorType.WILDCARD_AS_ROOT,
 fileName,

@@ -38,9 +38,6 @@ public class ActionAST extends GrammarAST {
 // Alt, rule, grammar space
 public AttributeResolver resolver;
 public List<Token> chunks; // useful for ANTLR IDE developers
-/** In which alt does this node live? */
-// public Alternative alt;

 public ActionAST(GrammarAST node) {
 super(node);
@@ -0,0 +1,40 @@
+/*
+ [The "BSD license"]
+ Copyright (c) 2011 Terence Parr
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ 3. The name of the author may not be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.antlr.v4.tool;
+
+import org.antlr.runtime.Token;
+
+public class TreePatternAST extends GrammarAST {
+public boolean isNullable;
+
+public TreePatternAST(Token t) {
+super(t);
+}
+}
@ -325,6 +325,7 @@ public abstract class BaseTest {
|
||||||
/** Return true if all is ok, no errors */
|
/** Return true if all is ok, no errors */
|
||||||
protected boolean antlr(String fileName, String grammarFileName, String grammarStr, boolean debug) {
|
protected boolean antlr(String fileName, String grammarFileName, String grammarStr, boolean debug) {
|
||||||
boolean allIsWell = true;
|
boolean allIsWell = true;
|
||||||
|
System.out.println("dir "+tmpdir);
|
||||||
mkdir(tmpdir);
|
mkdir(tmpdir);
|
||||||
writeFile(tmpdir, fileName, grammarStr);
|
writeFile(tmpdir, fileName, grammarStr);
|
||||||
try {
|
try {
|
||||||
|
|
|
@ -233,6 +233,19 @@ public class TestRewriteAST extends BaseTest {
assertEquals("a b\n", found);
}

@Test public void testPositiveClosureSingleToken() throws Exception {
String grammar =
"grammar T;\n" +
"options {output=AST;}\n" +
"a : ID ID -> ID+ ;\n" +
"ID : 'a'..'z'+ ;\n" +
"INT : '0'..'9'+;\n" +
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";
String found = execParser("T.g", grammar, "TParser", "TLexer",
"a", "a b", debug);
assertEquals("a b\n", found);
}

@Test public void testOptionalSingleRule() throws Exception {
String grammar =
"grammar T;\n" +
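(The new test above exercises the -> ID+ rewrite, which replays every ID matched by the rule into a flat output AST, so input "a b" yields the tree "a b". A hedged variant with one more token, shown only to illustrate the flat shape; it is not part of this change and assumes the same execParser helper used throughout this file:)

@Test public void testPositiveClosureThreeTokens() throws Exception {
	// Illustrative only: three IDs collected by the same ID+ rewrite.
	String grammar =
		"grammar T;\n" +
		"options {output=AST;}\n" +
		"a : ID ID ID -> ID+ ;\n" +
		"ID : 'a'..'z'+ ;\n" +
		"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";
	String found = execParser("T.g", grammar, "TParser", "TLexer",
							  "a", "a b c", debug);
	assertEquals("a b c\n", found); // flat AST: each matched ID appears once
}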
@ -42,7 +42,7 @@ public class TestTreeIterator {
ASTAdaptor adaptor = new CommonASTAdaptor();
TreeWizard wiz = new TreeWizard(adaptor, tokens);
CommonAST t = (CommonAST)wiz.create("A");
TreeIterator it = new TreeIterator(t);
ASTIterator it = new ASTIterator(t);
StringBuffer buf = toString(it);
String expecting = "A EOF";
String found = buf.toString();

@ -53,7 +53,7 @@ public class TestTreeIterator {
ASTAdaptor adaptor = new CommonASTAdaptor();
TreeWizard wiz = new TreeWizard(adaptor, tokens);
CommonAST t = (CommonAST)wiz.create("(nil A B)");
TreeIterator it = new TreeIterator(t);
ASTIterator it = new ASTIterator(t);
StringBuffer buf = toString(it);
String expecting = "nil DOWN A B UP EOF";
String found = buf.toString();

@ -64,7 +64,7 @@ public class TestTreeIterator {
ASTAdaptor adaptor = new CommonASTAdaptor();
TreeWizard wiz = new TreeWizard(adaptor, tokens);
CommonAST t = (CommonAST)wiz.create("(A B)");
TreeIterator it = new TreeIterator(t);
ASTIterator it = new ASTIterator(t);
StringBuffer buf = toString(it);
String expecting = "A DOWN B UP EOF";
String found = buf.toString();

@ -75,7 +75,7 @@ public class TestTreeIterator {
ASTAdaptor adaptor = new CommonASTAdaptor();
TreeWizard wiz = new TreeWizard(adaptor, tokens);
CommonAST t = (CommonAST)wiz.create("(A B C)");
TreeIterator it = new TreeIterator(t);
ASTIterator it = new ASTIterator(t);
StringBuffer buf = toString(it);
String expecting = "A DOWN B C UP EOF";
String found = buf.toString();

@ -86,7 +86,7 @@ public class TestTreeIterator {
ASTAdaptor adaptor = new CommonASTAdaptor();
TreeWizard wiz = new TreeWizard(adaptor, tokens);
CommonAST t = (CommonAST)wiz.create("(A (B C))");
TreeIterator it = new TreeIterator(t);
ASTIterator it = new ASTIterator(t);
StringBuffer buf = toString(it);
String expecting = "A DOWN B DOWN C UP UP EOF";
String found = buf.toString();

@ -97,7 +97,7 @@ public class TestTreeIterator {
ASTAdaptor adaptor = new CommonASTAdaptor();
TreeWizard wiz = new TreeWizard(adaptor, tokens);
CommonAST t = (CommonAST)wiz.create("(A (B (C D E) F) G)");
TreeIterator it = new TreeIterator(t);
ASTIterator it = new ASTIterator(t);
StringBuffer buf = toString(it);
String expecting = "A DOWN B DOWN C DOWN D E UP F UP G UP EOF";
String found = buf.toString();

@ -108,7 +108,7 @@ public class TestTreeIterator {
ASTAdaptor adaptor = new CommonASTAdaptor();
TreeWizard wiz = new TreeWizard(adaptor, tokens);
CommonAST t = (CommonAST)wiz.create("(A (B (C D E) F) G)");
TreeIterator it = new TreeIterator(t);
ASTIterator it = new ASTIterator(t);
StringBuffer buf = toString(it);
String expecting = "A DOWN B DOWN C DOWN D E UP F UP G UP EOF";
String found = buf.toString();
@ -121,7 +121,7 @@ public class TestTreeIterator {
assertEquals(expecting, found);
}

protected static StringBuffer toString(TreeIterator it) {
protected static StringBuffer toString(ASTIterator it) {
StringBuffer buf = new StringBuffer();
while ( it.hasNext() ) {
CommonAST n = (CommonAST)it.next();
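(For readers unfamiliar with the iterator being renamed here: it flattens a tree into a node stream, emitting imaginary DOWN and UP navigation nodes around each node's children and an EOF at the end, which is exactly what the expected strings above encode. A self-contained sketch using the post-rename names from this commit; the token-name array is an assumed fixture mirroring the one these tests use:)

// Sketch only: walk a small tree and print the flat DOWN/UP node stream.
String[] tokens = { "<invalid>", "<EOR>", "<DOWN>", "<UP>", "A", "B", "C", "D" }; // assumed fixture
ASTAdaptor adaptor = new CommonASTAdaptor();
TreeWizard wiz = new TreeWizard(adaptor, tokens);
CommonAST t = (CommonAST)wiz.create("(A (B C) D)");
ASTIterator it = new ASTIterator(t);
StringBuilder buf = new StringBuilder();
while ( it.hasNext() ) {
	buf.append(it.next()).append(' ');
}
System.out.println(buf); // expected stream: A DOWN B DOWN C UP D UP EOF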
@ -1,33 +1,34 @@
/*
* [The "BSD license"]
[The "BSD license"]
* Copyright (c) 2010 Terence Parr
Copyright (c) 2011 Terence Parr
* All rights reserved.
All rights reserved.
*/
package org.antlr.test;
package org.antlr.v4.test;

import org.junit.Test;
import org.junit.*;

public class TestTreeParsing extends BaseTest {
@Test public void testFlatList() throws Exception {
@ -40,7 +41,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {ASTLabelType=CommonAST;}\n" +
"a : ID INT\n" +
" {System.out.println($ID+\", \"+$INT);}\n" +
" ;\n";

@ -60,7 +61,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {ASTLabelType=CommonAST;}\n" +
"a : ^(ID INT)\n" +
" {System.out.println($ID+\", \"+$INT);}\n" +
" ;\n";

@ -82,7 +83,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {ASTLabelType=CommonAST;}\n" +
"a : b b ;\n" +
"b : ID INT {System.out.print($ID+\" \"+$INT);}\n" +
" | ^(ID INT) {System.out.print(\"^(\"+$ID+\" \"+$INT+')');}\n" +

@ -105,7 +106,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {ASTLabelType=CommonAST;}\n" +
"a : b b ;\n" +
"b : ID INT+ {System.out.print($ID+\" \"+$INT);}\n" +
" | ^(x=ID (y=INT)+) {System.out.print(\"^(\"+$x+' '+$y+')');}\n" +

@ -129,7 +130,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {ASTLabelType=CommonAST;}\n" +
"a : ID INT+ PERIOD {System.out.print(\"alt 1\");}"+
" | ID INT+ SEMI {System.out.print(\"alt 2\");}\n" +
" ;\n";
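(The hunks above use the two tree-pattern forms a tree grammar rule can take: a flat child sequence such as ID INT, matched against sibling nodes, and a structural pattern such as ^(ID INT), which requires an ID root with an INT child. A hedged illustration of the shapes each form expects; the rewrite rules here are illustrative, not taken from the tests:)

// Parser rules producing the two shapes (illustrative only):
String flat = "a : ID INT -> ID INT ;";     // yields siblings:  abc 34
String tree = "a : ID INT -> ^(ID INT) ;";  // yields a subtree: (abc 34)
// A tree-grammar alt "ID INT" matches the first shape;
// "^(ID INT)" matches the second, stepping DOWN into abc's children.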
@ -139,7 +140,8 @@ public class TestTreeParsing extends BaseTest {
assertEquals("alt 1\n", found);
}

@Test public void testTemplateOutput() throws Exception {
@Ignore
public void testTemplateOutput() throws Exception {
String grammar =
"grammar T;\n" +
"options {output=AST;}\n" +

@ -150,9 +152,9 @@ public class TestTreeParsing extends BaseTest {

String treeGrammar =
"tree grammar TP;\n" +
"options {output=template; ASTLabelType=CommonTree;}\n" +
"options {output=template; ASTLabelType=CommonAST;}\n" +
"s : a {System.out.println($a.st);};\n" +
"a : ID INT -> {new StringTemplate($INT.text)}\n" +
"a : ID INT -> {new ST($INT.text)}\n" +
" ;\n";

String found = execTreeParser("T.g", grammar, "TParser", "TP.g",
@ -170,7 +172,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {ASTLabelType=CommonAST;}\n" +
"a : ^(ID INT?)\n" +
" {System.out.println($ID);}\n" +
" ;\n";
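(This hunk touches the feature named in the commit message: nullable kids. Because the INT child is optional, the pattern ^(ID INT?) must also match an ID node that has no children at all, i.e. one with no DOWN/UP around it. A hedged sketch of the two inputs such a rule has to accept; the parser grammar below is illustrative and not copied from the test:)

// Illustration only (assumed parser grammar; the tree grammar is the one above).
String grammar =
	"grammar T;\n" +
	"options {output=AST;}\n" +
	"a : ID INT? -> ^(ID INT?) ;\n" +  // INT child may be absent
	"ID : 'a'..'z'+ ;\n" +
	"INT : '0'..'9'+;\n" +
	"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";
// Input "abc 34" builds ^(abc 34); input "abc" builds a childless abc node.
// The tree-grammar rule a : ^(ID INT?) has to match both; the second is
// the "nullable kids" case this commit makes work.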
@ -191,7 +193,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {ASTLabelType=CommonAST;}\n" +
"a : ^(ID INT?) SEMI\n" +
" {System.out.println($ID);}\n" +
" ;\n";
@ -212,7 +214,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {ASTLabelType=CommonAST;}\n" +
"a : ^(ID INT? b) SEMI\n" +
" {System.out.println($ID+\", \"+$b.text);}\n" +
" ;\n"+
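(The $b.text reference above exercises the other feature from the commit message: in a tree parser, $rule.text should give back the source text covered by the subtree that rule matched. A hedged sketch of a complete tree grammar using it; the body of rule b is assumed for the example and is not shown in the hunk:)

// Illustration only: $b.text returns the original text spanned by the
// subtree that rule b matched (the definition of b here is a placeholder).
String treeGrammar =
	"tree grammar TP; options {ASTLabelType=CommonAST;}\n" +
	"a : ^(ID INT? b) SEMI\n" +
	"    {System.out.println($ID+\", \"+$b.text);}\n" +
	"  ;\n" +
	"b : ID ;\n";  // assumed body; the real rule b is defined later in the test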
@ -234,7 +236,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {ASTLabelType=CommonAST;}\n" +
"a @init {int x=0;} : ^(ID {x=1;} {x=2;} INT?)\n" +
" {System.out.println($ID+\", \"+x);}\n" +
" ;\n";

@ -256,7 +258,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {tokenVocab=T; ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {tokenVocab=T; ASTLabelType=CommonAST;}\n" +
"a : ^('+' . INT) {System.out.print(\"alt 1\");}"+
" ;\n";

@ -277,7 +279,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {tokenVocab=T; ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {tokenVocab=T; ASTLabelType=CommonAST;}\n" +
"a : ^('+' . INT) {System.out.print(\"alt 1\");}"+
" | ^('+' . .) {System.out.print(\"alt 2\");}\n" +
" ;\n";

@ -301,7 +303,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {tokenVocab=T; ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {tokenVocab=T; ASTLabelType=CommonAST;}\n" +
"a : ^('+' ID INT) {System.out.print(\"alt 1\");}"+
" | ^('+' . .) {System.out.print(\"alt 2\");}\n" +
" ;\n";

@ -325,7 +327,7 @@ public class TestTreeParsing extends BaseTest {
"WS : (' '|'\\n') {$channel=HIDDEN;} ;\n";

String treeGrammar =
"tree grammar TP; options {tokenVocab=T; ASTLabelType=CommonTree;}\n" +
"tree grammar TP; options {tokenVocab=T; ASTLabelType=CommonAST;}\n" +
"a : ^('+' INT INT ) {System.out.print(\"alt 1\");}"+
" | ^('+' .+) {System.out.print(\"alt 2\");}\n" +
" ;\n";