Merge pull request #24 from parrt/master

lots of stuff; check out commits
Terence Parr 2012-02-22 12:47:15 -08:00
commit 995f8e0108
38 changed files with 776 additions and 398 deletions

View File

@ -8,7 +8,7 @@ package org.antlr.v4.runtime;
* make the interface clear up these semantics. If you need the ctx,
* use Parser.getRuleContext().
*/
public interface ParseListener<Symbol> {
public interface ParseListener<Symbol extends Token> {
void visitTerminal(ParserRuleContext<Symbol> ctx, Symbol symbol);
/** Enter all but left-recursive rules */

View File

@ -476,13 +476,6 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator<Token>
return stack;
}
/** For debugging and other purposes, might want the grammar name.
* Have ANTLR generate an implementation for this method.
*/
public String getGrammarFileName() {
return null;
}
/** For debugging and other purposes */
public List<String> getDFAStrings() {
List<String> s = new ArrayList<String>();

View File

@ -28,11 +28,17 @@
*/
package org.antlr.v4.runtime;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.Nullable;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeListener;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
import java.util.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/** A rule invocation record for parsing and tree parsing.
*
@ -57,8 +63,8 @@ import java.util.*;
* group values such as this aggregate. The getters/setters are there to
* satisfy the superclass interface.
*/
public class ParserRuleContext<Symbol> extends RuleContext {
public static final ParserRuleContext<?> EMPTY = new ParserRuleContext<Object>();
public class ParserRuleContext<Symbol extends Token> extends RuleContext {
public static final ParserRuleContext<Token> EMPTY = new ParserRuleContext<Token>();
/** If we are debugging or building a parse tree for a visitor,
* we need to track all of the tokens and rule invocations associated
@ -137,7 +143,7 @@ public class ParserRuleContext<Symbol> extends RuleContext {
public void exitRule(ParseTreeListener<Symbol> listener) { }
// visitor
public <T> T accept(ParseTreeVisitor<? extends T> visitor) { visitor.visitChildren(this); return null; }
public <T> T accept(ParseTreeVisitor<? extends T> visitor) { return visitor.visitChildren(this); }
/** Does not set parent link; other add methods do */
@ -209,13 +215,11 @@ public class ParserRuleContext<Symbol> extends RuleContext {
for (ParseTree o : children) {
if ( o instanceof TerminalNode<?> ) {
TerminalNode<?> tnode = (TerminalNode<?>)o;
if ( tnode.getSymbol() instanceof Token ) {
Token symbol = (Token)tnode.getSymbol();
if ( symbol.getType()==ttype ) {
j++;
if ( j == i ) {
return symbol;
}
Token symbol = tnode.getSymbol();
if ( symbol.getType()==ttype ) {
j++;
if ( j == i ) {
return symbol;
}
}
}
@ -233,13 +237,11 @@ public class ParserRuleContext<Symbol> extends RuleContext {
for (ParseTree o : children) {
if ( o instanceof TerminalNode<?> ) {
TerminalNode<?> tnode = (TerminalNode<?>)o;
if ( tnode.getSymbol() instanceof Token ) {
Token symbol = (Token)tnode.getSymbol();
if ( tokens==null ) {
tokens = new ArrayList<Token>();
}
tokens.add(symbol);
Token symbol = tnode.getSymbol();
if ( tokens==null ) {
tokens = new ArrayList<Token>();
}
tokens.add(symbol);
}
}
@ -298,9 +300,6 @@ public class ParserRuleContext<Symbol> extends RuleContext {
String ruleName = recog.getRuleNames()[s.ruleIndex];
buf.append(ruleName);
if ( p.parent != null ) buf.append(" ");
// ATNState invoker = atn.states.get(ctx.invokingState);
// RuleTransition rt = (RuleTransition)invoker.transition(0);
// buf.append(recog.getRuleNames()[rt.target.ruleIndex]);
p = (ParserRuleContext<?>)p.parent;
}
buf.append("]");
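With the new Symbol extends Token bound, getToken and getTokens above can treat terminal children as Tokens directly, without the old instanceof check. A minimal usage sketch, assuming some ParserRuleContext<Token> ctx from a parse and a hypothetical MyLexer.ID token type (neither is defined in this commit):

    Token firstId = ctx.getToken(MyLexer.ID, 0);    // first ID child, or null if there is none
    List<Token> ids = ctx.getTokens(MyLexer.ID);    // every ID child of this context, in match order
    if (firstId != null) System.out.println(firstId.getText());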

View File

@ -53,15 +53,16 @@ public abstract class Recognizer<Symbol, ATNInterpreter extends ATNSimulator> {
* error reporting. The generated parsers implement a method
* that overrides this to point to their String[] tokenNames.
*/
public String[] getTokenNames() {
return null;
}
public abstract String[] getTokenNames();
public String[] getRuleNames() {
return null;
}
public abstract String[] getRuleNames();
public ATN getATN() { return null; }
/** For debugging and other purposes, might want the grammar name.
* Have ANTLR generate an implementation for this method.
*/
public abstract String getGrammarFileName();
public abstract ATN getATN();
public ATNInterpreter getInterpreter() { return _interp; }

View File

@ -114,7 +114,7 @@ public class LexerATNSimulator extends ATNSimulator {
protected int charPositionInLine = 0;
@NotNull
protected DFA[] dfa;
public final DFA[] dfa;
protected int mode = Lexer.DEFAULT_MODE;
/** Used during DFA/ATN exec to record the most recent accept configuration info */
@ -201,7 +201,7 @@ public class LexerATNSimulator extends ATNSimulator {
ATNConfigSet s0_closure = computeStartState(input, startState);
int old_mode = mode;
dfa[mode].s0 = addDFAState(s0_closure);
int predict = exec(input, s0_closure);
int predict = exec(input, s0_closure, dfa[mode].s0);
if ( debug ) {
System.out.format("DFA after matchATN: %s\n", dfa[old_mode].toLexerString());
@ -282,7 +282,7 @@ public class LexerATNSimulator extends ATNSimulator {
return dfaPrevAccept.state.prediction;
}
protected int exec(@NotNull CharStream input, @NotNull ATNConfigSet s0) {
protected int exec(@NotNull CharStream input, @NotNull ATNConfigSet s0, @Nullable DFAState ds0) {
//System.out.println("enter exec index "+input.index()+" from "+s0);
@NotNull
ATNConfigSet closure = new ATNConfigSet();
@ -297,32 +297,51 @@ public class LexerATNSimulator extends ATNSimulator {
traceLookahead1();
int t = input.LA(1);
DFAState s = ds0; // s is current/from DFA state
while ( true ) { // while more work
if ( debug ) {
System.out.format("in reach starting closure: %s\n", closure);
}
for (ATNConfig c : closure) {
if ( debug ) {
System.out.format("testing %s at %s\n", getTokenName(t), c.toString(recog, true));
}
int n = c.state.getNumberOfTransitions();
for (int ti=0; ti<n; ti++) { // for each transition
Transition trans = c.state.transition(ti);
ATNState target = getReachableTarget(trans, t);
if ( target!=null ) {
closure(new ATNConfig(c, target), reach);
// As we move src->trg, src->trg, we keep track of the previous trg to
// avoid looking up the DFA state again, which is expensive.
// If the previous target was already part of the DFA, we might
// be able to avoid doing a reach operation upon t. If s!=null,
// it means that semantic predicates didn't prevent us from
// creating a DFA state. Once we know s!=null, we check to see if
// the DFA state has an edge already for t. If so, we can just reuse
// its configuration set; there's no point in re-computing it.
// This is kind of like doing DFA simulation within the ATN
// simulation because DFA simulation is really just a way to avoid
// computing reach/closure sets. Technically, once we know that
// we have a previously added DFA state, we could jump over to
// the DFA simulator. But, that would mean popping back and forth
// a lot and making things more complicated algorithmically.
// This optimization makes a lot of sense for loops within DFA.
// A character will take us back to an existing DFA state
// that already has lots of edges out of it. e.g., .* in comments.
DFAState target = null;
if (s != null) {
if ( s.edges != null && t < s.edges.length && t > CharStream.EOF ) {
closure = s.configset;
target = s.edges[t];
if (target != null) {
reach = target.configset;
}
}
}
if ( reach.isEmpty() ) {
if (target == null) { // if we don't find an existing DFA state
// Fill reach starting from closure, following t transitions
getReachableConfigSet(closure, reach, t);
}
if ( reach.isEmpty() ) { // we got nowhere on t from s
// we reached state associated with closure for sure, so
// make sure it's defined. worst case, we define s0 from
// start state configs.
DFAState from = addDFAState(closure);
DFAState from = s != null ? s : addDFAState(closure);
// we got nowhere on t, don't throw out this knowledge; it'd
// cause a failover from DFA later.
if (from != null) {
@ -335,20 +354,25 @@ public class LexerATNSimulator extends ATNSimulator {
processAcceptStates(input, reach);
consume(input);
addDFAEdge(closure, t, reach);
if (target == null) {
// Add an edge from s to target DFA found/created for reach
target = addDFAEdge(s, t, reach);
}
traceLookahead1();
t = input.LA(1);
// swap to avoid reallocating space
// TODO: faster to reallocate?
@NotNull
ATNConfigSet tmp = reach;
reach = closure;
closure = tmp;
reach.clear();
closure = reach;
reach = new ATNConfigSet();
s = target; // flip; current DFA target becomes new src/from state
}
return failOrAccept(atnPrevAccept, input, reach, t);
}
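The long comment above describes the key change: before computing a fresh reach set, the simulator first consults the previous DFA state's outgoing edge for t. Stripped of the debug and accept-state handling, the new per-character step reads roughly like this (a paraphrase of the loop above, not additional API; s is the current DFA state and t is input.LA(1)):

    DFAState target = null;
    if (s != null && s.edges != null && t > CharStream.EOF && t < s.edges.length) {
        closure = s.configset;           // reuse the configs cached with s
        target = s.edges[t];             // edge already known for t?
        if (target != null) reach = target.configset;
    }
    if (target == null) {                // unknown edge: do the ATN reach computation
        getReachableConfigSet(closure, reach, t);
    }
    // ... accept processing, then target = addDFAEdge(s, t, reach) and s = target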
protected int failOrAccept(ATNExecState atnPrevAccept, CharStream input,
ATNConfigSet reach, int t)
{
if ( atnPrevAccept.config==null ) {
// if no accept and EOF is first char, return EOF
if ( t==CharStream.EOF && input.index()==startIndex ) {
@ -363,6 +387,26 @@ public class LexerATNSimulator extends ATNSimulator {
return atn.ruleToTokenType[ruleIndex];
}
/** Given a starting configuration set, figure out all ATN configurations
* we can reach upon input t. Parameter reach is a return parameter.
*/
protected void getReachableConfigSet(ATNConfigSet closure, ATNConfigSet reach, int t) {
for (ATNConfig c : closure) {
if ( debug ) {
System.out.format("testing %s at %s\n", getTokenName(t), c.toString(recog, true));
}
int n = c.state.getNumberOfTransitions();
for (int ti=0; ti<n; ti++) { // for each transition
Transition trans = c.state.transition(ti);
ATNState target = getReachableTarget(trans, t);
if ( target!=null ) {
closure(new ATNConfig(c, target), reach);
}
}
}
}
protected void processAcceptStates(@NotNull CharStream input, @NotNull ATNConfigSet reach) {
for (int ci=0; ci<reach.size(); ci++) {
ATNConfig c = reach.get(ci);
@ -565,7 +609,7 @@ public class LexerATNSimulator extends ATNSimulator {
input.substring(startIndex, input.index()), s.stateNumber, s.configset);
}
int ttype = exec(input, s.configset);
int ttype = exec(input, s.configset, s);
if ( dfa_debug ) {
System.out.format("back from DFA update, ttype=%d, dfa[mode %d]=\n%s\n",
@ -584,27 +628,24 @@ public class LexerATNSimulator extends ATNSimulator {
state.charPos = charPositionInLine;
}
protected void addDFAEdge(@NotNull ATNConfigSet p,
int t,
@NotNull ATNConfigSet q)
protected DFAState addDFAEdge(@NotNull DFAState from,
int t,
@NotNull ATNConfigSet q)
{
DFAState to = addDFAState(q);
// even if we can add the states, we can't add an edge for labels out of range
if (t < 0 || t > MAX_DFA_EDGE) {
return;
return to;
}
// System.out.println("MOVE "+p+" -> "+q+" upon "+getTokenName(t));
DFAState from = addDFAState(p);
if (from == null) {
return;
}
DFAState to = addDFAState(q);
if (to == null) {
return;
if (from == null || to == null) {
return to;
}
addDFAEdge(from, t, to);
return to;
}
protected void addDFAEdge(@NotNull DFAState p, int t, @NotNull DFAState q) {

View File

@ -225,7 +225,7 @@ import java.util.*;
* when closure operations fall off the end of the rule that
* holds the decision we're evaluating
*/
public class ParserATNSimulator<Symbol> extends ATNSimulator {
public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
public static boolean debug = false;
public static boolean dfa_debug = false;
public static boolean retry_debug = false;
@ -261,15 +261,14 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
public void reset() {
}
public int adaptivePredict(@NotNull SymbolStream<Token> input, int decision,
@Nullable ParserRuleContext outerContext)
public int adaptivePredict(@NotNull SymbolStream<? extends Symbol> input, int decision,
@Nullable ParserRuleContext<?> outerContext)
{
predict_calls++;
DFA dfa = decisionToDFA[decision];
if ( dfa==null || dfa.s0==null ) {
DecisionState startState = atn.decisionToState.get(decision);
decisionToDFA[decision] = dfa = new DFA(startState);
dfa.decision = decision;
decisionToDFA[decision] = dfa = new DFA(startState, decision);
return predictATN(dfa, input, outerContext);
}
else {
@ -288,8 +287,8 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
}
}
public int predictATN(@NotNull DFA dfa, @NotNull SymbolStream<Token> input,
@Nullable ParserRuleContext outerContext)
public int predictATN(@NotNull DFA dfa, @NotNull SymbolStream<? extends Symbol> input,
@Nullable ParserRuleContext<?> outerContext)
{
if ( outerContext==null ) outerContext = ParserRuleContext.EMPTY;
if ( debug ) System.out.println("ATN decision "+dfa.decision+
@ -320,8 +319,8 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
}
public int execDFA(@NotNull DFA dfa, @NotNull DFAState s0,
@NotNull SymbolStream<Token> input, int startIndex,
@Nullable ParserRuleContext outerContext)
@NotNull SymbolStream<? extends Symbol> input, int startIndex,
@Nullable ParserRuleContext<?> outerContext)
{
if ( outerContext==null ) outerContext = ParserRuleContext.EMPTY;
if ( dfa_debug ) System.out.println("DFA decision "+dfa.decision+
@ -371,37 +370,32 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
// if no edge, pop over to ATN interpreter, update DFA and return
if ( s.edges == null || t >= s.edges.length || t < -1 || s.edges[t+1] == null ) {
if ( dfa_debug && t>=0 ) System.out.println("no edge for "+parser.getTokenNames()[t]);
int alt = -1;
int alt;
if ( dfa_debug ) {
System.out.println("ATN exec upon "+
parser.getInputString(startIndex) +
" at DFA state "+s.stateNumber);
}
try {
alt = execATN(dfa, s, input, startIndex, outerContext);
// this adds edge even if next state is accept for
// same alt; e.g., s0-A->:s1=>2-B->:s2=>2
// TODO: This next stuff kills edge, but extra states remain. :(
if ( s.isAcceptState && alt!=-1 ) {
DFAState d = s.edges[input.LA(1)+1];
if ( d.isAcceptState && d.prediction==s.prediction ) {
// we can carve it out.
s.edges[input.LA(1)+1] = ERROR; // IGNORE really not error
}
alt = execATN(dfa, s, input, startIndex, outerContext);
// this adds edge even if next state is accept for
// same alt; e.g., s0-A->:s1=>2-B->:s2=>2
// TODO: This next stuff kills edge, but extra states remain. :(
if ( s.isAcceptState && alt!=-1 ) {
DFAState d = s.edges[input.LA(1)+1];
if ( d.isAcceptState && d.prediction==s.prediction ) {
// we can carve it out.
s.edges[input.LA(1)+1] = ERROR; // IGNORE really not error
}
if ( dfa_debug ) {
System.out.println("back from DFA update, alt="+alt+", dfa=\n"+dfa.toString(parser.getTokenNames()));
//dump(dfa);
}
// action already executed
if ( dfa_debug ) System.out.println("DFA decision "+dfa.decision+
" predicts "+alt);
return alt; // we've updated DFA, exec'd action, and have our deepest answer
}
catch (NoViableAltException nvae) {
addDFAEdge(s, t, ERROR);
throw nvae;
if ( dfa_debug ) {
System.out.println("back from DFA update, alt="+alt+", dfa=\n"+dfa.toString(parser.getTokenNames()));
//dump(dfa);
}
// action already executed
if ( dfa_debug ) System.out.println("DFA decision "+dfa.decision+
" predicts "+alt);
return alt; // we've updated DFA, exec'd action, and have our deepest answer
}
DFAState target = s.edges[t+1];
if ( target == ERROR ) {
@ -479,15 +473,15 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
*/
public int execATN(@NotNull DFA dfa, @NotNull DFAState s0,
@NotNull SymbolStream<Token> input, int startIndex,
ParserRuleContext outerContext)
@NotNull SymbolStream<? extends Symbol> input, int startIndex,
ParserRuleContext<?> outerContext)
{
if ( debug ) System.out.println("execATN decision "+dfa.decision+" exec LA(1)=="+ getLookaheadName(input));
ATN_failover++;
ATNConfigSet previous = s0.configset;
DFAState D = null;
ATNConfigSet fullCtxSet = null;
DFAState D;
ATNConfigSet fullCtxSet;
if ( debug ) System.out.println("s0 = "+s0);
@ -571,21 +565,28 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
int nalts = decState.getNumberOfTransitions();
List<DFAState.PredPrediction> predPredictions =
predicateDFAState(D, D.configset, outerContext, nalts);
IntervalSet conflictingAlts = getConflictingAltsFromConfigSet(D.configset);
if ( D.predicates.size() < conflictingAlts.size() ) {
reportInsufficientPredicates(dfa, startIndex, input.index(),
conflictingAlts,
decState,
getPredsForAmbigAlts(conflictingAlts, D.configset, nalts),
D.configset,
false);
if ( predPredictions!=null ) {
IntervalSet conflictingAlts = getConflictingAltsFromConfigSet(D.configset);
if ( D.predicates.size() < conflictingAlts.size() ) {
reportInsufficientPredicates(dfa, startIndex, input.index(),
conflictingAlts,
decState,
getPredsForAmbigAlts(conflictingAlts, D.configset, nalts),
D.configset,
false);
}
input.seek(startIndex);
predictedAlt = evalSemanticContext(predPredictions, outerContext);
if ( predictedAlt!=ATN.INVALID_ALT_NUMBER ) {
return predictedAlt;
}
// Consistency check - the DFAState should not have a "fallback"
// prediction specified for the case where no predicates succeed.
assert D.prediction == ATN.INVALID_ALT_NUMBER;
throw noViableAlt(input, outerContext, D.configset, startIndex);
}
input.seek(startIndex);
predictedAlt = evalSemanticContext(predPredictions, outerContext);
if ( predictedAlt!=ATN.INVALID_ALT_NUMBER ) {
return predictedAlt;
}
throw noViableAlt(input, outerContext, D.configset, startIndex);
}
if ( D.isAcceptState ) return predictedAlt;
@ -600,8 +601,8 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
public ATNConfigSet execATNWithFullContext(DFA dfa,
DFAState D, // how far we got before failing over
@NotNull ATNConfigSet s0,
@NotNull SymbolStream<Token> input, int startIndex,
ParserRuleContext outerContext,
@NotNull SymbolStream<? extends Symbol> input, int startIndex,
ParserRuleContext<?> outerContext,
int nalts,
boolean greedy)
{
@ -637,7 +638,7 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
if ( reach.hasSemanticContext ) {
SemanticContext[] altToPred = getPredsForAmbigAlts(reach.conflictingAlts, reach, nalts);
// altToPred[uniqueAlt] is now our validating predicate (if any)
List<DFAState.PredPrediction> predPredictions = null;
List<DFAState.PredPrediction> predPredictions;
if ( altToPred!=null ) {
// we have a validating predicate; test it
predPredictions = getPredicatePredictions(reach.conflictingAlts, altToPred);
@ -742,14 +743,9 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
// we have a validating predicate; test it
// Update DFA so reach becomes accept state with predicate
predPredictions = getPredicatePredictions(conflictingAlts, altToPred);
if ( D.isCtxSensitive ) {
// D.ctxToPredicates.put(outerContext, predPredictions);
}
else {
D.predicates = predPredictions;
}
D.predicates = predPredictions;
D.prediction = ATN.INVALID_ALT_NUMBER; // make sure we use preds
}
D.prediction = ATN.INVALID_ALT_NUMBER; // make sure we use preds
return predPredictions;
}
@ -758,13 +754,32 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
int nalts)
{
// REACH=[1|1|[]|0:0, 1|2|[]|0:1]
/* altToPred starts as an array of all null contexts. The entry at index i
* corresponds to alternative i. altToPred[i] may have one of three values:
* 1. null: no ATNConfig c is found such that c.alt==i
* 2. SemanticContext.NONE: At least one ATNConfig c exists such that
* c.alt==i and c.semanticContext==SemanticContext.NONE. In other words,
* alt i has at least one unpredicated config.
* 3. Non-NONE Semantic Context: There exists at least one, and for all
* ATNConfig c such that c.alt==i, c.semanticContext!=SemanticContext.NONE.
*
* From this, it is clear that NONE||anything==NONE.
*/
SemanticContext[] altToPred = new SemanticContext[nalts +1];
int n = altToPred.length;
for (int i = 0; i < n; i++) altToPred[i] = SemanticContext.NONE;
int nPredAlts = 0;
for (ATNConfig c : configs) {
if ( c.semanticContext!=SemanticContext.NONE && ambigAlts.contains(c.alt) ) {
if ( ambigAlts.contains(c.alt) ) {
altToPred[c.alt] = SemanticContext.or(altToPred[c.alt], c.semanticContext);
}
}
int nPredAlts = 0;
for (int i = 0; i < n; i++) {
if (altToPred[i] == null) {
altToPred[i] = SemanticContext.NONE;
}
else if (altToPred[i] != SemanticContext.NONE) {
nPredAlts++;
}
}
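A worked illustration of the rewritten loops (the predicates are made up): with nalts==2 and ambigAlts=={1,2}, suppose alt 1's configurations carry predicates p1 and p2 while alt 2 has one unpredicated configuration plus one guarded by p3. The first loop leaves altToPred as [null, p1||p2, NONE] (index 0 is unused; alt 2 collapses to NONE because NONE||p3==NONE). The second loop then turns the remaining null into NONE and counts nPredAlts==1: only alt 1 actually needs predicate evaluation, exactly the three-valued scheme the comment above describes.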
@ -799,7 +814,7 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
pairs.add(new DFAState.PredPrediction(pred, i));
}
}
if ( pairs.size()==0 ) pairs = null;
if ( pairs.isEmpty() ) pairs = null;
else if ( firstUnpredicated!=ATN.INVALID_ALT_NUMBER ) {
// add default prediction if we found null predicate
pairs.add(new DFAState.PredPrediction(null, firstUnpredicated));
@ -813,7 +828,7 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
* prediction for disambiguating predicates.
*/
public int evalSemanticContext(List<DFAState.PredPrediction> predPredictions,
ParserRuleContext outerContext)
ParserRuleContext<?> outerContext)
{
int predictedAlt = ATN.INVALID_ALT_NUMBER;
// List<DFAState.PredPrediction> predPredictions = D.predicates;
@ -823,10 +838,12 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
predictedAlt = pair.alt; // default prediction
break;
}
boolean evaluatedResult = pair.pred.eval(parser, outerContext);
if ( debug || dfa_debug ) {
System.out.println("eval pred "+pair+"="+pair.pred.eval(parser, outerContext));
System.out.println("eval pred "+pair+"="+evaluatedResult);
}
if ( pair.pred.eval(parser, outerContext) ) {
if ( evaluatedResult ) {
if ( debug || dfa_debug ) System.out.println("PREDICT "+pair.alt);
predictedAlt = pair.alt;
break;
@ -851,6 +868,17 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
@NotNull Set<ATNConfig> closureBusy,
boolean collectPredicates,
boolean greedy, boolean loopsSimulateTailRecursion)
{
final int initialDepth = 0;
closure(config, configs, closureBusy, collectPredicates, greedy, loopsSimulateTailRecursion, initialDepth);
}
protected void closure(@NotNull ATNConfig config,
@NotNull ATNConfigSet configs,
@NotNull Set<ATNConfig> closureBusy,
boolean collectPredicates,
boolean greedy, boolean loopsSimulateTailRecursion,
int depth)
{
if ( debug ) System.out.println("closure("+config.toString(parser,true)+")");
@ -875,7 +903,8 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
// gotten that context AFTER having fallen off a rule.
// Make sure we track that we are now out of context.
c.reachesIntoOuterContext = config.reachesIntoOuterContext;
closure(c, configs, closureBusy, collectPredicates, greedy, loopsSimulateTailRecursion);
assert depth > Integer.MIN_VALUE;
closure(c, configs, closureBusy, collectPredicates, greedy, loopsSimulateTailRecursion, depth - 1);
return;
}
else {
@ -919,8 +948,9 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
Transition t = p.transition(i);
boolean continueCollecting =
!(t instanceof ActionTransition) && collectPredicates;
ATNConfig c = getEpsilonTarget(config, t, continueCollecting);
ATNConfig c = getEpsilonTarget(config, t, continueCollecting, depth == 0);
if ( c!=null ) {
int newDepth = depth;
if ( config.state instanceof RuleStopState ) {
// target fell off end of rule; mark resulting c as having dipped into outer context
// We can't get here if incoming config was rule stop and we had context
@ -929,9 +959,18 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
// preds if this is > 0.
c.reachesIntoOuterContext++;
configs.dipsIntoOuterContext = true; // TODO: can remove? only care when we add to set per middle of this method
assert newDepth > Integer.MIN_VALUE;
newDepth--;
if ( debug ) System.out.println("dips into outer ctx: "+c);
}
closure(c, configs, closureBusy, continueCollecting, greedy, loopsSimulateTailRecursion);
else if (t instanceof RuleTransition) {
// latch when newDepth goes negative - once we step out of the entry context we can't return
if (newDepth >= 0) {
newDepth++;
}
}
closure(c, configs, closureBusy, continueCollecting, greedy, loopsSimulateTailRecursion, newDepth);
}
}
}
@ -943,12 +982,12 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
}
@Nullable
public ATNConfig getEpsilonTarget(@NotNull ATNConfig config, @NotNull Transition t, boolean collectPredicates) {
public ATNConfig getEpsilonTarget(@NotNull ATNConfig config, @NotNull Transition t, boolean collectPredicates, boolean inContext) {
if ( t instanceof RuleTransition ) {
return ruleTransition(config, t);
}
else if ( t instanceof PredicateTransition ) {
return predTransition(config, (PredicateTransition)t, collectPredicates);
return predTransition(config, (PredicateTransition)t, collectPredicates, inContext);
}
else if ( t instanceof ActionTransition ) {
return actionTransition(config, (ActionTransition)t);
@ -968,7 +1007,8 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
@Nullable
public ATNConfig predTransition(@NotNull ATNConfig config,
@NotNull PredicateTransition pt,
boolean collectPredicates)
boolean collectPredicates,
boolean inContext)
{
if ( debug ) {
System.out.println("PRED (collectPredicates="+collectPredicates+") "+
@ -979,12 +1019,6 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
parser.getRuleInvocationStack());
}
}
// We know the correct context in exactly one spot: in the original
// rule that invokes the ATN simulation. We know we are in this rule
// when the context stack is empty and we've not dipped into
// the outer context.
boolean inContext =
config.context==ParserRuleContext.EMPTY && config.reachesIntoOuterContext==0;
ATNConfig c;
if ( collectPredicates &&
@ -1207,7 +1241,7 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
@NotNull
public String getTokenName(int t) {
if ( t==-1 ) return "EOF";
if ( t==Token.EOF ) return "EOF";
if ( parser!=null && parser.getTokenNames()!=null ) {
String[] tokensNames = parser.getTokenNames();
if ( t>=tokensNames.length ) {
@ -1221,7 +1255,7 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
return String.valueOf(t);
}
public String getLookaheadName(SymbolStream<Token> input) {
public String getLookaheadName(SymbolStream<? extends Symbol> input) {
return getTokenName(input.LA(1));
}
@ -1246,18 +1280,18 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
}
@NotNull
public NoViableAltException noViableAlt(@NotNull SymbolStream<Token> input,
@NotNull ParserRuleContext outerContext,
public NoViableAltException noViableAlt(@NotNull SymbolStream<? extends Symbol> input,
@NotNull ParserRuleContext<?> outerContext,
@NotNull ATNConfigSet configs,
int startIndex)
{
return new NoViableAltException(parser, input,
(Token)input.get(startIndex),
(Token)input.LT(1),
input.get(startIndex),
input.LT(1),
configs, outerContext);
}
public static int getUniqueAlt(@NotNull Collection<ATNConfig> configs) {
public int getUniqueAlt(@NotNull Collection<ATNConfig> configs) {
int alt = ATN.INVALID_ALT_NUMBER;
for (ATNConfig c : configs) {
if ( alt == ATN.INVALID_ALT_NUMBER ) {
@ -1282,6 +1316,7 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
return false;
}
@NotNull
protected DFAState addDFAEdge(@NotNull DFA dfa,
@NotNull ATNConfigSet p,
int t,
@ -1304,7 +1339,7 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
}
/** See comment on LexerInterpreter.addDFAState. */
@Nullable
@NotNull
protected DFAState addDFAState(@NotNull DFA dfa, @NotNull ATNConfigSet configs) {
DFAState proposed = new DFAState(configs);
DFAState existing = dfa.states.get(proposed);

View File

@ -189,14 +189,19 @@ public abstract class SemanticContext {
}
public static SemanticContext and(SemanticContext a, SemanticContext b) {
if ( a == NONE ) return b;
if ( b == NONE ) return a;
if ( a == null || a == NONE ) return b;
if ( b == null || b == NONE ) return a;
return new AND(a, b);
}
/**
*
* @see ParserATNSimulator#getPredsForAmbigAlts
*/
public static SemanticContext or(SemanticContext a, SemanticContext b) {
if ( a == NONE ) return b;
if ( b == NONE ) return a;
if ( a == null ) return b;
if ( b == null ) return a;
if ( a == NONE || b == NONE ) return NONE;
return new OR(a, b);
}
}
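Together with the null checks added to and(), the new or() gives the identities getPredsForAmbigAlts above now relies on: and(null, p)==p and and(NONE, p)==p, or(null, p)==p, but or(NONE, p)==NONE for any predicate p. The last one is the "NONE||anything==NONE" rule: an alternative with at least one unpredicated configuration never requires predicate evaluation, so folding its configurations together must yield NONE.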

View File

@ -43,7 +43,8 @@ public class DFA {
public final Map<DFAState, DFAState> states = new LinkedHashMap<DFAState, DFAState>();
@Nullable
public DFAState s0;
public int decision;
public final int decision;
/** From which ATN state did we create this DFA? */
@NotNull
@ -54,7 +55,14 @@ public class DFA {
*/
// public OrderedHashSet<ATNConfig> conflictSet;
public DFA(@NotNull DecisionState atnStartState) { this.atnStartState = atnStartState; }
public DFA(@NotNull DecisionState atnStartState) {
this(atnStartState, 0);
}
public DFA(@NotNull DecisionState atnStartState, int decision) {
this.atnStartState = atnStartState;
this.decision = decision;
}
/** Find the path in DFA from s0 to s, returning list of states encountered (inclusively) */
// public List<DFAState> getPathToState(DFAState finalState, TokenStream input, int start, int stop) {

View File

@ -45,11 +45,11 @@ public interface ParseTree extends SyntaxTree {
RuleContext getRuleContext();
}
public interface TerminalNode<Symbol> extends ParseTree {
public interface TerminalNode<Symbol extends Token> extends ParseTree {
Symbol getSymbol();
}
public static class TerminalNodeImpl<Symbol> implements TerminalNode<Symbol> {
public static class TerminalNodeImpl<Symbol extends Token> implements TerminalNode<Symbol> {
public Symbol symbol;
public ParseTree parent;
/** Which ATN node matched this token? */
@ -72,13 +72,7 @@ public interface ParseTree extends SyntaxTree {
public Interval getSourceInterval() {
if ( symbol ==null ) return Interval.INVALID;
if (symbol instanceof Token) {
return new Interval(((Token)symbol).getStartIndex(), ((Token)symbol).getStopIndex());
} else if (symbol instanceof SyntaxTree) {
return ((SyntaxTree)symbol).getSourceInterval();
} else {
throw new UnsupportedOperationException("This symbol type is not supported by the default implementation.");
}
return new Interval(symbol.getStartIndex(), symbol.getStopIndex());
}
@Override
@ -86,13 +80,8 @@ public interface ParseTree extends SyntaxTree {
@Override
public String toString() {
if (symbol instanceof Token) {
if ( ((Token)symbol).getType() == Token.EOF ) return "<EOF>";
return ((Token)symbol).getText();
}
else {
throw new UnsupportedOperationException("This symbol type is not supported by the default implementation.");
}
if ( symbol.getType() == Token.EOF ) return "<EOF>";
return symbol.getText();
}
@Override
@ -107,12 +96,10 @@ public interface ParseTree extends SyntaxTree {
* and deletion as well as during "consume until error recovery set"
* upon no viable alternative exceptions.
*/
public static class ErrorNodeImpl<Symbol> extends TerminalNodeImpl<Symbol> {
public static class ErrorNodeImpl<Symbol extends Token> extends TerminalNodeImpl<Symbol> {
public ErrorNodeImpl(Symbol token) {
super(token);
}
// @Override
// public String toString() { return "<ERROR:"+super.toString()+">"; }
}
// the following methods narrow the return type; they are not additional methods

View File

@ -30,8 +30,9 @@
package org.antlr.v4.runtime.tree;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
public interface ParseTreeListener<Symbol> {
public interface ParseTreeListener<Symbol extends Token> {
void visitTerminal(ParserRuleContext<Symbol> ctx, Symbol symbol);
void enterEveryRule(ParserRuleContext<Symbol> ctx);
void exitEveryRule(ParserRuleContext<Symbol> ctx);

View File

@ -1,6 +1,7 @@
package org.antlr.v4.runtime.tree;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
/** T is return type of visit methods. Use T=Void for no return type. */
public class ParseTreeVisitor<T> {
@ -8,19 +9,30 @@ public class ParseTreeVisitor<T> {
return ctx.accept(this);
}
/** Visit all rule, nonleaf children. Not useful if you are using T as
* non-Void. This returns nothing, losing all computations from below.
* But handy if you are just walking the tree with a visitor and only
/** Visit all rule, nonleaf children. Not that useful if you are using T as
* non-Void. This returns the value returned from the last child visited,
* losing all computations from the first n-1 children. It works fine for
* ctxs with a single child, though.
* Handy if you are just walking the tree with a visitor and only
* care about some nodes. The ParserRuleContext.accept() method
* walks all children by default; i.e., calls this method.
*/
public <Symbol> void visitChildren(ParserRuleContext<Symbol> ctx) {
public T visitChildren(ParserRuleContext<? extends Token> ctx) {
T result = null;
for (ParseTree c : ctx.children) {
if ( c instanceof ParseTree.RuleNode) {
ParseTree.RuleNode r = (ParseTree.RuleNode)c;
ParserRuleContext<Symbol> rctx = (ParserRuleContext<Symbol>)r.getRuleContext();
visit(rctx);
ParserRuleContext<?> rctx = (ParserRuleContext<? extends Token>)r.getRuleContext();
result = visit(rctx);
}
else {
result = visitTerminal(ctx, ((ParseTree.TerminalNode<? extends Token>)c).getSymbol());
}
}
return result;
}
public T visitTerminal(ParserRuleContext<? extends Token> ctx, Token symbol) {
return null;
}
}
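A short sketch of how the value-returning visitor might be used (EvalVisitor and MyLexer.INT are hypothetical; only ParseTreeVisitor and the visitTerminal hook above come from this commit):

    import org.antlr.v4.runtime.ParserRuleContext;
    import org.antlr.v4.runtime.Token;
    import org.antlr.v4.runtime.tree.ParseTreeVisitor;

    public class EvalVisitor extends ParseTreeVisitor<Integer> {
        @Override
        public Integer visitTerminal(ParserRuleContext<? extends Token> ctx, Token symbol) {
            // leaves now feed a value back up; non-INT leaves fall back to null
            return symbol.getType() == MyLexer.INT ? Integer.valueOf(symbol.getText()) : null;
        }
        // Generated rule-specific visit methods (see the BaseVisitorFile template below)
        // can combine child results; plain visitChildren() keeps only the last child's value,
        // as the comment above warns.
    }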

View File

@ -30,12 +30,13 @@
package org.antlr.v4.runtime.tree;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
public class ParseTreeWalker {
public static final ParseTreeWalker DEFAULT = new ParseTreeWalker();
@SuppressWarnings("unchecked")
public <Symbol> void walk(ParseTreeListener<Symbol> listener, ParseTree t) {
public <Symbol extends Token> void walk(ParseTreeListener<Symbol> listener, ParseTree t) {
if ( t instanceof ParseTree.TerminalNode) {
visitTerminal(listener, (ParseTree.TerminalNode<Symbol>) t);
return;
@ -50,7 +51,7 @@ public class ParseTreeWalker {
}
@SuppressWarnings("unchecked")
protected <Symbol> void visitTerminal(ParseTreeListener<Symbol> listener,
protected <Symbol extends Token> void visitTerminal(ParseTreeListener<Symbol> listener,
ParseTree.TerminalNode<Symbol> t)
{
ParseTree.RuleNode r = (ParseTree.RuleNode)t.getParent();
@ -66,14 +67,14 @@ public class ParseTreeWalker {
* First we trigger the generic and then the rule specific.
* We do them in reverse order upon finishing the node.
*/
protected <Symbol> void enterRule(ParseTreeListener<Symbol> listener, ParseTree.RuleNode r) {
protected <Symbol extends Token> void enterRule(ParseTreeListener<Symbol> listener, ParseTree.RuleNode r) {
@SuppressWarnings("unchecked")
ParserRuleContext<Symbol> ctx = (ParserRuleContext<Symbol>)r.getRuleContext();
listener.enterEveryRule(ctx);
ctx.enterRule(listener);
}
protected <Symbol> void exitRule(ParseTreeListener<Symbol> listener, ParseTree.RuleNode r) {
protected <Symbol extends Token> void exitRule(ParseTreeListener<Symbol> listener, ParseTree.RuleNode r) {
@SuppressWarnings("unchecked")
ParserRuleContext<Symbol> ctx = (ParserRuleContext<Symbol>)r.getRuleContext();
ctx.exitRule(listener);
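So for a rule node of, say, rule stat (the rule name is illustrative), the walker fires listener.enterEveryRule(ctx) and then the rule-specific enterStat via ctx.enterRule(listener) on the way in, and ctx.exitRule(listener) (exitStat) followed by listener.exitEveryRule(ctx) on the way out, matching the ordering the comment above promises.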

View File

@ -1,56 +0,0 @@
grammar Errors;
stat: 'return' INT
| ID '=' expr ';'
| ID '(' expr (',' expr)* ')' ';'
/ ID .* '(' expr (',' expr)* ')' ';'
/ ID '=' .* ';' // bad assignment
/ .* ';' // bad stat
/ .* // match anything else? when to stop?
/ // match anything else?
;
catch[Exception e] { }
finally { }
// error to match might be diff than how to resynch? maybe just
// include resynch pattern on end of error alt.
/*
Traps any recog exception in anything called from rule or matched in that rule.
a : expr ';'
/ '--' ID ';' // catches any problem in expr or matching ';'
;
If no err alt matches, defaults to normal error mechanism at rule level.
report. resync.
*/
atom: '(' expr ')'
| INT
/ '(' expr // missing RP; how to resync?
/ '(' ')'
;
// do error alts affect FOLLOW sync sets? nope.
// foo -> bar says how to make resulting tree for bad alts
expr: atom ('*' atom)* ;
atom: INT ;
ID : 'a'..'z'+ ;
WS : (' '|'\n')* ;
/*
Stop .* when it sees any viable following token, even if it uses FOLLOW. So,
err alt
/ .*
would match until it sees something in FOLLOW (but not context-sensitive follow).
actually maybe it would be sensitive; just use real outer context when matching
error alts. who cares about speed.
*/

View File

@ -1,19 +1,27 @@
grammar T;
/* This is ambig too.
s_ : s EOF ;
s : a s
|
;
@members {
public static class LeafListener extends TBaseListener {
public void exitA(TParser.EContext ctx) {
/*
if (ctx.getChildCount()==3) {
System.out.printf("%s %s %s",ctx.e(0).start.getText(),
ctx.e(1).start.getText(),ctx.e().get(0).start.getText());
}
else System.out.println(ctx.INT(0).start.getText());
*/
s : (a)* EOF ; // ambig; can match A B in alt 3 or alt 2 then alt 1
a : e '!'
| e
}
}}
s
@init {setBuildParseTree(true);}
@after { System.out.println($r.ctx.toStringTree(this)); ParseTreeWalker walker = new ParseTreeWalker();
walker.walk(new LeafListener(), $r.ctx);}
: r=e ;
e : e op='*' e
| e op='+' e
| e '++'
| INT
;
e : B
| A // both alts 2,3 can reach end of s upon abEOF
| A B
;
A : 'a' ;
B : 'b' ;
WS : (' '|'\n')+ {skip();} ;
MULT: '*' ;
ADD : '+' ;
INT : [0-9]+ ;
WS : [ \t\n]+ -> skip ;

View File

@ -40,8 +40,8 @@ public class TestT {
}
TParser p = new TParser(tokens);
p.setBuildParseTree(true);
final TParser.sContext tree = p.s();
System.out.println(tree.toStringTree(p));
// final TParser.sContext tree = p.s();
// System.out.println(tree.toStringTree(p));
// TreeViewer v = new TreeViewer(p, tree);
// v.setHighlightedBoxColor(TreeViewer.LIGHT_RED);
// v.addHighlightedNodes(new ArrayList<Tree>() {{

View File

@ -47,12 +47,12 @@ import org.antlr.v4.runtime.Token;
public class <file.grammarName>BaseListener implements <file.grammarName>Listener {
<file.listenerNames:{lname |
public void enter<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx) { \}
public void exit<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx) { \}}; separator="\n">
@Override public void enter<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx) { \}
@Override public void exit<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx) { \}}; separator="\n">
public void enterEveryRule(ParserRuleContext\<<InputSymbolType()>\> ctx) { }
public void exitEveryRule(ParserRuleContext\<<InputSymbolType()>\> ctx) { }
public void visitTerminal(ParserRuleContext\<<InputSymbolType()>\> ctx, <InputSymbolType()> symbol) { }
@Override public void enterEveryRule(ParserRuleContext\<<InputSymbolType()>\> ctx) { }
@Override public void exitEveryRule(ParserRuleContext\<<InputSymbolType()>\> ctx) { }
@Override public void visitTerminal(ParserRuleContext\<<InputSymbolType()>\> ctx, <InputSymbolType()> symbol) { }
}
>>
@ -76,13 +76,13 @@ import org.antlr.v4.runtime.*;
public class <file.grammarName>BaseParseListener implements <file.grammarName>ParseListener {
<file.listenerEnterNames:{lname |
public void enter<lname; format="cap">(ParserRuleContext\<<InputSymbolType()>\> ctx) { \}}; separator="\n">
@Override public void enter<lname; format="cap">(ParserRuleContext\<<InputSymbolType()>\> ctx) { \}}; separator="\n">
<file.listenerExitNames:{lname |
public void exit<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx) { \}}; separator="\n">
@Override public void exit<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx) { \}}; separator="\n">
public void enterNonLRRule(ParserRuleContext\<<InputSymbolType()>\> ctx) { }
public void exitEveryRule(ParserRuleContext\<<InputSymbolType()>\> ctx) { }
public void visitTerminal(ParserRuleContext\<<InputSymbolType()>\> ctx, <InputSymbolType()> symbol) { }
@Override public void enterNonLRRule(ParserRuleContext\<<InputSymbolType()>\> ctx) { }
@Override public void exitEveryRule(ParserRuleContext\<<InputSymbolType()>\> ctx) { }
@Override public void visitTerminal(ParserRuleContext\<<InputSymbolType()>\> ctx, <InputSymbolType()> symbol) { }
}
>>
@ -101,10 +101,12 @@ BaseVisitorFile(file, header) ::= <<
<header>
import org.antlr.v4.runtime.tree.*;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.ParserRuleContext;
public class <file.grammarName>BaseVisitor\<T> extends ParseTreeVisitor\<T> implements <file.grammarName>Visitor\<T> {
<file.visitorNames:{lname |
public T visit<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx) { visitChildren(ctx); return null; \}}; separator="\n">
@Override public T visit<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx) { return visitChildren(ctx); \}}; separator="\n">
@Override public T visitTerminal(ParserRuleContext\<? extends Token> ctx, Token symbol) { return null; }
}
>>
@ -128,18 +130,24 @@ public class <parser.name> extends <superclass> {
public static final String[] ruleNames = {
<parser.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor>
};
@Override
public String getGrammarFileName() { return "<parser.grammarFileName>"; }
@Override
public String[] getTokenNames() { return tokenNames; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public ATN getATN() { return _ATN; }
<namedActions.members>
<extras>
<parser:(ctor)()>
<funcs; separator="\n">
@Override
public String[] getTokenNames() { return tokenNames; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public ATN getATN() { return _ATN; }
<if(sempredFuncs)>
public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
switch ( ruleIndex ) {
@ -617,19 +625,22 @@ public static class <struct.name> extends <currentRule.name; format="cap">Contex
>>
ListenerDispatchMethod(method) ::= <<
@Override
public void <if(method.isEnter)>enter<else>exit<endif>Rule(ParseTreeListener\<<InputSymbolType()>\> listener) {
if ( listener instanceof <parser.grammarName>Listener ) ((<parser.grammarName>Listener)listener).<if(method.isEnter)>enter<else>exit<endif><struct.derivedFromName; format="cap">(this);
}
>>
VisitorDispatchMethod(method) ::= <<
@Override
public \<T> T accept(ParseTreeVisitor\<? extends T> visitor) {
if ( visitor instanceof <parser.grammarName>Visitor ) return ((<parser.grammarName>Visitor\<T>)visitor).visit<struct.derivedFromName; format="cap">(this);
else return null;
if ( visitor instanceof <parser.grammarName>Visitor ) return ((<parser.grammarName>Visitor\<T>)visitor).visit<struct.derivedFromName; format="cap">(this);
else return null;
}
>>
ParseListenerDispatchMethod(method) ::= <<
@Override
public void <if(method.isEnter)>enter<else>exit<endif>Rule(ParseListener\<<InputSymbolType()>\> listener) {
if ( listener instanceof <parser.grammarName>ParseListener ) ((<parser.grammarName>ParseListener)listener).<if(method.isEnter)>enter<else>exit<endif><struct.derivedFromName; format="cap">(this);
}
@ -720,7 +731,8 @@ public class <lexer.name> extends Lexer {
_interp = new LexerATNSimulator(this,_ATN);
}
public String getGrammarFileName() { return "<lexerFile.fileName>"; }
@Override
public String getGrammarFileName() { return "<lexer.grammarFileName>"; }
@Override
public String[] getTokenNames() { return tokenNames; }

View File

@ -29,20 +29,50 @@
package org.antlr.v4;
import org.antlr.runtime.*;
import org.antlr.runtime.ANTLRFileStream;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.ParserRuleReturnScope;
import org.antlr.runtime.RecognitionException;
import org.antlr.v4.analysis.AnalysisPipeline;
import org.antlr.v4.automata.*;
import org.antlr.v4.automata.ATNFactory;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.codegen.CodeGenPipeline;
import org.antlr.v4.parse.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.parse.ANTLRLexer;
import org.antlr.v4.parse.ANTLRParser;
import org.antlr.v4.parse.GrammarASTAdaptor;
import org.antlr.v4.parse.ToolANTLRParser;
import org.antlr.v4.runtime.misc.LogManager;
import org.antlr.v4.runtime.misc.Nullable;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.tool.*;
import org.antlr.v4.tool.ast.*;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.ANTLRToolListener;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.DefaultToolListener;
import org.antlr.v4.tool.ErrorManager;
import org.antlr.v4.tool.ErrorType;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarTransformPipeline;
import org.antlr.v4.tool.LexerGrammar;
import org.antlr.v4.tool.Rule;
import org.antlr.v4.tool.ast.GrammarAST;
import org.antlr.v4.tool.ast.GrammarASTErrorNode;
import org.antlr.v4.tool.ast.GrammarRootAST;
import org.stringtemplate.v4.STGroup;
import java.io.*;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Field;
import java.util.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
public class Tool {
public String VERSION = "4.0-"+new Date();
@ -83,7 +113,6 @@ public class Tool {
public boolean force_atn = false;
public boolean log = false;
public boolean verbose_dfa = false;
public boolean no_auto_element_labels = false;
public boolean gen_listener = true;
public boolean gen_parse_listener = false;
public boolean gen_visitor = false;

View File

@ -36,17 +36,23 @@ public class LeftRecursiveRuleAltInfo {
public String leftRecursiveRuleRefLabel;
public String altLabel;
public String altText;
public AltAST altAST;
public AltAST altAST; // transformed ALT
public AltAST originalAltAST;
public int nextPrec;
public LeftRecursiveRuleAltInfo(int altNum, String altText) {
this(altNum, altText, null, null);
this(altNum, altText, null, null, null);
}
public LeftRecursiveRuleAltInfo(int altNum, String altText, String leftRecursiveRuleRefLabel, String altLabel) {
public LeftRecursiveRuleAltInfo(int altNum, String altText,
String leftRecursiveRuleRefLabel,
String altLabel,
AltAST originalAltAST)
{
this.altNum = altNum;
this.altText = altText;
this.leftRecursiveRuleRefLabel = leftRecursiveRuleRefLabel;
this.altLabel = altLabel;
this.originalAltAST = originalAltAST;
}
}

View File

@ -29,17 +29,28 @@
package org.antlr.v4.analysis;
import org.antlr.runtime.*;
import org.antlr.runtime.tree.*;
import org.antlr.runtime.CommonToken;
import org.antlr.runtime.TokenStream;
import org.antlr.runtime.tree.CommonTreeNodeStream;
import org.antlr.runtime.tree.Tree;
import org.antlr.v4.Tool;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.misc.Pair;
import org.antlr.v4.parse.*;
import org.antlr.v4.parse.GrammarASTAdaptor;
import org.antlr.v4.parse.LeftRecursiveRuleWalker;
import org.antlr.v4.tool.ErrorType;
import org.antlr.v4.tool.ast.*;
import org.stringtemplate.v4.*;
import org.antlr.v4.tool.ast.AltAST;
import org.antlr.v4.tool.ast.GrammarAST;
import org.antlr.v4.tool.ast.GrammarASTWithOptions;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupFile;
import java.util.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/** Using a tree walker on the rules, determine if a rule is directly left-recursive and if it follows
* our pattern.
@ -124,8 +135,8 @@ public class LeftRecursiveRuleAnalyzer extends LeftRecursiveRuleWalker {
}
@Override
public void binaryAlt(AltAST altTree, int alt) {
altTree = (AltAST)altTree.dupTree();
public void binaryAlt(AltAST originalAltTree, int alt) {
AltAST altTree = (AltAST)originalAltTree.dupTree();
String altLabel = altTree.altLabel!=null ? altTree.altLabel.getText() : null;
GrammarAST lrlabel = stripLeftRecursion(altTree);
@ -143,7 +154,8 @@ public class LeftRecursiveRuleAnalyzer extends LeftRecursiveRuleWalker {
stripAltLabel(altTree);
String altText = text(altTree);
altText = altText.trim();
LeftRecursiveRuleAltInfo a = new LeftRecursiveRuleAltInfo(alt, altText, label, altLabel);
LeftRecursiveRuleAltInfo a =
new LeftRecursiveRuleAltInfo(alt, altText, label, altLabel, originalAltTree);
a.nextPrec = nextPrec;
binaryAlts.put(alt, a);
//System.out.println("binaryAlt " + alt + ": " + altText + ", rewrite=" + rewriteText);
@ -151,8 +163,8 @@ public class LeftRecursiveRuleAnalyzer extends LeftRecursiveRuleWalker {
/** Convert e ? e : e -> ? e : e_[nextPrec] */
@Override
public void ternaryAlt(AltAST altTree, int alt) {
altTree = (AltAST)altTree.dupTree();
public void ternaryAlt(AltAST originalAltTree, int alt) {
AltAST altTree = (AltAST)originalAltTree.dupTree();
String altLabel = altTree.altLabel!=null ? altTree.altLabel.getText() : null;
GrammarAST lrlabel = stripLeftRecursion(altTree);
@ -168,15 +180,16 @@ public class LeftRecursiveRuleAnalyzer extends LeftRecursiveRuleWalker {
String altText = text(altTree);
altText = altText.trim();
LeftRecursiveRuleAltInfo a = new LeftRecursiveRuleAltInfo(alt, altText, label, altLabel);
LeftRecursiveRuleAltInfo a =
new LeftRecursiveRuleAltInfo(alt, altText, label, altLabel, originalAltTree);
a.nextPrec = nextPrec;
ternaryAlts.put(alt, a);
//System.out.println("ternaryAlt " + alt + ": " + altText + ", rewrite=" + rewriteText);
}
@Override
public void prefixAlt(AltAST altTree, int alt) {
altTree = (AltAST)altTree.dupTree();
public void prefixAlt(AltAST originalAltTree, int alt) {
AltAST altTree = (AltAST)originalAltTree.dupTree();
stripAltLabel(altTree);
int nextPrec = precedence(alt);
@ -185,15 +198,16 @@ public class LeftRecursiveRuleAnalyzer extends LeftRecursiveRuleWalker {
String altText = text(altTree);
altText = altText.trim();
String altLabel = altTree.altLabel!=null ? altTree.altLabel.getText() : null;
LeftRecursiveRuleAltInfo a = new LeftRecursiveRuleAltInfo(alt, altText, null, altLabel);
LeftRecursiveRuleAltInfo a =
new LeftRecursiveRuleAltInfo(alt, altText, null, altLabel, originalAltTree);
a.nextPrec = nextPrec;
prefixAlts.add(a);
//System.out.println("prefixAlt " + alt + ": " + altText + ", rewrite=" + rewriteText);
}
@Override
public void suffixAlt(AltAST altTree, int alt) {
altTree = (AltAST)altTree.dupTree();
public void suffixAlt(AltAST originalAltTree, int alt) {
AltAST altTree = (AltAST)originalAltTree.dupTree();
String altLabel = altTree.altLabel!=null ? altTree.altLabel.getText() : null;
GrammarAST lrlabel = stripLeftRecursion(altTree);
@ -204,18 +218,20 @@ public class LeftRecursiveRuleAnalyzer extends LeftRecursiveRuleWalker {
stripAltLabel(altTree);
String altText = text(altTree);
altText = altText.trim();
LeftRecursiveRuleAltInfo a = new LeftRecursiveRuleAltInfo(alt, altText, label, altLabel);
LeftRecursiveRuleAltInfo a =
new LeftRecursiveRuleAltInfo(alt, altText, label, altLabel, originalAltTree);
suffixAlts.put(alt, a);
// System.out.println("suffixAlt " + alt + ": " + altText + ", rewrite=" + rewriteText);
}
@Override
public void otherAlt(AltAST altTree, int alt) {
altTree = (AltAST)altTree.dupTree();
public void otherAlt(AltAST originalAltTree, int alt) {
AltAST altTree = (AltAST)originalAltTree.dupTree();
stripAltLabel(altTree);
String altText = text(altTree);
String altLabel = altTree.altLabel!=null ? altTree.altLabel.getText() : null;
LeftRecursiveRuleAltInfo a = new LeftRecursiveRuleAltInfo(alt, altText, null, altLabel);
LeftRecursiveRuleAltInfo a =
new LeftRecursiveRuleAltInfo(alt, altText, null, altLabel, originalAltTree);
otherAlts.add(a);
// System.out.println("otherAlt " + alt + ": " + altText);
}
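As a concrete case, take the rewritten T.g rule later in this diff, e : e op='*' e | e op='+' e | e '++' | INT ;. The two operator alternatives go through binaryAlt (nextPrec recording, roughly, the precedence argument for the nested e reference), e '++' goes through suffixAlt, and INT through otherAlt as a primary alternative. Each handler now dups the original AltAST, rewrites the copy (stripping the left-recursive reference and/or the alt label), and keeps the untouched tree in originalAltAST so that LeftRecursiveRuleTransformer (next file) can attach the LeftRecursiveRuleAltInfo back onto the original AST.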

View File

@ -29,14 +29,37 @@
package org.antlr.v4.analysis;
import org.antlr.runtime.*;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CommonToken;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.ParserRuleReturnScope;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.TokenStream;
import org.antlr.v4.Tool;
import org.antlr.v4.misc.*;
import org.antlr.v4.parse.*;
import org.antlr.v4.tool.*;
import org.antlr.v4.tool.ast.*;
import org.antlr.v4.misc.OrderedHashMap;
import org.antlr.v4.misc.Pair;
import org.antlr.v4.parse.ANTLRLexer;
import org.antlr.v4.parse.ANTLRParser;
import org.antlr.v4.parse.GrammarASTAdaptor;
import org.antlr.v4.parse.ScopeParser;
import org.antlr.v4.parse.ToolANTLRParser;
import org.antlr.v4.tool.AttributeDict;
import org.antlr.v4.tool.ErrorType;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarTransformPipeline;
import org.antlr.v4.tool.LabelElementPair;
import org.antlr.v4.tool.LeftRecursiveRule;
import org.antlr.v4.tool.Rule;
import org.antlr.v4.tool.ast.ActionAST;
import org.antlr.v4.tool.ast.AltAST;
import org.antlr.v4.tool.ast.BlockAST;
import org.antlr.v4.tool.ast.GrammarAST;
import org.antlr.v4.tool.ast.GrammarRootAST;
import org.antlr.v4.tool.ast.RuleAST;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/** Remove left-recursive rule refs, add precedence args to recursive rule refs.
* Rewrite rule so we can create ATN.
@ -201,12 +224,16 @@ public class LeftRecursiveRuleTransformer {
LeftRecursiveRuleAltInfo altInfo = r.recPrimaryAlts.get(i);
altInfo.altAST = (AltAST)primaryBlk.getChild(i);
altInfo.altAST.leftRecursiveAltInfo = altInfo;
altInfo.originalAltAST.leftRecursiveAltInfo = altInfo;
altInfo.originalAltAST.parent = altInfo.altAST.parent;
// System.out.println(altInfo.altAST.toStringTree());
}
for (int i = 0; i < r.recOpAlts.size(); i++) {
LeftRecursiveRuleAltInfo altInfo = r.recOpAlts.getElement(i);
altInfo.altAST = (AltAST)opsBlk.getChild(i);
altInfo.altAST.leftRecursiveAltInfo = altInfo;
altInfo.originalAltAST.leftRecursiveAltInfo = altInfo;
altInfo.originalAltAST.parent = altInfo.altAST.parent;
// System.out.println(altInfo.altAST.toStringTree());
}
}

View File

@ -29,11 +29,14 @@
package org.antlr.v4.codegen.model;
import org.antlr.v4.codegen.*;
import org.antlr.v4.codegen.model.decl.*;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.codegen.OutputModelFactory;
import org.antlr.v4.codegen.model.decl.RuleContextDecl;
import org.antlr.v4.codegen.model.decl.StructDecl;
import org.antlr.v4.misc.Pair;
import org.antlr.v4.parse.ANTLRParser;
import org.antlr.v4.tool.*;
import org.antlr.v4.tool.LeftRecursiveRule;
import org.antlr.v4.tool.Rule;
import org.antlr.v4.tool.ast.GrammarAST;
public class LeftRecursiveRuleFunction extends RuleFunction {

View File

@ -35,6 +35,7 @@ import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.LexerGrammar;
import org.antlr.v4.tool.Rule;
import java.io.File;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
@ -42,6 +43,7 @@ import java.util.Set;
public class Lexer extends OutputModelObject {
public String name;
public String grammarFileName;
public Map<String,Integer> tokens;
public LexerFile file;
public String[] tokenNames;
@ -58,6 +60,7 @@ public class Lexer extends OutputModelObject {
this.factory = factory;
this.file = file; // who contains us?
Grammar g = factory.getGrammar();
grammarFileName = new File(g.fileName).getName();
name = g.getRecognizerName();
tokens = new LinkedHashMap<String,Integer>();
LexerGrammar lg = (LexerGrammar)g;

View File

@ -33,11 +33,13 @@ import org.antlr.v4.codegen.*;
import org.antlr.v4.codegen.model.chunk.*;
import org.antlr.v4.tool.*;
import java.io.File;
import java.util.*;
/** */
public class Parser extends OutputModelObject {
public String name;
public String grammarFileName;
public String grammarName;
@ModelElement public ActionChunk superclass;
public Map<String,Integer> tokens;
@ -55,6 +57,7 @@ public class Parser extends OutputModelObject {
this.factory = factory;
this.file = file; // who contains us?
Grammar g = factory.getGrammar();
grammarFileName = new File(g.fileName).getName();
grammarName = g.name;
name = g.getRecognizerName();
tokens = new LinkedHashMap<String,Integer>();

View File

@ -30,7 +30,15 @@
package org.antlr.v4.codegen.model;
import org.antlr.v4.codegen.OutputModelFactory;
import org.antlr.v4.codegen.model.decl.*;
import org.antlr.v4.codegen.model.decl.AltLabelStructDecl;
import org.antlr.v4.codegen.model.decl.ContextRuleGetterDecl;
import org.antlr.v4.codegen.model.decl.ContextRuleListGetterDecl;
import org.antlr.v4.codegen.model.decl.ContextRuleListIndexedGetterDecl;
import org.antlr.v4.codegen.model.decl.ContextTokenGetterDecl;
import org.antlr.v4.codegen.model.decl.ContextTokenListGetterDecl;
import org.antlr.v4.codegen.model.decl.ContextTokenListIndexedGetterDecl;
import org.antlr.v4.codegen.model.decl.Decl;
import org.antlr.v4.codegen.model.decl.StructDecl;
import org.antlr.v4.misc.FrequencySet;
import org.antlr.v4.misc.Triple;
import org.antlr.v4.misc.Utils;
@ -42,9 +50,18 @@ import org.antlr.v4.tool.Rule;
import org.antlr.v4.tool.ast.AltAST;
import org.antlr.v4.tool.ast.GrammarAST;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.antlr.v4.parse.ANTLRParser.*;
import static org.antlr.v4.parse.ANTLRParser.CLOSURE;
import static org.antlr.v4.parse.ANTLRParser.POSITIVE_CLOSURE;
import static org.antlr.v4.parse.ANTLRParser.RULE_REF;
import static org.antlr.v4.parse.ANTLRParser.TOKEN_REF;
/** */
public class RuleFunction extends OutputModelObject {
@ -84,13 +101,11 @@ public class RuleFunction extends OutputModelObject {
altToContext = new AltLabelStructDecl[r.getOriginalNumberOfAlts()+1];
// Add ctx labels for elements in alts with no -> label
if ( !factory.getGrammar().tool.no_auto_element_labels ) {
List<AltAST> altsNoLabels = r.getUnlabeledAltASTs();
if ( altsNoLabels!=null ) {
Set<Decl> decls = getDeclsForAllElements(altsNoLabels);
// we know to put in rule ctx, so do it directly
for (Decl d : decls) ruleCtx.addDecl(d);
}
List<AltAST> altsNoLabels = r.getUnlabeledAltASTs();
if ( altsNoLabels!=null ) {
Set<Decl> decls = getDeclsForAllElements(altsNoLabels);
// we know to put in rule ctx, so do it directly
for (Decl d : decls) ruleCtx.addDecl(d);
}
// make structs for -> labeled alts, define ctx labels for elements
@ -103,11 +118,9 @@ public class RuleFunction extends OutputModelObject {
String label = pair.c;
altToContext[altNum] = new AltLabelStructDecl(factory, r, altNum, label);
altLabelCtxs.put(label, altToContext[altNum]);
if ( !factory.getGrammar().tool.no_auto_element_labels ) {
Set<Decl> decls = getDeclsForAltElements(altAST);
// we know which ctx to put in, so do it directly
for (Decl d : decls) altToContext[altNum].addDecl(d);
}
Set<Decl> decls = getDeclsForAltElements(altAST);
// we know which ctx to put in, so do it directly
for (Decl d : decls) altToContext[altNum].addDecl(d);
}
}
@ -139,23 +152,18 @@ public class RuleFunction extends OutputModelObject {
}
}
/** for all alts, find which ref X or r in way which needs List
/** for all alts, find which ref X or r needs List
Must see across alts. If any alt needs X or r as list, then
define as list.
*/
public Set<Decl> getDeclsForAllElements(List<AltAST> altASTs) {
Set<String> needsList = new HashSet<String>();
List<GrammarAST> allRefs = new ArrayList<GrammarAST>();
// for (Alternative a :alts) {
for (AltAST ast : altASTs) {
IntervalSet reftypes = new IntervalSet(RULE_REF, TOKEN_REF);
List<GrammarAST> refs = ast.getNodesWithType(reftypes);
FrequencySet<String> altFreq = new FrequencySet<String>();
for (GrammarAST t : refs) {
String refLabelName = t.getText();
altFreq.add(refLabelName);
allRefs.add(t);
}
allRefs.addAll(refs);
FrequencySet<String> altFreq = getElementFrequenciesForAlt(ast);
for (GrammarAST t : refs) {
String refLabelName = t.getText();
if ( altFreq.count(t.getText())>1 ) needsList.add(refLabelName);
@ -172,6 +180,18 @@ public class RuleFunction extends OutputModelObject {
return decls;
}
/** Given list of X and r refs in alt, compute how many of each there are */
protected FrequencySet<String> getElementFrequenciesForAlt(AltAST ast) {
IntervalSet reftypes = new IntervalSet(RULE_REF, TOKEN_REF);
List<GrammarAST> refs = ast.getNodesWithType(reftypes);
FrequencySet<String> altFreq = new FrequencySet<String>();
for (GrammarAST t : refs) {
String refLabelName = t.getText();
altFreq.add(refLabelName);
}
return altFreq;
}
/** Get list of decls for token/rule refs.
* Single ref X becomes X() getter
* Multiple refs to X becomes List X() method, X(int i) method.
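
A standalone sketch (illustrative only, not the ANTLR implementation) of the decision the javadoc above describes: element-name frequencies are computed per alternative, and any name that occurs more than once in some alt is promoted to a list in the rule context.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class NeedsListSketch {
    // Count how often each element name appears in one alternative.
    static Map<String, Integer> frequenciesForAlt(List<String> refs) {
        Map<String, Integer> freq = new HashMap<String, Integer>();
        for (String name : refs) {
            Integer n = freq.get(name);
            freq.put(name, n == null ? 1 : n + 1);
        }
        return freq;
    }

    public static void main(String[] args) {
        // Hypothetical rule "a : b b | b ;" -- the first alt references b twice.
        List<List<String>> alts = new ArrayList<List<String>>();
        alts.add(Arrays.asList("b", "b"));
        alts.add(Arrays.asList("b"));

        Set<String> needsList = new HashSet<String>();
        for (List<String> alt : alts) {
            for (Map.Entry<String, Integer> e : frequenciesForAlt(alt).entrySet()) {
                if (e.getValue() > 1) needsList.add(e.getKey());
            }
        }
        // b must be a list everywhere, so the generated context exposes b(int i)
        // and a List-returning b() rather than a single b() getter.
        System.out.println(needsList); // prints [b]
    }
}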

View File

@ -30,11 +30,20 @@
package org.antlr.v4.codegen.model.decl;
import org.antlr.v4.codegen.OutputModelFactory;
import org.antlr.v4.codegen.model.*;
import org.antlr.v4.codegen.model.DispatchMethod;
import org.antlr.v4.codegen.model.ListenerDispatchMethod;
import org.antlr.v4.codegen.model.ModelElement;
import org.antlr.v4.codegen.model.OutputModelObject;
import org.antlr.v4.codegen.model.ParseListenerDispatchMethod;
import org.antlr.v4.codegen.model.VisitorDispatchMethod;
import org.antlr.v4.runtime.misc.OrderedHashSet;
import org.antlr.v4.tool.*;
import org.antlr.v4.tool.Attribute;
import org.antlr.v4.tool.LeftRecursiveRule;
import org.antlr.v4.tool.Rule;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/** This object models the structure holding all of the parameters,
* return values, local variables, and labels associated with a rule.
@ -60,8 +69,10 @@ public class StructDecl extends Decl {
dispatchMethods = new ArrayList<DispatchMethod>();
if ( !r.hasAltSpecificContexts() ) {
// no enter/exit for this ruleContext if rule has labels
dispatchMethods.add(new ListenerDispatchMethod(factory, true));
dispatchMethods.add(new ListenerDispatchMethod(factory, false));
if ( factory.getGrammar().tool.gen_listener ) {
dispatchMethods.add(new ListenerDispatchMethod(factory, true));
dispatchMethods.add(new ListenerDispatchMethod(factory, false));
}
if ( factory.getGrammar().tool.gen_visitor ) {
dispatchMethods.add(new VisitorDispatchMethod(factory));
}

View File

@ -148,7 +148,7 @@ ternary
prefix
: ^( ALT {setTokenPrec((GrammarAST)input.LT(1), currentOuterAltNumber);}
({!((CommonTree)input.LT(1)).getText().equals(ruleName)}? element)+
recurse
recurse ACTION?
)
;
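
The added ACTION? lets a prefix alternative carry an action after its recursive reference. A hypothetical grammar fragment in the same Java-string style the tests below use (names and semantics are assumed, not taken from this patch):

public class PrefixActionExample {
    // Hypothetical left-recursive rule: the unary '-' alt ends with an action,
    // which the walker's "recurse ACTION?" now accepts.
    public static final String GRAMMAR =
        "grammar P;\n" +
        "s : e {System.out.println($e.v);} ;\n" +
        "e returns [int v]\n" +
        "  : '-' a=e {$v = -$a.v;}\n" +
        "  | e '+' e {$v = 0;}\n" +   // placeholder action; keeps the rule left-recursive
        "  | INT     {$v = $INT.int;}\n" +
        "  ;\n" +
        "INT : '0'..'9'+ ;\n" +
        "WS  : (' '|'\\n') {skip();} ;\n";

    public static void main(String[] args) { System.out.println(GRAMMAR); }
}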

View File

@ -72,11 +72,11 @@ public class LeftRecursiveRule extends Rule {
List<AltAST> alts = new ArrayList<AltAST>();
for (int i = 0; i < recPrimaryAlts.size(); i++) {
LeftRecursiveRuleAltInfo altInfo = recPrimaryAlts.get(i);
if ( altInfo.altLabel==null ) alts.add(altInfo.altAST);
if ( altInfo.altLabel==null ) alts.add(altInfo.originalAltAST);
}
for (int i = 0; i < recOpAlts.size(); i++) {
LeftRecursiveRuleAltInfo altInfo = recOpAlts.getElement(i);
if ( altInfo.altLabel==null ) alts.add(altInfo.altAST);
if ( altInfo.altLabel==null ) alts.add(altInfo.originalAltAST);
}
if ( alts.size()==0 ) return null;
return alts;
@ -91,13 +91,17 @@ public class LeftRecursiveRule extends Rule {
for (int i = 0; i < recPrimaryAlts.size(); i++) {
LeftRecursiveRuleAltInfo altInfo = recPrimaryAlts.get(i);
if ( altInfo.altLabel!=null ) {
labels.add(new Triple<Integer,AltAST,String>(altInfo.altNum,altInfo.altAST,altInfo.altLabel));
labels.add(new Triple<Integer,AltAST,String>(altInfo.altNum,
altInfo.originalAltAST,
altInfo.altLabel));
}
}
for (int i = 0; i < recOpAlts.size(); i++) {
LeftRecursiveRuleAltInfo altInfo = recOpAlts.getElement(i);
if ( altInfo.altLabel!=null ) {
labels.add(new Triple<Integer,AltAST,String>(altInfo.altNum,altInfo.altAST,altInfo.altLabel));
labels.add(new Triple<Integer,AltAST,String>(altInfo.altNum,
altInfo.originalAltAST,
altInfo.altLabel));
}
}
if ( labels.size()==0 ) return null;

View File

@ -50,6 +50,7 @@ public class AltAST extends GrammarAST {
super(node);
this.alt = ((AltAST)node).alt;
this.altLabel = ((AltAST)node).altLabel;
this.leftRecursiveAltInfo = ((AltAST)node).leftRecursiveAltInfo;
}
public AltAST(Token t) { super(t); }

View File

@ -29,6 +29,7 @@
package org.antlr.v4.tool.ast;
import org.antlr.runtime.CommonToken;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
@ -41,8 +42,13 @@ public class RuleRefAST extends GrammarASTWithOptions implements RuleElementAST
public RuleRefAST(int type) { super(type); }
public RuleRefAST(int type, Token t) { super(type, t); }
/** Dup token too since we overwrite during LR rule transform */
@Override
public Tree dupNode() { return new TerminalAST(this); }
public Tree dupNode() {
RuleRefAST r = new RuleRefAST(this);
r.token = new CommonToken(r.token);
return r;
}
@Override
public Object visit(GrammarASTVisitor v) { return v.visit(this); }
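
A self-contained sketch (illustrative only) of why dupNode above copies the token as well: if the duplicate aliased the original Token, rewriting one node during the left-recursion transform would also change the other.

public class DupTokenSketch {
    static class Tok { String text; Tok(String text) { this.text = text; } }
    static class Node {
        Tok token;
        Node(Tok token) { this.token = token; }
        Node shallowDup() { return new Node(token); }               // shares the Tok
        Node deepDup()    { return new Node(new Tok(token.text)); } // copies it
    }

    public static void main(String[] args) {
        Node orig = new Node(new Tok("e"));
        Node dup = orig.shallowDup();
        dup.token.text = "e_rewritten";
        System.out.println(orig.token.text); // e_rewritten -- the original was corrupted

        Node orig2 = new Node(new Tok("e"));
        Node dup2 = orig2.deepDup();
        dup2.token.text = "e_rewritten";
        System.out.println(orig2.token.text); // e -- safe, as with new CommonToken(r.token) above
    }
}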

View File

@ -45,6 +45,11 @@ public class ParserInterpreter {
this.g = g;
}
@Override
public String getGrammarFileName() {
return null;
}
@Override
public String[] getRuleNames() {
return g.rules.keySet().toArray(new String[g.rules.size()]);
@ -54,6 +59,11 @@ public class ParserInterpreter {
public String[] getTokenNames() {
return g.getTokenNames();
}
@Override
public ATN getATN() {
return null;
}
}
protected Grammar g;

View File

@ -181,7 +181,9 @@ compilationUnit
( packageDeclaration importDeclaration* typeDeclaration*
| classOrInterfaceDeclaration typeDeclaration*
)
EOF
| packageDeclaration? importDeclaration* typeDeclaration*
EOF
;
packageDeclaration

View File

@ -33,6 +33,7 @@ import org.antlr.v4.Tool;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.runtime.NoViableAltException;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
@ -504,8 +505,7 @@ public class TestATNParserPrediction extends BaseTest {
TokenStream input = new IntTokenStream(types);
ParserInterpreter interp = new ParserInterpreter(g, input);
DecisionState startState = atn.decisionToState.get(decision);
DFA dfa = new DFA(startState);
dfa.decision = decision;
DFA dfa = new DFA(startState, decision);
int alt = interp.predictATN(dfa, input, ParserRuleContext.EMPTY, false);
System.out.println(dot.getDOT(dfa, false));
@ -523,7 +523,7 @@ public class TestATNParserPrediction extends BaseTest {
}
public DFA getDFA(LexerGrammar lg, Grammar g, String ruleName,
String inputString, ParserRuleContext ctx)
String inputString, ParserRuleContext<?> ctx)
{
Tool.internalOption_ShowATNConfigsInDFA = true;
ATN lexatn = createATN(lg);
@ -541,7 +541,7 @@ public class TestATNParserPrediction extends BaseTest {
// System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("b"))));
// System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("e"))));
ParserATNSimulator interp = new ParserATNSimulator(atn);
ParserATNSimulator<Token> interp = new ParserATNSimulator<Token>(atn);
List<Integer> types = getTokenTypesViaATN(inputString, lexInterp);
System.out.println(types);
TokenStream input = new IntTokenStream(types);

View File

@ -69,8 +69,8 @@ public class TestActionTranslation extends BaseTest {
String action = "x, $ID.text+\"3242\", (*$ID).foo(21,33), 3.2+1, '\\n', "+
"\"a,oo\\nick\", {bl, \"fdkj\"eck}";
String expected =
"x, (((aContext)_localctx).ID!=null?((aContext)_localctx).ID.getText():null)+\"3242\", " +
"(*((aContext)_localctx).ID).foo(21,33), 3.2+1, '\\n', \"a,oo\\nick\", {bl, \"fdkj\"eck}";
"x, (((AContext)_localctx).ID!=null?((AContext)_localctx).ID.getText():null)+\"3242\", " +
"(*((AContext)_localctx).ID).foo(21,33), 3.2+1, '\\n', \"a,oo\\nick\", {bl, \"fdkj\"eck}";
testActions(attributeTemplate, "inline", action, expected);
}
@ -100,25 +100,25 @@ public class TestActionTranslation extends BaseTest {
@Test public void testReturnValues() throws Exception {
String action = "$lab.e; $b.e;";
String expected = "((aContext)_localctx).lab.e; ((aContext)_localctx).b.e;";
String expected = "((AContext)_localctx).lab.e; ((AContext)_localctx).b.e;";
testActions(attributeTemplate, "inline", action, expected);
}
@Test public void testReturnWithMultipleRuleRefs() throws Exception {
String action = "$c.x; $c.y;";
String expected = "((aContext)_localctx).c.x; ((aContext)_localctx).c.y;";
String expected = "((AContext)_localctx).c.x; ((AContext)_localctx).c.y;";
testActions(attributeTemplate, "inline", action, expected);
}
@Test public void testTokenRefs() throws Exception {
String action = "$id; $ID; $id.text; $id.getText(); $id.line;";
String expected = "((aContext)_localctx).id; ((aContext)_localctx).ID; (((aContext)_localctx).id!=null?((aContext)_localctx).id.getText():null); ((aContext)_localctx).id.getText(); (((aContext)_localctx).id!=null?((aContext)_localctx).id.getLine():0);";
String expected = "((AContext)_localctx).id; ((AContext)_localctx).ID; (((AContext)_localctx).id!=null?((AContext)_localctx).id.getText():null); ((AContext)_localctx).id.getText(); (((AContext)_localctx).id!=null?((AContext)_localctx).id.getLine():0);";
testActions(attributeTemplate, "inline", action, expected);
}
@Test public void testRuleRefs() throws Exception {
String action = "$lab.start; $c.text;";
String expected = "(((aContext)_localctx).lab!=null?(((aContext)_localctx).lab.start):null); (((aContext)_localctx).c!=null?_input.toString(((aContext)_localctx).c.start,((aContext)_localctx).c.stop):null);";
String expected = "(((AContext)_localctx).lab!=null?(((AContext)_localctx).lab.start):null); (((AContext)_localctx).c!=null?_input.toString(((AContext)_localctx).c.start,((AContext)_localctx).c.stop):null);";
testActions(attributeTemplate, "inline", action, expected);
}

View File

@ -178,24 +178,17 @@ public class TestCommonTokenStream extends BaseTest {
new TokenSource() {
int i = 0;
WritableToken[] tokens = {
new CommonToken(1," "),
new CommonToken(1," ") {{channel = Lexer.HIDDEN;}},
new CommonToken(1,"x"),
new CommonToken(1," "),
new CommonToken(1," ") {{channel = Lexer.HIDDEN;}},
new CommonToken(1,"="),
new CommonToken(1,"34"),
new CommonToken(1," "),
new CommonToken(1," "),
new CommonToken(1," ") {{channel = Lexer.HIDDEN;}},
new CommonToken(1," ") {{channel = Lexer.HIDDEN;}},
new CommonToken(1,";"),
new CommonToken(1,"\n"),
new CommonToken(1,"\n") {{channel = Lexer.HIDDEN;}},
new CommonToken(Token.EOF,"")
};
{
tokens[0].setChannel(Lexer.HIDDEN);
tokens[2].setChannel(Lexer.HIDDEN);
tokens[5].setChannel(Lexer.HIDDEN);
tokens[6].setChannel(Lexer.HIDDEN);
tokens[8].setChannel(Lexer.HIDDEN);
}
public Token nextToken() {
return tokens[i++];
}
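
The {{channel = Lexer.HIDDEN;}} initializers above create anonymous subclasses whose instance initializer sets the protected channel field at construction time, replacing the setChannel() calls that previously followed the array. A standalone illustration of the idiom (not ANTLR code):

public class DoubleBraceDemo {
    static class Tok {
        protected int channel = 0;                  // default channel
        public int getChannel() { return channel; }
    }

    public static void main(String[] args) {
        // Anonymous subclass of Tok; the outer braces open the subclass body,
        // the inner braces are an instance initializer that runs at construction.
        Tok hidden = new Tok() {{ channel = 99; }};
        System.out.println(hidden.getChannel());    // prints 99
    }
}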

View File

@ -236,10 +236,10 @@ public class TestLeftRecursion extends BaseTest {
"e returns [int v]\n" +
" : a=e op='*' b=e {$v = $a.v * $b.v;} -> mult\n" +
" | a=e '+' b=e {$v = $a.v + $b.v;} -> add\n" +
" | INT {$v = $INT.int;}\n" +
" | '(' x=e ')' {$v = $x.v;}\n" +
" | INT {$v = $INT.int;} -> anInt\n" +
" | '(' x=e ')' {$v = $x.v;} -> parens\n" +
" | x=e '++' {$v = $x.v+1;} -> inc\n" +
" | e '--'\n" +
" | e '--' -> dec\n" +
" | ID {$v = 3;} -> anID\n" +
" ; \n" +
"\n" +
@ -255,6 +255,27 @@ public class TestLeftRecursion extends BaseTest {
runTests(grammar, tests, "s");
}
@Test public void testPrefixOpWithActionAndLabel() throws Exception {
String grammar =
"grammar T;\n" +
"s : e {System.out.println($e.result);} ;\n" +
"\n" +
"e returns [String result]\n" +
" : ID '=' e1=e { $result = \"(\" + $ID.getText() + \"=\" + $e1.result + \")\"; }\n" +
" | ID { $result = $ID.getText(); }\n" +
" | e1=e '+' e2=e { $result = \"(\" + $e1.result + \"+\" + $e2.result + \")\"; }\n" +
" ;\n" +
"ID : 'a'..'z'+ ;\n" +
"INT : '0'..'9'+ ;\n" +
"WS : (' '|'\\n') {skip();} ;\n";
String[] tests = {
"a", "a",
"a+b", "(a+b)",
"a=b+c", "((a=b)+c)",
};
runTests(grammar, tests, "s");
}
public void runTests(String grammar, String[] tests, String startRule) {
rawGenerateAndBuildRecognizer("T.g", grammar, "TParser", "TLexer");
writeRecognizerAndCompile("TParser",

View File

@ -0,0 +1,149 @@
package org.antlr.v4.test;
import org.junit.Test;
public class TestListeners extends BaseTest {
@Test public void testBasic() throws Exception {
String grammar =
"grammar T;\n" +
"@members {\n" +
"public static class LeafListener extends TBaseListener {\n" +
" public void visitTerminal(ParserRuleContext<Token> ctx, Token symbol) {\n" +
" System.out.println(symbol.getText());\n" +
" }\n" +
" }}\n" +
"s\n" +
"@init {setBuildParseTree(true);}\n" +
"@after {" +
" System.out.println($r.ctx.toStringTree(this));" +
" ParseTreeWalker walker = new ParseTreeWalker();\n" +
" walker.walk(new LeafListener(), $r.ctx);" +
"}\n" +
" : r=a ;\n" +
"a : INT INT" +
" | ID" +
" ;\n" +
"MULT: '*' ;\n" +
"ADD : '+' ;\n" +
"INT : [0-9]+ ;\n" +
"ID : [a-z]+ ;\n" +
"WS : [ \\t\\n]+ -> skip ;\n";
String result = execParser("T.g", grammar, "TParser", "TLexer", "s", "1 2", false);
String expecting = "(a 1 2)\n" +
"1\n" +
"2\n";
assertEquals(expecting, result);
}
@Test public void testTokenGetters() throws Exception {
String grammar =
"grammar T;\n" +
"@members {\n" +
"public static class LeafListener extends TBaseListener {\n" +
" public void exitA(TParser.AContext ctx) {\n" +
" if (ctx.getChildCount()==2) System.out.printf(\"%s %s %s\",ctx.INT(0).getText(),ctx.INT(1).getText(),ctx.INT());\n" +
" else System.out.println(ctx.ID());\n" +
" }\n" +
" }}\n" +
"s\n" +
"@init {setBuildParseTree(true);}\n" +
"@after {" +
" System.out.println($r.ctx.toStringTree(this));" +
" ParseTreeWalker walker = new ParseTreeWalker();\n" +
" walker.walk(new LeafListener(), $r.ctx);" +
"}\n" +
" : r=a ;\n" +
"a : INT INT" +
" | ID" +
" ;\n" +
"MULT: '*' ;\n" +
"ADD : '+' ;\n" +
"INT : [0-9]+ ;\n" +
"ID : [a-z]+ ;\n" +
"WS : [ \\t\\n]+ -> skip ;\n";
String result = execParser("T.g", grammar, "TParser", "TLexer", "s", "1 2", false);
String expecting = "(a 1 2)\n" +
"1 2 [[@0,0:0='1',<5>,1:0], [@1,2:2='2',<5>,1:2]]\n";
assertEquals(expecting, result);
result = execParser("T.g", grammar, "TParser", "TLexer", "s", "abc", false);
expecting = "(a abc)\n" +
"[@0,0:2='abc',<6>,1:0]\n";
assertEquals(expecting, result);
}
@Test public void testRuleGetters() throws Exception {
String grammar =
"grammar T;\n" +
"@members {\n" +
"public static class LeafListener extends TBaseListener {\n" +
" public void exitA(TParser.AContext ctx) {\n" +
" if (ctx.getChildCount()==2) {\n" +
" System.out.printf(\"%s %s %s\",ctx.b(0).start.getText(),\n" +
" ctx.b(1).start.getText(),ctx.b().get(0).start.getText());\n" +
" }\n" +
" else System.out.println(ctx.b(0).start.getText());\n" +
" }\n" +
" }}\n" +
"s\n" +
"@init {setBuildParseTree(true);}\n" +
"@after {" +
" System.out.println($r.ctx.toStringTree(this));" +
" ParseTreeWalker walker = new ParseTreeWalker();\n" +
" walker.walk(new LeafListener(), $r.ctx);" +
"}\n" +
" : r=a ;\n" +
"a : b b" + // forces list
" | b" + // a list still
" ;\n" +
"b : ID | INT ;\n" +
"MULT: '*' ;\n" +
"ADD : '+' ;\n" +
"INT : [0-9]+ ;\n" +
"ID : [a-z]+ ;\n" +
"WS : [ \\t\\n]+ -> skip ;\n";
String result = execParser("T.g", grammar, "TParser", "TLexer", "s", "1 2", false);
String expecting = "(a (b 1) (b 2))\n" +
"1 2 1\n";
assertEquals(expecting, result);
result = execParser("T.g", grammar, "TParser", "TLexer", "s", "abc", false);
expecting = "(a (b abc))\n" +
"abc\n";
assertEquals(expecting, result);
}
@Test public void testLR() throws Exception {
String grammar =
"grammar T;\n" +
"@members {\n" +
"public static class LeafListener extends TBaseListener {\n" +
" public void exitA(TParser.EContext ctx) {\n" +
" if (ctx.getChildCount()==3) {\n" +
" System.out.printf(\"%s %s %s\",ctx.e(0).start.getText(),\n" +
" ctx.e(1).start.getText(),ctx.e().get(0).start.getText());\n" +
" }\n" +
" else System.out.println(ctx.INT().getText());\n" +
" }\n" +
" }}\n" +
"s\n" +
"@init {setBuildParseTree(true);}\n" +
"@after {" +
" System.out.println($r.ctx.toStringTree(this));" +
" ParseTreeWalker walker = new ParseTreeWalker();\n" +
" walker.walk(new LeafListener(), $r.ctx);" +
"}\n" +
" : r=e ;\n" +
"e : e op='*' e\n" +
" | e op='+' e\n" +
" | INT\n" +
" ;\n" +
"MULT: '*' ;\n" +
"ADD : '+' ;\n" +
"INT : [0-9]+ ;\n" +
"WS : [ \\t\\n]+ -> skip ;\n";
String result = execParser("T.g", grammar, "TParser", "TLexer", "s", "1+2*3", false);
String expecting = "(e (e 1) + (e (e 2) * (e 3)))\n";
assertEquals(expecting, result);
}
}
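
The getter calls in these tests (ctx.INT(0), ctx.b(1), ctx.b().get(0)) rely on the generated context classes exposing both an indexed getter and a list getter once an element can occur more than once. A simplified, hypothetical shape of such a context (real generated classes extend ParserRuleContext and pull children from the parse tree):

import java.util.ArrayList;
import java.util.List;

public class RuleGetterShape {
    static class BContext { String start; BContext(String start) { this.start = start; } }

    private final List<BContext> bs = new ArrayList<BContext>();
    void addB(BContext b) { bs.add(b); }

    public List<BContext> b() { return bs; }        // all b children of this rule
    public BContext b(int i) { return bs.get(i); }  // i-th b child

    public static void main(String[] args) {
        RuleGetterShape ctx = new RuleGetterShape();
        ctx.addB(new BContext("1"));
        ctx.addB(new BContext("2"));
        // Mirrors the output checked by testRuleGetters: "1 2 1"
        System.out.println(ctx.b(0).start + " " + ctx.b(1).start + " " + ctx.b().get(0).start);
    }
}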

View File

@ -32,8 +32,12 @@ import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.*;
import org.antlr.v4.runtime.misc.Nullable;
import org.antlr.v4.runtime.tree.*;
import org.junit.*;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeListener;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import java.io.*;
import java.lang.reflect.*;
@ -280,6 +284,29 @@ public class TestPerformance extends BaseTest {
tokenCount,
System.currentTimeMillis() - startTime);
final LexerATNSimulator lexerInterpreter = sharedLexer.getInterpreter();
final DFA[] modeToDFA = lexerInterpreter.dfa;
if (SHOW_DFA_STATE_STATS) {
int states = 0;
int configs = 0;
Set<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
for (int i = 0; i < modeToDFA.length; i++) {
DFA dfa = modeToDFA[i];
if (dfa == null || dfa.states == null) {
continue;
}
states += dfa.states.size();
for (DFAState state : dfa.states.values()) {
configs += state.configset.size();
uniqueConfigs.addAll(state.configset);
}
}
System.out.format("There are %d lexer DFAState instances, %d configs (%d unique).\n", states, configs, uniqueConfigs.size());
}
if (RUN_PARSER) {
// make sure the individual DFAState objects actually have unique ATNConfig arrays
final ParserATNSimulator<?> interpreter = sharedParser.getInterpreter();
@ -303,7 +330,7 @@ public class TestPerformance extends BaseTest {
}
}
System.out.format("There are %d DFAState instances, %d configs (%d unique).\n", states, configs, uniqueConfigs.size());
System.out.format("There are %d parser DFAState instances, %d configs (%d unique).\n", states, configs, uniqueConfigs.size());
}
int localDfaCount = 0;

View File

@ -274,7 +274,7 @@ public class TestSemPredEvalParser extends BaseTest {
"alt 1\n";
assertEquals(expecting, found);
expecting = "";
expecting = null;
assertEquals(expecting, stderrDuringParse);
}