got basic lexer DFAs in (I think I need to use NFAConfigs though)

[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 6774]
This commit is contained in:
parrt 2010-03-26 15:29:47 -08:00
parent f7445e961b
commit cb7445cf36
18 changed files with 809 additions and 374 deletions

View File

@ -2,6 +2,7 @@ package org.antlr.v4.analysis;
import org.antlr.v4.automata.DFA;
import org.antlr.v4.automata.DecisionState;
import org.antlr.v4.parse.ANTLRParser;
import org.antlr.v4.tool.Grammar;
public class AnalysisPipeline {
@ -17,6 +18,12 @@ public class AnalysisPipeline {
lr.check();
if ( lr.listOfRecursiveCycles.size()>0 ) return; // bail out
if ( g.getType() == ANTLRParser.LEXER ) {
LexerNFAToDFAConverter conv = new LexerNFAToDFAConverter(g);
DFA dfa = conv.createDFA();
g.setLookaheadDFA(0, dfa); // only one decision
return;
}
// BUILD DFA FOR EACH DECISION
for (DecisionState s : g.nfa.decisionToNFAState) {
System.out.println("\nDECISION "+s.decision);

View File

@ -0,0 +1,186 @@
package org.antlr.v4.analysis;
import org.antlr.v4.automata.*;
import org.antlr.v4.misc.IntervalSet;
import org.antlr.v4.misc.OrderedHashSet;
import org.antlr.v4.tool.Grammar;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
public class LexerNFAToDFAConverter {
	Grammar g;

	/** DFA we are creating */
	DFA dfa;

	/** A list of DFA states we still need to process during NFA conversion */
	List<LexerState> work = new LinkedList<LexerState>();

	/** DFA states that were marked accepting during closure; post-processed
	 *  in createDFA() to record which lexer rule(s) each one matches.
	 */
	List<LexerState> accepts = new LinkedList<LexerState>();

	public static boolean debug = false;

	public LexerNFAToDFAConverter(Grammar g) {
		this.g = g;
		// NOTE(review): assumes the synthetic Tokens-rule start state is NFA
		// state 0 — confirm against LexerNFAFactory.createNFA(), which creates
		// it first.
		TokensStartState startState = (TokensStartState)g.nfa.states.get(0);
		dfa = new DFA(g, startState);
	}

	/** Convert the lexer NFA to a DFA via subset construction: compute the
	 *  epsilon-closure of the start state, then repeatedly expand reachable
	 *  state sets (reach) until the work list empties.  Finally walk the
	 *  accept states, mapping each to the non-fragment rule(s) it matches.
	 */
	public DFA createDFA() {
		LexerState start = computeStartState();
		dfa.startState = start;
		dfa.addState(start); // make sure dfa knows about this state
		work.add((LexerState)dfa.startState);

		// while more DFA states to check, process them
		while ( work.size()>0 ) {
			LexerState d = work.get(0);
			reach(d);
			work.remove(0); // we're done with this DFA state
		}

		// walk accept states, informing DFA which rule(s) each one matches
		for (LexerState d : accepts) {
			for (NFAState s : d.nfaStates) {
				// fragment rules can't be matched on their own; skip them
				if ( s instanceof RuleStopState && !s.rule.isFragment() ) {
					dfa.defineAcceptState(s.rule.index, d);
					d.matchesRules.add(s.rule);
				}
			}
		}
		return dfa;
	}

	/** Create the DFA start state: the epsilon-closure of the NFA decision
	 *  start state.
	 */
	public LexerState computeStartState() {
		LexerState d = dfa.newLexerState();
		d.nfaStates.add(dfa.decisionNFAStartState);
		closure(d);
		return d;
	}

	/** From this node, add a d--a-->t transition for all
	 *  labels 'a' where t is a DFA node created
	 *  from the set of NFA states reachable from any NFA
	 *  configuration in DFA state d.
	 */
	void reach(LexerState d) {
		OrderedHashSet<IntervalSet> labels = DFA.getReachableLabels(d);

		for (IntervalSet label : labels) {
			LexerState t = reach(d, label);
			if ( debug ) {
				System.out.println("DFA state after reach -" +
								   label.toString(g)+"->"+t);
			}
			closure(t);  // add any NFA states reachable via epsilon
			addTransition(d, label, t); // make d-label->t transition
		}
	}

	/** Add t if not in DFA yet and then make d-label->t */
	void addTransition(LexerState d, IntervalSet label, LexerState t) {
		LexerState existing = (LexerState)dfa.stateSet.get(t);
		if ( existing != null ) { // seen before; point at old one
			d.addEdge(new Edge(existing, label));
			return;
		}

		// fix: this trace was an unconditional println; emit only when debugging
		if ( debug ) System.out.println("ADD "+t);
		work.add(t);     // add to work list to continue NFA conversion
		dfa.addState(t); // add state we've never seen before
		if ( t.isAcceptState ) accepts.add(t);
		d.addEdge(new Edge(t, label));
	}

	/** Given the set of NFA states in DFA state d, find all NFA states
	 *  reachable traversing label arcs.  By definition, there can be
	 *  only one DFA state reachable by a single label from DFA state d so we must
	 *  find and merge all NFA states reachable via label.  Return a new
	 *  LexerState that has all of those NFA states.
	 */
	public LexerState reach(LexerState d, IntervalSet label) {
		//System.out.println("reach "+label.toString(g)+" from "+d.stateNumber);
		LexerState labelTarget = dfa.newLexerState();
		for (NFAState s : d.nfaStates) {
			int n = s.getNumberOfTransitions();
			for (int i=0; i<n; i++) { // for each transition
				Transition t = s.transition(i);
				// found a transition with label; does it collide with label?
				if ( !t.isEpsilon() && !t.label().and(label).isNil() ) {
					// add NFA target to (potentially) new DFA state
					labelTarget.nfaStates.add(t.target);
				}
			}
		}
		return labelTarget;
	}

	/** For all NFA states in d, compute the epsilon closure; that is, find
	 *  all NFA states reachable from the NFA states in d purely via epsilon
	 *  transitions.
	 */
	public void closure(LexerState d) {
		if ( debug ) {
			System.out.println("closure("+d+")");
		}

		List<NFAState> states = new ArrayList<NFAState>();
		states.addAll(d.nfaStates.elements()); // dup initial list; avoid walk/update issue
		for (NFAState s : states) closure(d, s, NFAContext.EMPTY); // update d.nfaStates

		if ( debug ) {
			System.out.println("after closure("+d+")");
		}
		//System.out.println("after closure d="+d);
	}

	/** Add s and everything reachable from s via epsilon edges to d.
	 *  context tracks the rule-invocation stack so a rule-stop state can
	 *  continue at the caller's follow state; reaching a stop state with an
	 *  EMPTY context means a whole token rule matched, so d is an accept state.
	 */
	public void closure(LexerState d, NFAState s, NFAContext context) {
		// s itself is always in closure
		d.nfaStates.add(s);

		if ( s instanceof RuleStopState ) {
			// TODO: chase FOLLOW links if recursive
			if ( context!=NFAContext.EMPTY ) {
				if ( !d.nfaStates.contains(context.returnState) ) {
					closure(d, context.returnState, context.parent);
				}
				// do nothing if context not empty and already added to nfaStates
			}
			else {
				d.isAcceptState = true; // completed a token rule
			}
		}
		else {
			int n = s.getNumberOfTransitions();
			for (int i=0; i<n; i++) {
				Transition t = s.transition(i);
				if ( t instanceof RuleTransition ) {
					// push the follow state so the called rule returns correctly
					NFAContext newContext =
						new NFAContext(context, ((RuleTransition)t).followState);
					if ( !d.nfaStates.contains(t.target) ) closure(d, t.target, newContext);
				}
				else if ( t.isEpsilon() && !d.nfaStates.contains(t.target) ) {
					closure(d, t.target, context);
				}
			}
		}
	}
}

View File

@ -142,7 +142,7 @@ public class LinearApproximator {
DFAState t = new DFAState(dfa);
t.nfaConfigs = altConfigs.get(a)[k];
dfa.addState(t);
if ( k==depth ) dfa.defineAcceptState(a, t);
if ( k==depth ) dfa.addAcceptState(a, t);
Edge e = new Edge(t, look[k]);
d.addEdge(e);
d = t;

View File

@ -172,7 +172,7 @@ public class Resolver {
d.isAcceptState = true;
d.predictsAlt = minAlt;
// might be adding new accept state for alt, but that's ok
converter.dfa.defineAcceptState(minAlt, d);
converter.dfa.addAcceptState(minAlt, d);
}
/** Turn off all configurations associated with the

View File

@ -117,7 +117,7 @@ public class StackLimitedNFAToDFAConverter {
* configuration in DFA state d.
*/
void reach(DFAState d) {
OrderedHashSet<IntervalSet> labels = getReachableLabels(d);
OrderedHashSet<IntervalSet> labels = DFA.getReachableLabels(d);
for (IntervalSet label : labels) {
DFAState t = reach(d, label);
@ -178,7 +178,7 @@ public class StackLimitedNFAToDFAConverter {
if ( alt > 0 ) { // uniquely predicts an alt?
System.out.println(t+" predicts "+alt);
// Define new stop state
dfa.defineAcceptState(alt, t);
dfa.addAcceptState(alt, t);
}
else {
System.out.println("ADD "+t);
@ -268,6 +268,8 @@ public class StackLimitedNFAToDFAConverter {
closureBusy = new HashSet<NFAConfig>();
// TODO: can we avoid this separate list by directly filling d.nfaConfigs?
// OH: concurrent modification. dup initialconfigs?
List<NFAConfig> configs = new ArrayList<NFAConfig>();
for (NFAConfig c : d.nfaConfigs) {
closure(c.state, c.alt, c.context, c.semanticContext, collectPredicates, configs);
@ -414,130 +416,6 @@ public class StackLimitedNFAToDFAConverter {
}
}
public OrderedHashSet<IntervalSet> getReachableLabels(DFAState d) {
OrderedHashSet<IntervalSet> reachableLabels = new OrderedHashSet<IntervalSet>();
for (NFAState s : d.getUniqueNFAStates()) { // for each state
int n = s.getNumberOfTransitions();
for (int i=0; i<n; i++) { // for each transition
Transition t = s.transition(i);
IntervalSet label = null;
if ( t instanceof AtomTransition ) {
label = IntervalSet.of(((AtomTransition)t).label);
}
else if ( t instanceof SetTransition ) {
label = ((SetTransition)t).label;
}
if ( label!=null ) {
addReachableLabel(reachableLabels, label);
}
}
}
//System.out.println("reachable labels for "+d+"="+reachableLabels);
return reachableLabels;
}
/** Add label uniquely and disjointly; intersection with
* another set or int/char forces breaking up the set(s).
*
* Example, if reachable list of labels is [a..z, {k,9}, 0..9],
* the disjoint list will be [{a..j,l..z}, k, 9, 0..8].
*
* As we add NFA configurations to a DFA state, we might as well track
* the set of all possible transition labels to make the DFA conversion
* more efficient. W/o the reachable labels, we'd need to check the
* whole vocabulary space (could be 0..\uFFFE)! The problem is that
* labels can be sets, which may overlap with int labels or other sets.
* As we need a deterministic set of transitions from any
* state in the DFA, we must make the reachable labels set disjoint.
* This operation amounts to finding the character classes for this
* DFA state whereas with tools like flex, that need to generate a
* homogeneous DFA, must compute char classes across all states.
* We are going to generate DFAs with heterogeneous states so we
* only care that the set of transitions out of a single state is
* unique. :)
*
* The idea for adding a new set, t, is to look for overlap with the
* elements of existing list s. Upon overlap, replace
* existing set s[i] with two new disjoint sets, s[i]-t and s[i]&t.
* (if s[i]-t is nil, don't add). The remainder is t-s[i], which is
* what you want to add to the set minus what was already there. The
* remainder must then be compared against the i+1..n elements in s
* looking for another collision. Each collision results in a smaller
* and smaller remainder. Stop when you run out of s elements or
* remainder goes to nil. If remainder is non nil when you run out of
* s elements, then add remainder to the end.
*/
protected void addReachableLabel(OrderedHashSet<IntervalSet> reachableLabels,
IntervalSet label)
{
/*
System.out.println("addReachableLabel to state "+dfa.decisionNumber+"."+stateNumber+": "+label.getSet().toString(dfa.nfa.grammar));
System.out.println("start of add to state "+dfa.decisionNumber+"."+stateNumber+": " +
"reachableLabels="+reachableLabels.toString());
*/
if ( reachableLabels.contains(label) ) { // exact label present
return;
}
IntervalSet remainder = label; // remainder starts out as whole set to add
int n = reachableLabels.size(); // only look at initial elements
// walk the existing list looking for the collision
for (int i=0; i<n; i++) {
IntervalSet rl = reachableLabels.get(i);
/*
System.out.println("comparing ["+i+"]: "+label.toString(dfa.nfa.grammar)+" & "+
rl.toString(dfa.nfa.grammar)+"="+
intersection.toString(dfa.nfa.grammar));
*/
IntervalSet intersection = (IntervalSet)label.and(rl);
if ( intersection.isNil() ) {
continue;
}
//System.out.println(label+" collides with "+rl);
// For any (s_i, t) with s_i&t!=nil replace with (s_i-t, s_i&t)
// (ignoring s_i-t if nil; don't put in list)
// Replace existing s_i with intersection since we
// know that will always be a non nil character class
IntervalSet s_i = rl;
reachableLabels.set(i, intersection);
// Compute s_i-t to see what is in current set and not in incoming
IntervalSet existingMinusNewElements = (IntervalSet)s_i.subtract(label);
//System.out.println(s_i+"-"+t+"="+existingMinusNewElements);
if ( !existingMinusNewElements.isNil() ) {
// found a new character class, add to the end (doesn't affect
// outer loop duration due to n computation a priori.
reachableLabels.add(existingMinusNewElements);
}
/*
System.out.println("after collision, " +
"reachableLabels="+reachableLabels.toString());
*/
// anything left to add to the reachableLabels?
remainder = (IntervalSet)label.subtract(s_i);
if ( remainder.isNil() ) {
break; // nothing left to add to set. done!
}
label = remainder;
}
if ( !remainder.isNil() ) {
/*
System.out.println("before add remainder to state "+dfa.decisionNumber+"."+stateNumber+": " +
"reachableLabels="+reachableLabels.toString());
System.out.println("remainder state "+dfa.decisionNumber+"."+stateNumber+": "+remainder.toString(dfa.nfa.grammar));
*/
reachableLabels.add(remainder);
}
/*
System.out.println("#END of add to state "+dfa.decisionNumber+"."+stateNumber+": " +
"reachableLabels="+reachableLabels.toString());
*/
}
/** for each NFA config in d, look for "predicate required" sign we set
* during nondeterminism resolution.
*
@ -573,7 +451,7 @@ public class StackLimitedNFAToDFAConverter {
c.alt,
c.context,
c.semanticContext);
dfa.defineAcceptState(c.alt, predDFATarget);
dfa.addAcceptState(c.alt, predDFATarget);
// add a transition to pred target from d
d.addEdge(new PredicateEdge(c.semanticContext, predDFATarget));
}

View File

@ -1,6 +1,8 @@
package org.antlr.v4.automata;
import org.antlr.v4.analysis.StackLimitedNFAToDFAConverter;
import org.antlr.v4.misc.IntervalSet;
import org.antlr.v4.misc.OrderedHashSet;
import org.antlr.v4.tool.Grammar;
import java.lang.reflect.Array;
@ -47,7 +49,7 @@ public class DFA {
/** Did DFA minimization do anything? */
public boolean minimized;
public boolean cyclic;
//public boolean cyclic;
/** Unique state numbers per DFA */
int stateCounter = 0;
@ -75,10 +77,14 @@ public class DFA {
states.add( d ); // index in states should be d.stateCounter
}
/** Register acceptState with this DFA (if not already tracked) and mark it
 *  as an accept state predicting alt.  Delegates marking to defineAcceptState.
 */
public void addAcceptState(int alt, DFAState acceptState) {
	if ( stateSet.get(acceptState)==null ) addState(acceptState);
	defineAcceptState(alt, acceptState);
}
public void defineAcceptState(int alt, DFAState acceptState) {
acceptState.isAcceptState = true;
acceptState.predictsAlt = alt;
if ( stateSet.get(acceptState)==null ) addState(acceptState);
if ( altToAcceptStates[alt]==null ) {
altToAcceptStates[alt] = new ArrayList<DFAState>();
}
@ -87,8 +93,11 @@ public class DFA {
/** Factory for a plain DFA state bound to this DFA.
 *  Note: the state is NOT added to the states list here; callers use
 *  addState() when they decide to keep it.
 */
public DFAState newState() {
	DFAState n = new DFAState(this);
//		states.setSize(n.stateNumber+1);
//		states.set(n.stateNumber, n); // track state num to state
	return n;
}
/** Factory for a lexer DFA state (tracks bare NFA states, not configs);
 *  like newState(), the caller is responsible for addState().
 */
public LexerState newLexerState() {
	LexerState n = new LexerState(this);
	return n;
}
@ -115,4 +124,130 @@ public class DFA {
return serializer.toString();
}
/** Collect the labels of all non-epsilon transitions leaving DFA state d,
 *  folded in disjointly via addReachableLabel() so the result partitions
 *  the input vocabulary reachable from d.
 */
public static OrderedHashSet<IntervalSet> getReachableLabels(DFAState d) {
	OrderedHashSet<IntervalSet> reachableLabels = new OrderedHashSet<IntervalSet>();
	for (NFAState s : d.getUniqueNFAStates()) { // for each state
		int n = s.getNumberOfTransitions();
		for (int i=0; i<n; i++) { // for each transition
			Transition t = s.transition(i);
			IntervalSet label = t.label();
//			if ( t instanceof AtomTransition ) {
//				label = IntervalSet.of(((AtomTransition)t).label);
//			}
//			else if ( t instanceof RangeTransition ) {
//				label = ((RangeTransition)t).label();
//			}
//			else if ( t instanceof SetTransition ) {
//				label = ((SetTransition)t).label;
//			}
			// presumably label() is null for epsilon transitions — TODO confirm
			// against the Transition.label() contract
			if ( label!=null ) {
				addReachableLabel(reachableLabels, label);
			}
		}
	}
	//System.out.println("reachable labels for "+d+"="+reachableLabels);
	return reachableLabels;
}
/** Add label uniquely and disjointly; intersection with
 *  another set or int/char forces breaking up the set(s).
 *
 *  Example, if reachable list of labels is [a..z, {k,9}, 0..9],
 *  the disjoint list will be [{a..j,l..z}, k, 9, 0..8].
 *
 *  As we add NFA configurations to a DFA state, we might as well track
 *  the set of all possible transition labels to make the DFA conversion
 *  more efficient.  W/o the reachable labels, we'd need to check the
 *  whole vocabulary space (could be 0..\uFFFE)!  The problem is that
 *  labels can be sets, which may overlap with int labels or other sets.
 *  As we need a deterministic set of transitions from any
 *  state in the DFA, we must make the reachable labels set disjoint.
 *  This operation amounts to finding the character classes for this
 *  DFA state whereas with tools like flex, that need to generate a
 *  homogeneous DFA, must compute char classes across all states.
 *  We are going to generate DFAs with heterogeneous states so we
 *  only care that the set of transitions out of a single state is
 *  unique. :)
 *
 *  The idea for adding a new set, t, is to look for overlap with the
 *  elements of existing list s.  Upon overlap, replace
 *  existing set s[i] with two new disjoint sets, s[i]-t and s[i]&t.
 *  (if s[i]-t is nil, don't add).  The remainder is t-s[i], which is
 *  what you want to add to the set minus what was already there.  The
 *  remainder must then be compared against the i+1..n elements in s
 *  looking for another collision.  Each collision results in a smaller
 *  and smaller remainder.  Stop when you run out of s elements or
 *  remainder goes to nil.  If remainder is non nil when you run out of
 *  s elements, then add remainder to the end.
 */
public static void addReachableLabel(OrderedHashSet<IntervalSet> reachableLabels,
									 IntervalSet label)
{
	/*
	System.out.println("addReachableLabel to state "+dfa.decisionNumber+"."+stateNumber+": "+label.getSet().toString(dfa.nfa.grammar));
	System.out.println("start of add to state "+dfa.decisionNumber+"."+stateNumber+": " +
			"reachableLabels="+reachableLabels.toString());
	*/
	if ( reachableLabels.contains(label) ) { // exact label present
		return;
	}
	IntervalSet remainder = label; // remainder starts out as whole set to add
	int n = reachableLabels.size(); // only look at initial elements
	// walk the existing list looking for the collision
	for (int i=0; i<n; i++) {
		IntervalSet rl = reachableLabels.get(i);
		/*
		System.out.println("comparing ["+i+"]: "+label.toString(dfa.nfa.grammar)+" & "+
				rl.toString(dfa.nfa.grammar)+"="+
				intersection.toString(dfa.nfa.grammar));
		*/
		IntervalSet intersection = (IntervalSet)label.and(rl);
		if ( intersection.isNil() ) {
			continue; // disjoint with this element; try the next one
		}
		//System.out.println(label+" collides with "+rl);

		// For any (s_i, t) with s_i&t!=nil replace with (s_i-t, s_i&t)
		// (ignoring s_i-t if nil; don't put in list)

		// Replace existing s_i with intersection since we
		// know that will always be a non nil character class
		IntervalSet s_i = rl;
		reachableLabels.set(i, intersection);

		// Compute s_i-t to see what is in current set and not in incoming
		IntervalSet existingMinusNewElements = (IntervalSet)s_i.subtract(label);
		//System.out.println(s_i+"-"+t+"="+existingMinusNewElements);
		if ( !existingMinusNewElements.isNil() ) {
			// found a new character class, add to the end (doesn't affect
			// outer loop duration due to n computation a priori)
			reachableLabels.add(existingMinusNewElements);
		}

		/*
		System.out.println("after collision, " +
				"reachableLabels="+reachableLabels.toString());
		*/

		// anything left to add to the reachableLabels?
		remainder = (IntervalSet)label.subtract(s_i);
		if ( remainder.isNil() ) {
			break; // nothing left to add to set.  done!
		}

		label = remainder;
	}
	if ( !remainder.isNil() ) {
		/*
		System.out.println("before add remainder to state "+dfa.decisionNumber+"."+stateNumber+": " +
				"reachableLabels="+reachableLabels.toString());
		System.out.println("remainder state "+dfa.decisionNumber+"."+stateNumber+": "+remainder.toString(dfa.nfa.grammar));
		*/
		reachableLabels.add(remainder);
	}
	/*
	System.out.println("#END of add to state "+dfa.decisionNumber+"."+stateNumber+": " +
			"reachableLabels="+reachableLabels.toString());
	*/
}
}

View File

@ -1,6 +1,7 @@
package org.antlr.v4.automata;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.Rule;
import java.util.ArrayList;
import java.util.HashSet;
@ -46,7 +47,19 @@ public class DFASerializer {
/** Render a DFA state for serialization: "sN" for an ordinary state,
 *  ":sN=> ..." for an accept state.  A LexerState accept lists every rule
 *  it matches (ambiguous rules give more than one); any other accept state
 *  shows the uniquely predicted alt.
 */
String getStateString(DFAState s) {
	int n = s.stateNumber;
	String stateStr = "s"+n;
	// fix: removed dead pre-assignment of stateStr that duplicated the else
	// branch below and needlessly invoked getUniquelyPredictedAlt() for
	// lexer states too (leftover from the pre-LexerState version).
	if ( s.isAcceptState ) {
		if ( s instanceof LexerState ) {
			StringBuilder buf = new StringBuilder(":s"+n+"=>");
			for (Rule r : ((LexerState)s).matchesRules) {
				buf.append(" "+r.name);
			}
			stateStr = buf.toString();
		}
		else {
			stateStr = ":s"+n+"=>"+s.getUniquelyPredictedAlt();
		}
	}
	return stateStr;
}
}

View File

@ -58,8 +58,7 @@ public class DFAState {
new ArrayList<Edge>(INITIAL_NUM_TRANSITIONS);
/** The set of NFA configurations (state,alt,context) for this DFA state */
public OrderedHashSet<NFAConfig> nfaConfigs =
new OrderedHashSet<NFAConfig>();
public OrderedHashSet<NFAConfig> nfaConfigs;
/** Rather than recheck every NFA configuration in a DFA state (after
* resolving) in reach just check this boolean. Saves a linear walk
@ -72,9 +71,14 @@ public class DFAState {
//int cachedUniquelyPredicatedAlt = NFA.INVALID_ALT_NUMBER;
public DFAState(DFA dfa) { this.dfa = dfa; }
public DFAState() {; }
public void addNFAConfig(NFAState s, NFAConfig c) {
public DFAState(DFA dfa) {
this.dfa = dfa;
nfaConfigs = new OrderedHashSet<NFAConfig>();
}
/** Add configuration c to this DFA state's set; duplicates (per
 *  NFAConfig.equals) are silently ignored.
 */
public void addNFAConfig(NFAConfig c) {
	if ( nfaConfigs.contains(c) ) return;
	nfaConfigs.add(c);
}
@ -85,7 +89,7 @@ public class DFAState {
SemanticContext semanticContext)
{
NFAConfig c = new NFAConfig(state, alt, context, semanticContext);
addNFAConfig(state, c);
addNFAConfig(c);
return c;
}

View File

@ -0,0 +1,10 @@
package org.antlr.v4.automata;
import org.antlr.v4.tool.Grammar;
/** TODO: do we need? */
public class LexerDFA extends DFA {
	// Placeholder subclass: currently adds no behavior beyond DFA;
	// the constructor simply delegates.
	public LexerDFA(Grammar g, DecisionState startState) {
		super(g, startState);
	}
}

View File

@ -3,6 +3,7 @@ package org.antlr.v4.automata;
import org.antlr.v4.codegen.Target;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarAST;
import org.antlr.v4.tool.Rule;
import org.antlr.v4.tool.TerminalAST;
import org.stringtemplate.v4.misc.Misc;
@ -11,7 +12,20 @@ public class LexerNFAFactory extends ParserNFAFactory {
public LexerNFAFactory(Grammar g) { super(g); }
/** Build the lexer NFA: create the synthetic Tokens-rule start state first,
 *  build the per-rule NFAs, then epsilon-link the start state to every
 *  non-fragment token rule's start state so the DFA conversion sees one
 *  combined decision.
 */
public NFA createNFA() {
	// create s0, start state (must be first)
	// implied Tokens rule node
	NFAState startState = newState(TokensStartState.class, null);

	_createNFA();

	// LINK START STATE TO EACH TOKEN RULE
	for (Rule r : g.rules.values()) {
		// fragment rules are only reachable via rule references, not from s0
		if ( !r.isFragment() ) {
			RuleStartState s = nfa.ruleToStartState.get(r);
			epsilon(startState, s);
		}
	}
	return nfa;
}
@ -44,4 +58,8 @@ public class LexerNFAFactory extends ParserNFAFactory {
return new Handle(left, right);
}
@Override
public Handle tokenRef(TerminalAST node) {
	// In a lexer, a token reference is an invocation of that token rule's
	// subautomaton, so treat it exactly like a rule reference.
	return ruleRef(node);
}
}

View File

@ -0,0 +1,61 @@
package org.antlr.v4.automata;
import org.antlr.v4.misc.OrderedHashSet;
import org.antlr.v4.tool.Rule;
import java.util.HashSet;
import java.util.Set;
/** Lexer DFA states track just NFAStates not config with stack/alt etc... like
 *  DFA used for prediction.
 */
public class LexerState extends DFAState {
	/** The set of NFA states this DFA state represents (no alt/context info). */
	public OrderedHashSet<NFAState> nfaStates;

	/** For ambiguous lexer rules, the accept state matches a set of rules,
	 *  not just one.  Means we can't use predictsAlt (an int).
	 */
	public Set<Rule> matchesRules = new HashSet<Rule>();

	public LexerState(DFA dfa) {
		this.dfa = dfa;
		nfaStates = new OrderedHashSet<NFAState>();
	}

	public Set<NFAState> getUniqueNFAStates() { return nfaStates; }

	/** Lexer states track no alt information; always null. */
	public Set<Integer> getAltSet() { return null; }

	/** Two LexerStates are equal if their NFA state lists are the
	 *  same.  Don't test the DFA state numbers here because
	 *  we use to know if any other state exists that has this exact set
	 *  of states.  The DFAState state number is irrelevant.
	 */
	public boolean equals(Object o) {
		// compare set of NFA configurations in this set with other
		if ( this==o ) return true;
		// fix: guard against null and non-LexerState arguments to honor the
		// Object.equals contract (previously threw ClassCastException/NPE)
		if ( !(o instanceof LexerState) ) return false;
		LexerState other = (LexerState)o;
		return this.nfaStates.equals(other.nfaStates);
	}

	/** Must agree with equals: order-independent sum of NFA state numbers. */
	public int hashCode() {
		int h = 0;
		for (NFAState s : nfaStates) h += s.stateNumber;
		return h;
	}

	/** Print all NFA states plus what alts they predict */
	public String toString() {
		// StringBuilder: no synchronization needed for a local buffer
		StringBuilder buf = new StringBuilder();
		buf.append(stateNumber+":{");
		for (int i = 0; i < nfaStates.size(); i++) {
			NFAState s = nfaStates.get(i);
			if ( i>0 ) {
				buf.append(", ");
			}
			buf.append(s);
		}
		buf.append("}");
		return buf.toString();
	}
}

View File

@ -0,0 +1,6 @@
package org.antlr.v4.automata;
/** The Tokens rule start state linking to each lexer rule start state */
public class TokensStartState extends BlockStartState {
	// No extra state; the distinct type lets conversion code recognize the
	// synthetic Tokens-rule entry point via instanceof/class checks.
	public TokensStartState(NFA nfa) { super(nfa); }
}

View File

@ -140,10 +140,13 @@ tokensSection
)
;
rule: ^( RULE name=ID .+)
rule
@init {List<GrammarAST> modifiers = new ArrayList<GrammarAST>();}
: ^( RULE name=ID (^(RULEMODIFIERS (m=. {modifiers.add($m);})+))? .)
{
int numAlts = $RULE.getFirstChildWithType(BLOCK).getChildCount();
Rule r = new Rule(g, $name.text, (GrammarASTWithOptions)$RULE, numAlts);
if ( modifiers.size()>0 ) r.modifiers = modifiers;
rules.add(r);
currentRule = r;
currentAlt = 1;

View File

@ -1,4 +1,4 @@
// $ANTLR ${project.version} ${buildNumber} CollectSymbols.g 2010-02-22 16:10:22
// $ANTLR ${project.version} ${buildNumber} CollectSymbols.g 2010-03-26 16:10:17
/*
[The "BSD license"]
@ -589,20 +589,32 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "rule"
// CollectSymbols.g:143:1: rule : ^( RULE name= ID ( . )+ ) ;
// CollectSymbols.g:143:1: rule : ^( RULE name= ID ( ^( RULEMODIFIERS (m= . )+ ) )? . ) ;
public final void rule() throws RecognitionException {
GrammarAST name=null;
GrammarAST RULE7=null;
GrammarAST m=null;
List<GrammarAST> modifiers = new ArrayList<GrammarAST>();
try {
// CollectSymbols.g:143:5: ( ^( RULE name= ID ( . )+ ) )
// CollectSymbols.g:143:9: ^( RULE name= ID ( . )+ )
// CollectSymbols.g:145:2: ( ^( RULE name= ID ( ^( RULEMODIFIERS (m= . )+ ) )? . ) )
// CollectSymbols.g:145:6: ^( RULE name= ID ( ^( RULEMODIFIERS (m= . )+ ) )? . )
{
RULE7=(GrammarAST)match(input,RULE,FOLLOW_RULE_in_rule350); if (state.failed) return ;
RULE7=(GrammarAST)match(input,RULE,FOLLOW_RULE_in_rule357); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
name=(GrammarAST)match(input,ID,FOLLOW_ID_in_rule354); if (state.failed) return ;
// CollectSymbols.g:143:25: ( . )+
name=(GrammarAST)match(input,ID,FOLLOW_ID_in_rule361); if (state.failed) return ;
// CollectSymbols.g:145:22: ( ^( RULEMODIFIERS (m= . )+ ) )?
int alt5=2;
alt5 = dfa5.predict(input);
switch (alt5) {
case 1 :
// CollectSymbols.g:145:23: ^( RULEMODIFIERS (m= . )+ )
{
match(input,RULEMODIFIERS,FOLLOW_RULEMODIFIERS_in_rule365); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
// CollectSymbols.g:145:39: (m= . )+
int cnt4=0;
loop4:
do {
@ -612,16 +624,17 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
if ( ((LA4_0>=SEMPRED && LA4_0<=ALT_REWRITE)) ) {
alt4=1;
}
else if ( (LA4_0==UP) ) {
alt4=2;
}
switch (alt4) {
case 1 :
// CollectSymbols.g:143:25: .
// CollectSymbols.g:145:40: m= .
{
m=(GrammarAST)input.LT(1);
matchAny(input); if (state.failed) return ;
if ( state.backtracking==1 ) {
modifiers.add(m);
}
}
break;
@ -637,11 +650,21 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
} while (true);
match(input, Token.UP, null); if (state.failed) return ;
}
break;
}
matchAny(input); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
int numAlts = RULE7.getFirstChildWithType(BLOCK).getChildCount();
Rule r = new Rule(g, (name!=null?name.getText():null), (GrammarASTWithOptions)RULE7, numAlts);
if ( modifiers.size()>0 ) r.modifiers = modifiers;
rules.add(r);
currentRule = r;
currentAlt = 1;
@ -665,14 +688,14 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
};
// $ANTLR start "setAlt"
// CollectSymbols.g:153:1: setAlt : {...}? ( ALT | ALT_REWRITE ) ;
// CollectSymbols.g:156:1: setAlt : {...}? ( ALT | ALT_REWRITE ) ;
public final CollectSymbols.setAlt_return setAlt() throws RecognitionException {
CollectSymbols.setAlt_return retval = new CollectSymbols.setAlt_return();
retval.start = input.LT(1);
try {
// CollectSymbols.g:154:2: ({...}? ( ALT | ALT_REWRITE ) )
// CollectSymbols.g:154:4: {...}? ( ALT | ALT_REWRITE )
// CollectSymbols.g:157:2: ({...}? ( ALT | ALT_REWRITE ) )
// CollectSymbols.g:157:4: {...}? ( ALT | ALT_REWRITE )
{
if ( !((inContext("RULE BLOCK"))) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
@ -707,13 +730,13 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "finishRule"
// CollectSymbols.g:158:1: finishRule : RULE ;
// CollectSymbols.g:161:1: finishRule : RULE ;
public final void finishRule() throws RecognitionException {
try {
// CollectSymbols.g:159:2: ( RULE )
// CollectSymbols.g:159:4: RULE
// CollectSymbols.g:162:2: ( RULE )
// CollectSymbols.g:162:4: RULE
{
match(input,RULE,FOLLOW_RULE_in_finishRule403); if (state.failed) return ;
match(input,RULE,FOLLOW_RULE_in_finishRule425); if (state.failed) return ;
if ( state.backtracking==1 ) {
currentRule = null;
}
@ -733,24 +756,24 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleNamedAction"
// CollectSymbols.g:162:1: ruleNamedAction : {...}? ^( AT ID ACTION ) ;
// CollectSymbols.g:165:1: ruleNamedAction : {...}? ^( AT ID ACTION ) ;
public final void ruleNamedAction() throws RecognitionException {
GrammarAST ID8=null;
GrammarAST ACTION9=null;
try {
// CollectSymbols.g:163:2: ({...}? ^( AT ID ACTION ) )
// CollectSymbols.g:163:4: {...}? ^( AT ID ACTION )
// CollectSymbols.g:166:2: ({...}? ^( AT ID ACTION ) )
// CollectSymbols.g:166:4: {...}? ^( AT ID ACTION )
{
if ( !((inContext("RULE"))) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "ruleNamedAction", "inContext(\"RULE\")");
}
match(input,AT,FOLLOW_AT_in_ruleNamedAction419); if (state.failed) return ;
match(input,AT,FOLLOW_AT_in_ruleNamedAction441); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
ID8=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleNamedAction421); if (state.failed) return ;
ACTION9=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleNamedAction423); if (state.failed) return ;
ID8=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleNamedAction443); if (state.failed) return ;
ACTION9=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleNamedAction445); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -775,20 +798,20 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleAction"
// CollectSymbols.g:170:1: ruleAction : {...}? ACTION ;
// CollectSymbols.g:173:1: ruleAction : {...}? ACTION ;
public final void ruleAction() throws RecognitionException {
GrammarAST ACTION10=null;
try {
// CollectSymbols.g:171:2: ({...}? ACTION )
// CollectSymbols.g:171:4: {...}? ACTION
// CollectSymbols.g:174:2: ({...}? ACTION )
// CollectSymbols.g:174:4: {...}? ACTION
{
if ( !((inContext("RULE ...")&&!inContext("SCOPE")&&
!inContext("CATCH")&&!inContext("FINALLY")&&!inContext("AT"))) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "ruleAction", "inContext(\"RULE ...\")&&!inContext(\"SCOPE\")&&\n\t\t !inContext(\"CATCH\")&&!inContext(\"FINALLY\")&&!inContext(\"AT\")");
}
ACTION10=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleAction443); if (state.failed) return ;
ACTION10=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleAction465); if (state.failed) return ;
if ( state.backtracking==1 ) {
currentRule.alt[currentAlt].actions.add((ActionAST)ACTION10);
@ -811,19 +834,19 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "exceptionHandler"
// CollectSymbols.g:180:1: exceptionHandler : ^( CATCH ARG_ACTION ACTION ) ;
// CollectSymbols.g:183:1: exceptionHandler : ^( CATCH ARG_ACTION ACTION ) ;
public final void exceptionHandler() throws RecognitionException {
GrammarAST ACTION11=null;
try {
// CollectSymbols.g:181:2: ( ^( CATCH ARG_ACTION ACTION ) )
// CollectSymbols.g:181:4: ^( CATCH ARG_ACTION ACTION )
// CollectSymbols.g:184:2: ( ^( CATCH ARG_ACTION ACTION ) )
// CollectSymbols.g:184:4: ^( CATCH ARG_ACTION ACTION )
{
match(input,CATCH,FOLLOW_CATCH_in_exceptionHandler459); if (state.failed) return ;
match(input,CATCH,FOLLOW_CATCH_in_exceptionHandler481); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_exceptionHandler461); if (state.failed) return ;
ACTION11=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_exceptionHandler463); if (state.failed) return ;
match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_exceptionHandler483); if (state.failed) return ;
ACTION11=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_exceptionHandler485); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -848,18 +871,18 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "finallyClause"
// CollectSymbols.g:188:1: finallyClause : ^( FINALLY ACTION ) ;
// CollectSymbols.g:191:1: finallyClause : ^( FINALLY ACTION ) ;
public final void finallyClause() throws RecognitionException {
GrammarAST ACTION12=null;
try {
// CollectSymbols.g:189:2: ( ^( FINALLY ACTION ) )
// CollectSymbols.g:189:4: ^( FINALLY ACTION )
// CollectSymbols.g:192:2: ( ^( FINALLY ACTION ) )
// CollectSymbols.g:192:4: ^( FINALLY ACTION )
{
match(input,FINALLY,FOLLOW_FINALLY_in_finallyClause480); if (state.failed) return ;
match(input,FINALLY,FOLLOW_FINALLY_in_finallyClause502); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
ACTION12=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_finallyClause482); if (state.failed) return ;
ACTION12=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_finallyClause504); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -884,19 +907,19 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleArg"
// CollectSymbols.g:196:1: ruleArg : {...}? ARG_ACTION ;
// CollectSymbols.g:199:1: ruleArg : {...}? ARG_ACTION ;
public final void ruleArg() throws RecognitionException {
GrammarAST ARG_ACTION13=null;
try {
// CollectSymbols.g:197:2: ({...}? ARG_ACTION )
// CollectSymbols.g:197:4: {...}? ARG_ACTION
// CollectSymbols.g:200:2: ({...}? ARG_ACTION )
// CollectSymbols.g:200:4: {...}? ARG_ACTION
{
if ( !((inContext("RULE"))) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "ruleArg", "inContext(\"RULE\")");
}
ARG_ACTION13=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleArg502); if (state.failed) return ;
ARG_ACTION13=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleArg524); if (state.failed) return ;
if ( state.backtracking==1 ) {
currentRule.args = ScopeParser.parseTypeList((ARG_ACTION13!=null?ARG_ACTION13.getText():null));
@ -919,18 +942,18 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleReturns"
// CollectSymbols.g:204:1: ruleReturns : ^( RETURNS ARG_ACTION ) ;
// CollectSymbols.g:207:1: ruleReturns : ^( RETURNS ARG_ACTION ) ;
public final void ruleReturns() throws RecognitionException {
GrammarAST ARG_ACTION14=null;
try {
// CollectSymbols.g:205:2: ( ^( RETURNS ARG_ACTION ) )
// CollectSymbols.g:205:4: ^( RETURNS ARG_ACTION )
// CollectSymbols.g:208:2: ( ^( RETURNS ARG_ACTION ) )
// CollectSymbols.g:208:4: ^( RETURNS ARG_ACTION )
{
match(input,RETURNS,FOLLOW_RETURNS_in_ruleReturns519); if (state.failed) return ;
match(input,RETURNS,FOLLOW_RETURNS_in_ruleReturns541); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
ARG_ACTION14=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleReturns521); if (state.failed) return ;
ARG_ACTION14=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleReturns543); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -955,40 +978,40 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleScopeSpec"
// CollectSymbols.g:212:1: ruleScopeSpec : {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) ;
// CollectSymbols.g:215:1: ruleScopeSpec : {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) ;
public final void ruleScopeSpec() throws RecognitionException {
GrammarAST ACTION15=null;
GrammarAST ids=null;
List list_ids=null;
try {
// CollectSymbols.g:213:2: ({...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) )
// CollectSymbols.g:213:4: {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) )
// CollectSymbols.g:216:2: ({...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) )
// CollectSymbols.g:216:4: {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) )
{
if ( !((inContext("RULE"))) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "ruleScopeSpec", "inContext(\"RULE\")");
}
// CollectSymbols.g:214:3: ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) )
int alt6=2;
int LA6_0 = input.LA(1);
// CollectSymbols.g:217:3: ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) )
int alt7=2;
int LA7_0 = input.LA(1);
if ( (LA6_0==SCOPE) ) {
int LA6_1 = input.LA(2);
if ( (LA7_0==SCOPE) ) {
int LA7_1 = input.LA(2);
if ( (LA6_1==DOWN) ) {
int LA6_2 = input.LA(3);
if ( (LA7_1==DOWN) ) {
int LA7_2 = input.LA(3);
if ( (LA6_2==ACTION) ) {
alt6=1;
if ( (LA7_2==ACTION) ) {
alt7=1;
}
else if ( (LA6_2==ID) ) {
alt6=2;
else if ( (LA7_2==ID) ) {
alt7=2;
}
else {
if (state.backtracking>0) {state.failed=true; return ;}
NoViableAltException nvae =
new NoViableAltException("", 6, 2, input);
new NoViableAltException("", 7, 2, input);
throw nvae;
}
@ -996,7 +1019,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
else {
if (state.backtracking>0) {state.failed=true; return ;}
NoViableAltException nvae =
new NoViableAltException("", 6, 1, input);
new NoViableAltException("", 7, 1, input);
throw nvae;
}
@ -1004,18 +1027,18 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
else {
if (state.backtracking>0) {state.failed=true; return ;}
NoViableAltException nvae =
new NoViableAltException("", 6, 0, input);
new NoViableAltException("", 7, 0, input);
throw nvae;
}
switch (alt6) {
switch (alt7) {
case 1 :
// CollectSymbols.g:214:5: ^( SCOPE ACTION )
// CollectSymbols.g:217:5: ^( SCOPE ACTION )
{
match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec544); if (state.failed) return ;
match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec566); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
ACTION15=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleScopeSpec546); if (state.failed) return ;
ACTION15=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleScopeSpec568); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -1029,28 +1052,28 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
}
break;
case 2 :
// CollectSymbols.g:220:5: ^( SCOPE (ids+= ID )+ )
// CollectSymbols.g:223:5: ^( SCOPE (ids+= ID )+ )
{
match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec559); if (state.failed) return ;
match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec581); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
// CollectSymbols.g:220:16: (ids+= ID )+
int cnt5=0;
loop5:
// CollectSymbols.g:223:16: (ids+= ID )+
int cnt6=0;
loop6:
do {
int alt5=2;
int LA5_0 = input.LA(1);
int alt6=2;
int LA6_0 = input.LA(1);
if ( (LA5_0==ID) ) {
alt5=1;
if ( (LA6_0==ID) ) {
alt6=1;
}
switch (alt5) {
switch (alt6) {
case 1 :
// CollectSymbols.g:220:16: ids+= ID
// CollectSymbols.g:223:16: ids+= ID
{
ids=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleScopeSpec563); if (state.failed) return ;
ids=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleScopeSpec585); if (state.failed) return ;
if (list_ids==null) list_ids=new ArrayList();
list_ids.add(ids);
@ -1059,13 +1082,13 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
break;
default :
if ( cnt5 >= 1 ) break loop5;
if ( cnt6 >= 1 ) break loop6;
if (state.backtracking>0) {state.failed=true; return ;}
EarlyExitException eee =
new EarlyExitException(5, input);
new EarlyExitException(6, input);
throw eee;
}
cnt5++;
cnt6++;
} while (true);
@ -1097,14 +1120,14 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
};
// $ANTLR start "rewriteElement"
// CollectSymbols.g:224:1: rewriteElement : {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) ;
// CollectSymbols.g:227:1: rewriteElement : {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) ;
public final CollectSymbols.rewriteElement_return rewriteElement() throws RecognitionException {
CollectSymbols.rewriteElement_return retval = new CollectSymbols.rewriteElement_return();
retval.start = input.LT(1);
try {
// CollectSymbols.g:226:2: ({...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) )
// CollectSymbols.g:227:6: {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL )
// CollectSymbols.g:229:2: ({...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) )
// CollectSymbols.g:230:6: {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL )
{
if ( !((inContext("RESULT ..."))) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
@ -1141,7 +1164,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
};
// $ANTLR start "labeledElement"
// CollectSymbols.g:231:1: labeledElement : {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) ;
// CollectSymbols.g:234:1: labeledElement : {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) ;
public final CollectSymbols.labeledElement_return labeledElement() throws RecognitionException {
CollectSymbols.labeledElement_return retval = new CollectSymbols.labeledElement_return();
retval.start = input.LT(1);
@ -1150,38 +1173,38 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
GrammarAST e=null;
try {
// CollectSymbols.g:237:2: ({...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) )
// CollectSymbols.g:237:4: {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) )
// CollectSymbols.g:240:2: ({...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) )
// CollectSymbols.g:240:4: {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) )
{
if ( !((inContext("RULE ..."))) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
throw new FailedPredicateException(input, "labeledElement", "inContext(\"RULE ...\")");
}
// CollectSymbols.g:238:3: ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) )
int alt7=2;
int LA7_0 = input.LA(1);
// CollectSymbols.g:241:3: ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) )
int alt8=2;
int LA8_0 = input.LA(1);
if ( (LA7_0==ASSIGN) ) {
alt7=1;
if ( (LA8_0==ASSIGN) ) {
alt8=1;
}
else if ( (LA7_0==PLUS_ASSIGN) ) {
alt7=2;
else if ( (LA8_0==PLUS_ASSIGN) ) {
alt8=2;
}
else {
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 7, 0, input);
new NoViableAltException("", 8, 0, input);
throw nvae;
}
switch (alt7) {
switch (alt8) {
case 1 :
// CollectSymbols.g:238:5: ^( ASSIGN id= ID e= . )
// CollectSymbols.g:241:5: ^( ASSIGN id= ID e= . )
{
match(input,ASSIGN,FOLLOW_ASSIGN_in_labeledElement627); if (state.failed) return retval;
match(input,ASSIGN,FOLLOW_ASSIGN_in_labeledElement649); if (state.failed) return retval;
match(input, Token.DOWN, null); if (state.failed) return retval;
id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement631); if (state.failed) return retval;
id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement653); if (state.failed) return retval;
e=(GrammarAST)input.LT(1);
matchAny(input); if (state.failed) return retval;
@ -1190,12 +1213,12 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
}
break;
case 2 :
// CollectSymbols.g:239:5: ^( PLUS_ASSIGN id= ID e= . )
// CollectSymbols.g:242:5: ^( PLUS_ASSIGN id= ID e= . )
{
match(input,PLUS_ASSIGN,FOLLOW_PLUS_ASSIGN_in_labeledElement643); if (state.failed) return retval;
match(input,PLUS_ASSIGN,FOLLOW_PLUS_ASSIGN_in_labeledElement665); if (state.failed) return retval;
match(input, Token.DOWN, null); if (state.failed) return retval;
id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement647); if (state.failed) return retval;
id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement669); if (state.failed) return retval;
e=(GrammarAST)input.LT(1);
matchAny(input); if (state.failed) return retval;
@ -1231,7 +1254,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
};
// $ANTLR start "terminal"
// CollectSymbols.g:243:1: terminal : ({...}? STRING_LITERAL | TOKEN_REF );
// CollectSymbols.g:246:1: terminal : ({...}? STRING_LITERAL | TOKEN_REF );
public final CollectSymbols.terminal_return terminal() throws RecognitionException {
CollectSymbols.terminal_return retval = new CollectSymbols.terminal_return();
retval.start = input.LT(1);
@ -1240,32 +1263,32 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
GrammarAST TOKEN_REF17=null;
try {
// CollectSymbols.g:244:5: ({...}? STRING_LITERAL | TOKEN_REF )
int alt8=2;
int LA8_0 = input.LA(1);
// CollectSymbols.g:247:5: ({...}? STRING_LITERAL | TOKEN_REF )
int alt9=2;
int LA9_0 = input.LA(1);
if ( (LA8_0==STRING_LITERAL) ) {
alt8=1;
if ( (LA9_0==STRING_LITERAL) ) {
alt9=1;
}
else if ( (LA8_0==TOKEN_REF) ) {
alt8=2;
else if ( (LA9_0==TOKEN_REF) ) {
alt9=2;
}
else {
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 8, 0, input);
new NoViableAltException("", 9, 0, input);
throw nvae;
}
switch (alt8) {
switch (alt9) {
case 1 :
// CollectSymbols.g:244:7: {...}? STRING_LITERAL
// CollectSymbols.g:247:7: {...}? STRING_LITERAL
{
if ( !((!inContext("TOKENS ASSIGN"))) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
throw new FailedPredicateException(input, "terminal", "!inContext(\"TOKENS ASSIGN\")");
}
STRING_LITERAL16=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_terminal673); if (state.failed) return retval;
STRING_LITERAL16=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_terminal695); if (state.failed) return retval;
if ( state.backtracking==1 ) {
terminals.add(((GrammarAST)retval.start));
@ -1279,9 +1302,9 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
}
break;
case 2 :
// CollectSymbols.g:252:7: TOKEN_REF
// CollectSymbols.g:255:7: TOKEN_REF
{
TOKEN_REF17=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_terminal688); if (state.failed) return retval;
TOKEN_REF17=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_terminal710); if (state.failed) return retval;
if ( state.backtracking==1 ) {
terminals.add(TOKEN_REF17);
@ -1309,31 +1332,31 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleref"
// CollectSymbols.g:262:1: ruleref : ({...}?r= RULE_REF | r= RULE_REF ) ;
// CollectSymbols.g:265:1: ruleref : ({...}?r= RULE_REF | r= RULE_REF ) ;
public final void ruleref() throws RecognitionException {
GrammarAST r=null;
try {
// CollectSymbols.g:264:5: ( ({...}?r= RULE_REF | r= RULE_REF ) )
// CollectSymbols.g:264:7: ({...}?r= RULE_REF | r= RULE_REF )
// CollectSymbols.g:267:5: ( ({...}?r= RULE_REF | r= RULE_REF ) )
// CollectSymbols.g:267:7: ({...}?r= RULE_REF | r= RULE_REF )
{
// CollectSymbols.g:264:7: ({...}?r= RULE_REF | r= RULE_REF )
int alt9=2;
int LA9_0 = input.LA(1);
// CollectSymbols.g:267:7: ({...}?r= RULE_REF | r= RULE_REF )
int alt10=2;
int LA10_0 = input.LA(1);
if ( (LA9_0==RULE_REF) ) {
int LA9_1 = input.LA(2);
if ( (LA10_0==RULE_REF) ) {
int LA10_1 = input.LA(2);
if ( ((inContext("DOT ..."))) ) {
alt9=1;
alt10=1;
}
else if ( (true) ) {
alt9=2;
alt10=2;
}
else {
if (state.backtracking>0) {state.failed=true; return ;}
NoViableAltException nvae =
new NoViableAltException("", 9, 1, input);
new NoViableAltException("", 10, 1, input);
throw nvae;
}
@ -1341,19 +1364,19 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
else {
if (state.backtracking>0) {state.failed=true; return ;}
NoViableAltException nvae =
new NoViableAltException("", 9, 0, input);
new NoViableAltException("", 10, 0, input);
throw nvae;
}
switch (alt9) {
switch (alt10) {
case 1 :
// CollectSymbols.g:264:9: {...}?r= RULE_REF
// CollectSymbols.g:267:9: {...}?r= RULE_REF
{
if ( !((inContext("DOT ..."))) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "ruleref", "inContext(\"DOT ...\")");
}
r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref725); if (state.failed) return ;
r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref747); if (state.failed) return ;
if ( state.backtracking==1 ) {
qualifiedRulerefs.add((GrammarAST)r.getParent());
}
@ -1361,9 +1384,9 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
}
break;
case 2 :
// CollectSymbols.g:266:8: r= RULE_REF
// CollectSymbols.g:269:8: r= RULE_REF
{
r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref738); if (state.failed) return ;
r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref760); if (state.failed) return ;
}
break;
@ -1396,6 +1419,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
protected DFA1 dfa1 = new DFA1(this);
protected DFA5 dfa5 = new DFA5(this);
static final String DFA1_eotS =
"\41\uffff";
static final String DFA1_eofS =
@ -1411,7 +1435,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
"\uffff\1\11\1\13\1\uffff\1\10\2\uffff\1\1\1\uffff\1\2\3\uffff\1"+
"\7";
static final String DFA1_specialS =
"\10\uffff\1\3\1\1\1\2\23\uffff\1\0\1\4\1\uffff}>";
"\10\uffff\1\4\1\3\1\0\23\uffff\1\1\1\2\1\uffff}>";
static final String[] DFA1_transitionS = {
"\1\6\1\uffff\1\16\4\uffff\1\1\11\uffff\1\7\1\uffff\1\20\1\17"+
"\12\uffff\1\3\4\uffff\1\14\10\uffff\1\2\2\uffff\1\12\1\10\3"+
@ -1488,36 +1512,6 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
int _s = s;
switch ( s ) {
case 0 :
int LA1_30 = input.LA(1);
int index1_30 = input.index();
input.rewind();
s = -1;
if ( ((inContext("GRAMMAR"))) ) {s = 28;}
else if ( ((inContext("RULE"))) ) {s = 32;}
input.seek(index1_30);
if ( s>=0 ) return s;
break;
case 1 :
int LA1_9 = input.LA(1);
int index1_9 = input.index();
input.rewind();
s = -1;
if ( ((inContext("RESULT ..."))) ) {s = 11;}
else if ( ((!inContext("TOKENS ASSIGN"))) ) {s = 21;}
input.seek(index1_9);
if ( s>=0 ) return s;
break;
case 2 :
int LA1_10 = input.LA(1);
@ -1532,22 +1526,22 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
input.seek(index1_10);
if ( s>=0 ) return s;
break;
case 3 :
int LA1_8 = input.LA(1);
case 1 :
int LA1_30 = input.LA(1);
int index1_8 = input.index();
int index1_30 = input.index();
input.rewind();
s = -1;
if ( (!(((inContext("RESULT ..."))))) ) {s = 20;}
if ( ((inContext("GRAMMAR"))) ) {s = 28;}
else if ( ((inContext("RESULT ..."))) ) {s = 11;}
else if ( ((inContext("RULE"))) ) {s = 32;}
input.seek(index1_8);
input.seek(index1_30);
if ( s>=0 ) return s;
break;
case 4 :
case 2 :
int LA1_31 = input.LA(1);
@ -1562,6 +1556,36 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
input.seek(index1_31);
if ( s>=0 ) return s;
break;
case 3 :
int LA1_9 = input.LA(1);
int index1_9 = input.index();
input.rewind();
s = -1;
if ( ((inContext("RESULT ..."))) ) {s = 11;}
else if ( ((!inContext("TOKENS ASSIGN"))) ) {s = 21;}
input.seek(index1_9);
if ( s>=0 ) return s;
break;
case 4 :
int LA1_8 = input.LA(1);
int index1_8 = input.index();
input.rewind();
s = -1;
if ( (!(((inContext("RESULT ..."))))) ) {s = 20;}
else if ( ((inContext("RESULT ..."))) ) {s = 11;}
input.seek(index1_8);
if ( s>=0 ) return s;
break;
}
if (state.backtracking>0) {state.failed=true; return -1;}
NoViableAltException nvae =
@ -1570,6 +1594,61 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
throw nvae;
}
}
static final String DFA5_eotS =
"\7\uffff";
static final String DFA5_eofS =
"\7\uffff";
static final String DFA5_minS =
"\1\4\1\2\1\uffff\1\4\1\2\1\3\1\uffff";
static final String DFA5_maxS =
"\1\145\1\3\1\uffff\3\145\1\uffff";
static final String DFA5_acceptS =
"\2\uffff\1\2\3\uffff\1\1";
static final String DFA5_specialS =
"\7\uffff}>";
static final String[] DFA5_transitionS = {
"\106\2\1\1\33\2",
"\1\3\1\2",
"",
"\142\4",
"\1\6\1\5\142\4",
"\1\2\142\6",
""
};
static final short[] DFA5_eot = DFA.unpackEncodedString(DFA5_eotS);
static final short[] DFA5_eof = DFA.unpackEncodedString(DFA5_eofS);
static final char[] DFA5_min = DFA.unpackEncodedStringToUnsignedChars(DFA5_minS);
static final char[] DFA5_max = DFA.unpackEncodedStringToUnsignedChars(DFA5_maxS);
static final short[] DFA5_accept = DFA.unpackEncodedString(DFA5_acceptS);
static final short[] DFA5_special = DFA.unpackEncodedString(DFA5_specialS);
static final short[][] DFA5_transition;
static {
int numStates = DFA5_transitionS.length;
DFA5_transition = new short[numStates][];
for (int i=0; i<numStates; i++) {
DFA5_transition[i] = DFA.unpackEncodedString(DFA5_transitionS[i]);
}
}
class DFA5 extends DFA {
public DFA5(BaseRecognizer recognizer) {
this.recognizer = recognizer;
this.decisionNumber = 5;
this.eot = DFA5_eot;
this.eof = DFA5_eof;
this.min = DFA5_min;
this.max = DFA5_max;
this.accept = DFA5_accept;
this.special = DFA5_special;
this.transition = DFA5_transition;
}
public String getDescription() {
return "145:22: ( ^( RULEMODIFIERS (m= . )+ ) )?";
}
}
public static final BitSet FOLLOW_globalScope_in_topdown97 = new BitSet(new long[]{0x0000000000000002L});
@ -1600,34 +1679,35 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
public static final BitSet FOLLOW_ID_in_tokensSection312 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000008L});
public static final BitSet FOLLOW_STRING_LITERAL_in_tokensSection314 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_ID_in_tokensSection328 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_in_rule350 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_rule354 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_set_in_setAlt379 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_in_finishRule403 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_AT_in_ruleNamedAction419 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_ruleNamedAction421 = new BitSet(new long[]{0x0000000000010000L});
public static final BitSet FOLLOW_ACTION_in_ruleNamedAction423 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_ACTION_in_ruleAction443 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_CATCH_in_exceptionHandler459 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ARG_ACTION_in_exceptionHandler461 = new BitSet(new long[]{0x0000000000010000L});
public static final BitSet FOLLOW_ACTION_in_exceptionHandler463 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_FINALLY_in_finallyClause480 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ACTION_in_finallyClause482 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_ARG_ACTION_in_ruleArg502 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RETURNS_in_ruleReturns519 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ARG_ACTION_in_ruleReturns521 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_SCOPE_in_ruleScopeSpec544 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ACTION_in_ruleScopeSpec546 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_SCOPE_in_ruleScopeSpec559 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_ruleScopeSpec563 = new BitSet(new long[]{0x0000000000000008L,0x0000000000400000L});
public static final BitSet FOLLOW_set_in_rewriteElement591 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ASSIGN_in_labeledElement627 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_labeledElement631 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_PLUS_ASSIGN_in_labeledElement643 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_labeledElement647 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_STRING_LITERAL_in_terminal673 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_TOKEN_REF_in_terminal688 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_REF_in_ruleref725 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_REF_in_ruleref738 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_in_rule357 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_rule361 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_RULEMODIFIERS_in_rule365 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_set_in_setAlt401 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_in_finishRule425 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_AT_in_ruleNamedAction441 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_ruleNamedAction443 = new BitSet(new long[]{0x0000000000010000L});
public static final BitSet FOLLOW_ACTION_in_ruleNamedAction445 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_ACTION_in_ruleAction465 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_CATCH_in_exceptionHandler481 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ARG_ACTION_in_exceptionHandler483 = new BitSet(new long[]{0x0000000000010000L});
public static final BitSet FOLLOW_ACTION_in_exceptionHandler485 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_FINALLY_in_finallyClause502 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ACTION_in_finallyClause504 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_ARG_ACTION_in_ruleArg524 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RETURNS_in_ruleReturns541 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ARG_ACTION_in_ruleReturns543 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_SCOPE_in_ruleScopeSpec566 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ACTION_in_ruleScopeSpec568 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_SCOPE_in_ruleScopeSpec581 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_ruleScopeSpec585 = new BitSet(new long[]{0x0000000000000008L,0x0000000000400000L});
public static final BitSet FOLLOW_set_in_rewriteElement613 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ASSIGN_in_labeledElement649 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_labeledElement653 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_PLUS_ASSIGN_in_labeledElement665 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_labeledElement669 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_STRING_LITERAL_in_terminal695 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_TOKEN_REF_in_terminal710 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_REF_in_ruleref747 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_REF_in_ruleref760 = new BitSet(new long[]{0x0000000000000002L});
}

View File

@ -318,11 +318,23 @@ public class DOTGenerator {
buf.append('s');
buf.append(s.stateNumber);
if ( s.isAcceptState ) {
if ( s instanceof LexerState ) {
buf.append("=>");
for (Rule r : ((LexerState)s).matchesRules) {
buf.append(" "+r.name);
}
}
else {
buf.append("=>"+s.getUniquelyPredictedAlt());
}
}
if ( Tool.internalOption_ShowNFAConfigsInDFA ) {
Set<Integer> alts = ((DFAState)s).getAltSet();
if ( alts!=null ) {
if ( s instanceof LexerState ) {
buf.append("\\n");
buf.append( ((LexerState)s).nfaStates.toString() );
}
else if ( alts!=null ) {
buf.append("\\n");
// separate alts
List<Integer> altList = new ArrayList<Integer>();

View File

@ -50,6 +50,7 @@ public class Grammar implements AttributeResolver {
public Grammar parent;
public List<Grammar> importedGrammars;
public Map<String, Rule> rules = new LinkedHashMap<String, Rule>();
int ruleNumber = 1;
/** The NFA that represents the grammar with edges labelled with tokens
* or epsilon. It is more suitable to analysis than an AST representation.
@ -211,7 +212,10 @@ public class Grammar implements AttributeResolver {
}
}
public void defineRule(Rule r) { rules.put(r.name, r); }
public void defineRule(Rule r) {
rules.put(r.name, r);
r.index = ruleNumber++;
}
public Rule getRule(String name) {
Rule r = rules.get(name);

View File

@ -44,6 +44,11 @@ public class GrammarAST extends CommonTree {
return nodes;
}
// @Override
// public boolean equals(Object obj) {
// return super.equals(obj);
// }
@Override
public Tree dupNode() {
return new GrammarAST(this);

View File

@ -40,6 +40,8 @@ public class Rule implements AttributeResolver {
}};
public String name;
public List<GrammarAST> modifiers;
public GrammarASTWithOptions ast;
public AttributeDict args;
public AttributeDict retvals;
@ -70,6 +72,9 @@ public class Rule implements AttributeResolver {
public Alternative[] alt;
/** All rules have unique index 1..n */
public int index;
public Rule(Grammar g, String name, GrammarASTWithOptions ast, int numberOfAlts) {
this.g = g;
this.name = name;
@ -201,6 +206,14 @@ public class Rule implements AttributeResolver {
return Grammar.grammarAndLabelRefTypeToScope.get(grammarLabelKey);
}
public boolean isFragment() {
if ( modifiers==null ) return false;
for (GrammarAST a : modifiers) {
if ( a.getText().equals("fragment") ) return true;
}
return false;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder();