got sem pred analysis in for stack limited NFA conversion

[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 6748]
This commit is contained in:
parrt 2010-03-11 17:44:12 -08:00
parent 18a69d4550
commit bf9c0dd5a2
17 changed files with 765 additions and 208 deletions

View File

@ -1,3 +1,3 @@
state(state, useBox, name) ::= <<
node [fontsize=11, shape = <if(useBox)>box<else>circle, fixedsize=true, width=.4<endif>]; <name>
state(state, name) ::= <<
node [fontsize=11, <if(useBox)>shape=box, fixedsize=false<else>shape=circle, fixedsize=true, width=.4<endif>]; <name>
>>

View File

@ -1,3 +1,3 @@
stopstate(name,useBox) ::= <<
node [fontsize=11, shape = <if(useBox)>polygon,sides=4,peripheries=2<else>doublecircle, fixedsize=true, width=.6<endif>]; <name>
stopstate(name) ::= <<
node [fontsize=11, <if(useBox)>shape=polygon,sides=4,peripheries=2,fixedsize=false<else>shape=doublecircle, fixedsize=true, width=.6<endif>]; <name>
>>

View File

@ -26,14 +26,17 @@ public class AnalysisPipeline {
public DFA createDFA(DecisionState s) {
// TRY APPROXIMATE LL(*) ANALYSIS
NFAToApproxDFAConverter approxConv = new NFAToApproxDFAConverter(g, s);
StackLimitedNFAToDFAConverter approxConv = new StackLimitedNFAToDFAConverter(g, s);
DFA dfa = approxConv.createDFA();
System.out.println("DFA="+dfa);
if ( dfa.isDeterministic() ) return dfa;
// REAL LL(*) ANALYSIS IF THAT FAILS
NFAToExactDFAConverter conv = new NFAToExactDFAConverter(g, s);
RecursionLimitedNFAToDFAConverter conv = new RecursionLimitedNFAToDFAConverter(g, s);
// DFA dfa = conv.createDFA();
// System.out.println("DFA="+dfa);
// DFAVerifier verifier = new DFAVerifier(dfa, approxConv);
// verifier.analyze();
return dfa;
}
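
The pipeline shape is: run the cheap, stack-limited (approximate) conversion first and fall back to the recursion-limited exact analysis only when the approximate DFA is not deterministic. A minimal standalone sketch of that fallback pattern, using hypothetical Dfa/Converter stand-ins rather than the classes above:

// Illustrative fallback pattern only; Dfa and Converter are hypothetical stand-ins.
interface Dfa { boolean isDeterministic(); }
interface Converter { Dfa createDFA(); }

class AnalysisPipelineSketch {
    static Dfa analyze(Converter stackLimited, Converter recursionLimited) {
        Dfa dfa = stackLimited.createDFA();      // cheap approximate analysis
        if (dfa.isDeterministic()) return dfa;   // good enough; done
        return recursionLimited.createDFA();     // pay for the full LL(*) analysis
    }
}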

View File

@ -0,0 +1,30 @@
package org.antlr.v4.analysis;
import org.antlr.v4.automata.DFA;
import java.util.HashSet;
import java.util.Set;
/** Detect imperfect DFA:
*
* 1. nonreduced DFA (dangling states)
* 2. unreachable stop states
* 3. nondeterministic states
*/
public class DFAVerifier {
DFA dfa;
StackLimitedNFAToDFAConverter converter;
public DFAVerifier(DFA dfa, StackLimitedNFAToDFAConverter converter) {
this.dfa = dfa;
this.converter = converter;
}
public void analyze() {
}
public Set<Integer> getUnreachableAlts() {
return new HashSet<Integer>();
}
}
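
analyze() is still a stub in this commit. Below is a hedged sketch of the three checks named in the class comment, written against placeholder inputs rather than the real DFA/converter fields:

import java.util.HashSet;
import java.util.Set;

// Placeholder names and inputs; this only illustrates the three checks,
// not the eventual DFAVerifier implementation.
class DfaHealthSketch {
    Set<Integer> unreachableAlts = new HashSet<Integer>();
    boolean hasDanglingStates;
    boolean hasNondeterministicStates;

    void analyze(Set<?> danglingStates,      // 1. nonreduced DFA (dangling states)
                 Object[] altToAcceptState,  // 2. alts whose stop state was never reached
                 Set<?> nondetStates)        // 3. states with unresolved conflicts
    {
        hasDanglingStates = !danglingStates.isEmpty();
        for (int alt = 1; alt < altToAcceptState.length; alt++) {
            if (altToAcceptState[alt] == null) unreachableAlts.add(alt);
        }
        hasNondeterministicStates = !nondetStates.isEmpty();
    }
}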

View File

@ -21,13 +21,8 @@ public class NFAConfig {
/** The set of semantic predicates associated with this NFA
* configuration. The predicates were found on the way to
* the associated NFA state in this syntactic context.
* Set<AST>: track nodes in grammar containing the predicate
* for error messages and such (nice to know where the predicate
* came from in case of duplicates etc...). By using a set,
* the equals() method will correctly show {pred1,pred2} as equals()
* to {pred2,pred1}.
*/
//public SemanticContext semanticContext = SemanticContext.EMPTY_SEMANTIC_CONTEXT;
public SemanticContext semanticContext = SemanticContext.EMPTY_SEMANTIC_CONTEXT;
/** Indicate that this configuration has been resolved and no further
* DFA processing should occur with it. Essentially, this is used
@ -45,16 +40,17 @@ public class NFAConfig {
* nondeterministic configurations (as it does for "resolved" field)
* that have enough predicates to resolve the conflict.
*/
//protected boolean resolveWithPredicate;
protected boolean resolvedWithPredicate;
public NFAConfig(NFAState state,
int alt,
NFAContext context)
NFAContext context,
SemanticContext semanticContext)
{
this.state = state;
this.alt = alt;
this.context = context;
//this.semanticContext = semanticContext;
this.semanticContext = semanticContext;
}
/** An NFA configuration is equal to another if both have
@ -70,8 +66,8 @@ public class NFAConfig {
NFAConfig other = (NFAConfig)o;
return this.state==other.state &&
this.alt==other.alt &&
this.context.equals(other.context);
// this.semanticContext.equals(other.semanticContext)
this.context.equals(other.context) &&
this.semanticContext.equals(other.semanticContext);
}
public int hashCode() {
@ -93,16 +89,16 @@ public class NFAConfig {
buf.append("|");
buf.append(alt);
}
if ( context !=null ) {
if ( context!=null && context!= StackLimitedNFAToDFAConverter.NFA_EMPTY_STACK_CONTEXT ) {
buf.append("|");
buf.append(context);
}
if ( resolved ) {
buf.append("|resolved");
}
// if ( resolveWithPredicate ) {
// buf.append("|resolveWithPredicate");
// }
if (resolvedWithPredicate) {
buf.append("|resolveWithPredicate");
}
return buf.toString();
}
}
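
Note the design choice here: equals() now includes the semantic context, so two configurations that reach the same NFA state in the same stack context along differently predicated paths stay distinct during closure. A toy value class showing the effect (strings stand in for SemanticContext; this is not the real NFAConfig):

import java.util.Objects;

// Toy (state|alt|context|pred) tuple for illustration only.
final class ConfigSketch {
    final int state, alt;
    final String context, pred;
    ConfigSketch(int state, int alt, String context, String pred) {
        this.state = state; this.alt = alt; this.context = context; this.pred = pred;
    }
    @Override public boolean equals(Object o) {
        if (!(o instanceof ConfigSketch)) return false;
        ConfigSketch c = (ConfigSketch)o;
        return state == c.state && alt == c.alt
            && context.equals(c.context) && pred.equals(c.pred);
    }
    @Override public int hashCode() { return Objects.hash(state, alt, context, pred); }

    public static void main(String[] args) {
        ConfigSketch a = new ConfigSketch(7, 1, "$", "{p1}?");
        ConfigSketch b = new ConfigSketch(7, 1, "$", "{p2}?");
        // Same (state|alt|context) but different predicates: not equal, so a
        // closure-busy set keeps both paths instead of collapsing them.
        System.out.println(a.equals(b)); // false
    }
}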

View File

@ -46,60 +46,6 @@ import org.antlr.v4.automata.NFAState;
* on the path from this node thru the parent pointers to the root.
*/
public class NFAContext {
/** This is similar to Bermudez's m constant in his LAR(m) where
* you bound the stack so your states don't explode. The main difference
* is that I bound only recursion on the stack, not the simple stack size.
* This looser constraint will let the conversion roam further to find
* lookahead to resolve a decision.
*
* Bermudez's m operates differently as it is his LR stack depth.
* I'm pretty sure it therefore includes all stack symbols. Here I
* restrict the size of an NFA configuration to be finite because a
* stack component may mention the same NFA invocation state at
* most m times. Hence, the number of DFA states will not grow forever.
* With recursive rules like
*
* e : '(' e ')' | INT ;
*
* you could chase your tail forever if somebody said "s : e '.' | e ';' ;"
* This constant prevents new states from being created after a stack gets
* "too big". Actually (12/14/2007) I realize that this example is
* trapped by the non-LL(*) detector for recursion in > 1 alt. Here is
* an example that trips stack overflow:
*
* s : a Y | A A A A A X ; // force recursion past m=4
* a : A a | Q;
*
* If that were:
*
* s : a Y | A+ X ;
*
* it could loop forever.
*
* Imagine doing a depth-first search on the e DFA...as you chase an input
* sequence you can recurse to the same rule such as e above. You'd have a
* chain of ((((. When you get to some point, you have to give up. The
* states in the chain will have longer and longer NFA config stacks.
* Must limit size.
*
* max=0 implies you cannot ever jump to another rule during closure.
* max=1 implies you can make as many calls as you want--you just
* can't ever visit a state that is on your rule invocation stack.
* I.e., you cannot ever recurse.
* max=2 implies you are able to recurse once (i.e., call a rule twice
* from the same place).
*
* This tracks recursion to a rule specific to an invocation site!
* It does not detect multiple calls to a rule from different rule
* invocation states. We are guaranteed to terminate because the
* stack can only grow as big as the number of NFA states * max.
*
* I noticed that the Java grammar didn't work with max=1, but did with
* max=4. Let's set to 4. Recursion is sometimes needed to resolve some
* fixed lookahead decisions.
*/
public static int MAX_SAME_RULE_INVOCATIONS_PER_NFA_CONFIG_STACK = 4;
public NFAContext parent;
/** The NFA state following state that invoked another rule's start state
@ -133,9 +79,6 @@ public class NFAContext {
* same call stack; walk upwards to the root.
* Recall that the root sentinel node has no invokingStates and no parent.
* Note that you may be comparing contexts in different alt trees.
*
* The hashCode is now cheap as it's computed once upon each context
* push on the stack. Use it to make equals() more efficient.
*/
public boolean equals(Object o) {
NFAContext other = ((NFAContext)o);
@ -251,6 +194,19 @@ public class NFAContext {
return h;
}
/** How many rule invocations in this context? I.e., how many
* elements in stack (path to root, not including root placeholder)?
*/
public int depth() {
int n = 0;
NFAContext sp = this;
while ( sp != StackLimitedNFAToDFAConverter.NFA_EMPTY_STACK_CONTEXT ) {
n++;
sp = sp.parent;
}
return n;
}
/** A context is empty if there is no parent; meaning nobody pushed
* anything on the call stack.
*/

View File

@ -0,0 +1,294 @@
package org.antlr.v4.analysis;
import org.antlr.v4.automata.DFAState;
import org.antlr.v4.misc.BitSet;
import java.util.*;
/** Resolves nondeterministic DFA states using the semantic predicates collected on their NFA configurations. */
public class PredicateResolver {
StackLimitedNFAToDFAConverter converter;
public PredicateResolver(StackLimitedNFAToDFAConverter converter) {
this.converter = converter;
}
/** See if a set of nondeterministic alternatives can be disambiguated
* with the semantic predicate contexts of the alternatives.
*
* Without semantic predicates, syntactic conflicts are resolved
* by simply choosing the first viable alternative. In the
* presence of semantic predicates, you can resolve the issue by
* evaluating boolean expressions at run time. During analysis,
* this amounts to suppressing grammar error messages to the
* developer. NFA configurations are always marked as "to be
* resolved with predicates" so that DFA.reach() will know to ignore
* these configurations and add predicate transitions to the DFA
* after adding edge labels.
*
* During analysis, we can simply make sure that for n
* ambiguously predicted alternatives there are at least n-1
* unique predicate sets. The nth alternative can be predicted
* with "not" the "or" of all other predicates. NFA configurations without
* predicates are assumed to have the default predicate of
* "true" from a user point of view. When true is combined via || with
* another predicate, the predicate is a tautology and must be removed
* from consideration for disambiguation:
*
* a : b | B ; // hoisting p1||true out of rule b, yields no predicate
* b : {p1}? B | B ;
*
* This is done down in getPredicatesPerNonDeterministicAlt().
*/
protected boolean tryToResolveWithSemanticPredicates(DFAState d,
Set<Integer> nondeterministicAlts)
{
Map<Integer, SemanticContext> altToPredMap =
getPredicatesPerNonDeterministicAlt(d, nondeterministicAlts);
if ( altToPredMap.size()==0 ) return false;
//System.out.println("nondeterministic alts with predicates: "+altToPredMap);
// TODO: do we need?
// dfa.probe.reportAltPredicateContext(d, altToPredMap);
if ( nondeterministicAlts.size()-altToPredMap.size()>1 ) {
// too few predicates to resolve; just return
// TODO: actually do we need to gen error here?
return false;
}
// Handle case where 1 predicate is missing
// Case 1. Semantic predicates
// If the missing pred is on the nth alt, !(union of other preds)==true
// so we can avoid that computation. If the naked alt is the ith, we must
// test it with !(union) since semantically predicated alts are order
// independent
// Case 2: Syntactic predicates
// The naked alt is always assumed to be true as the order of
// alts is the order of precedence. The naked alt will be a tautology
// anyway as it's !(union of other preds). This implies
// that there is no such thing as a "no viable alt" for synpred edges
// emanating from a DFA state.
if ( altToPredMap.size()==nondeterministicAlts.size()-1 ) {
// if there are n-1 predicates for n nondeterministic alts, can fix
BitSet ndSet = BitSet.of(nondeterministicAlts);
BitSet predSet = BitSet.of(altToPredMap);
int nakedAlt = ndSet.subtract(predSet).getSingleElement();
SemanticContext nakedAltPred = null;
if ( nakedAlt == Collections.max(nondeterministicAlts) ) {
// the naked alt is the last nondet alt and will be the default clause
nakedAltPred = new SemanticContext.TruePredicate();
}
else {
// pretend naked alternative is covered with !(union other preds)
// unless it's a synpred since those have precedence same
// as alt order
SemanticContext unionOfPredicatesFromAllAlts =
getUnionOfPredicates(altToPredMap);
//System.out.println("all predicates "+unionOfPredicatesFromAllAlts);
if ( unionOfPredicatesFromAllAlts.isSyntacticPredicate() ) {
nakedAltPred = new SemanticContext.TruePredicate();
}
else {
nakedAltPred =
SemanticContext.not(unionOfPredicatesFromAllAlts);
}
}
//System.out.println("covering naked alt="+nakedAlt+" with "+nakedAltPred);
altToPredMap.put(nakedAlt, nakedAltPred);
// set all config with alt=nakedAlt to have the computed predicate
int numConfigs = d.nfaConfigs.size();
for (int i = 0; i < numConfigs; i++) {
NFAConfig configuration = (NFAConfig)d.nfaConfigs.get(i);
if ( configuration.alt == nakedAlt ) {
configuration.semanticContext = nakedAltPred;
}
}
}
if ( altToPredMap.size()==nondeterministicAlts.size() ) {
// RESOLVE CONFLICT by picking one NFA configuration for each alt
// and setting its resolvedWithPredicate flag
// First, prevent a recursion warning on this state due to
// pred resolution
// if ( d.abortedDueToRecursionOverflow ) {
// d.dfa.probe.removeRecursiveOverflowState(d);
// }
for (NFAConfig c : d.nfaConfigs) {
SemanticContext semCtx = altToPredMap.get(c.alt);
if ( semCtx!=null ) {
// resolve (first found) with pred
// and remove alt from problem list
c.resolvedWithPredicate = true;
c.semanticContext = semCtx; // reset to combined
altToPredMap.remove(c.alt);
// notify grammar that we've used the preds contained in semCtx
// if ( semCtx.isSyntacticPredicate() ) {
// dfa.nfa.grammar.synPredUsedInDFA(dfa, semCtx);
// }
}
else if ( nondeterministicAlts.contains(c.alt) ) {
// resolve all other configurations for nondeterministic alts
// for which there is no predicate context by turning it off
c.resolved = true;
}
}
return true;
}
return false; // couldn't fix the problem with predicates
}
/** Return a mapping from nondeterministic alt to combined list of predicates.
* If both (s|i|semCtx1) and (t|i|semCtx2) exist, then the proper predicate
* for alt i is semCtx1||semCtx2 because you have arrived at this single
* DFA state via two NFA paths, both of which have semantic predicates.
* We ignore deterministic alts because syntax alone is sufficient
* to predict those. Do not include their predicates.
*
* Alts with no predicate are assumed to have {true}? pred.
*
* When combining via || with "true", all predicates are removed from
* consideration since the expression will always be true and hence
* not tell us how to resolve anything. So, if any NFA configuration
* in this DFA state does not have a semantic context, the alt cannot
* be resolved with a predicate.
*
* If nonnull, incidentEdgeLabel tells us what NFA transition label
* we did a reach on to compute state d. d may have insufficient
* preds, so we really want this for the error message.
*/
protected Map<Integer, SemanticContext> getPredicatesPerNonDeterministicAlt(
DFAState d,
Set<Integer> nondeterministicAlts)
{
// map alt to combined SemanticContext
Map<Integer, SemanticContext> altToPredicateContextMap =
new HashMap<Integer, SemanticContext>();
// init the alt to predicate set map
Map<Integer, Set<SemanticContext>> altToSetOfContextsMap =
new HashMap<Integer, Set<SemanticContext>>();
for (int alt : nondeterministicAlts) {
altToSetOfContextsMap.put(alt, new HashSet<SemanticContext>());
}
// Create a unique set of predicates from configs
// Also, track the alts with at least one uncovered configuration
// (one w/o a predicate); tracks tautologies like p1||true
//Map<Integer, Set<Token>> altToLocationsReachableWithoutPredicate = new HashMap<Integer, Set<Token>>();
Set<Integer> nondetAltsWithUncoveredConfiguration = new HashSet<Integer>();
//System.out.println("configs="+d.nfaConfigs);
//System.out.println("configs with preds?"+d.atLeastOneConfigurationHasAPredicate);
//System.out.println("configs with preds="+d.configurationsWithPredicateEdges);
for (NFAConfig c : d.nfaConfigs) {
// if alt is nondeterministic, combine its predicates
if ( nondeterministicAlts.contains(c.alt) ) {
// if there is a predicate for this NFA configuration, OR in
if ( c.semanticContext != SemanticContext.EMPTY_SEMANTIC_CONTEXT ) {
Set<SemanticContext> predSet = altToSetOfContextsMap.get(c.alt);
predSet.add(c.semanticContext);
}
else {
// if no predicate, but it's part of nondeterministic alt
// then at least one path exists not covered by a predicate.
// must remove predicate for this alt; track incomplete alts
nondetAltsWithUncoveredConfiguration.add(c.alt);
}
}
}
// Walk semantic contexts for nondet alts, ORing them together
// Also, track the list of incompletely covered alts: those alts
// with at least 1 predicate and at least one configuration w/o a
// predicate. We want this in order to report to the decision probe.
List<Integer> incompletelyCoveredAlts = new ArrayList<Integer>();
for (int alt : nondeterministicAlts) {
Set<SemanticContext> contextsForThisAlt = altToSetOfContextsMap.get(alt);
if ( nondetAltsWithUncoveredConfiguration.contains(alt) ) { // >= 1 config has no ctx
if ( contextsForThisAlt.size()>0 ) { // && at least one pred
incompletelyCoveredAlts.add(alt); // this alt is incompletely covered
}
continue; // don't include this alt; at least 1 config has no ctx
}
SemanticContext combinedContext = null;
for (Iterator itrSet = contextsForThisAlt.iterator(); itrSet.hasNext();) {
SemanticContext ctx = (SemanticContext) itrSet.next();
combinedContext =
SemanticContext.or(combinedContext,ctx);
}
altToPredicateContextMap.put(alt, combinedContext);
}
if ( incompletelyCoveredAlts.size()>0 ) {
/*
System.out.println("prob in dec "+dfa.decisionNumber+" state="+d);
FASerializer serializer = new FASerializer(dfa.nfa.grammar);
String result = serializer.serialize(dfa.startState);
System.out.println("dfa: "+result);
System.out.println("incomplete alts: "+incompletelyCoveredAlts);
System.out.println("nondet="+nondeterministicAlts);
System.out.println("nondetAltsWithUncoveredConfiguration="+ nondetAltsWithUncoveredConfiguration);
System.out.println("altToCtxMap="+altToSetOfContextsMap);
System.out.println("altToPredicateContextMap="+altToPredicateContextMap);
*/
// TODO: add back if we're using in error messages
// for (NFAConfig c : d.nfaConfigs) {
// if ( incompletelyCoveredAlts.contains(c.alt) &&
// c.semanticContext == SemanticContext.EMPTY_SEMANTIC_CONTEXT )
// {
// NFAState s = c.state;
// /*
// System.out.print("nondet config w/o context "+configuration+
// " incident "+(s.incidentEdgeLabel!=null?s.incidentEdgeLabel.toString(dfa.nfa.grammar):null));
// if ( s.associatedASTNode!=null ) {
// System.out.print(" token="+s.associatedASTNode.token);
// }
// else System.out.println();
// */
// // We want to report getting to an NFA state with an
// // incoming label, unless it's EOF, w/o a predicate.
// if ( s.incidentEdgeLabel!=null && s.incidentEdgeLabel.label != Label.EOF ) {
// if ( s.ast==null || s.ast.token==null ) {
// ErrorManager.internalError("no AST/token for nonepsilon target w/o predicate");
// }
// else {
// Set<Token> locations = altToLocationsReachableWithoutPredicate.get(c.alt);
// if ( locations==null ) {
// locations = new HashSet<Token>();
// altToLocationsReachableWithoutPredicate.put(c.alt, locations);
// }
// locations.add(s.ast.token);
// }
// }
// }
// }
converter.incompletelyCoveredStates.add(d);
}
return altToPredicateContextMap;
}
/** OR together all predicates from the alts. Note that the predicate
* for an alt could itself be a combination of predicates.
*/
public SemanticContext getUnionOfPredicates(Map altToPredMap) {
Iterator iter;
SemanticContext unionOfPredicatesFromAllAlts = null;
iter = altToPredMap.values().iterator();
while ( iter.hasNext() ) {
SemanticContext semCtx = (SemanticContext)iter.next();
if ( unionOfPredicatesFromAllAlts==null ) {
unionOfPredicatesFromAllAlts = semCtx;
}
else {
unionOfPredicatesFromAllAlts =
SemanticContext.or(unionOfPredicatesFromAllAlts,semCtx);
}
}
return unionOfPredicatesFromAllAlts;
}
}
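
To make the n-1 rule concrete: with nondeterministic alts {1,2,3} and predicates only on alts 1 and 3, alt 2 is the "naked" alt and, since it is not the maximum alt, it is covered with !(p1||p3). A self-contained sketch with string predicates standing in for SemanticContext (an illustration of the rule, not the implementation above):

import java.util.*;

class NakedAltSketch {
    public static void main(String[] args) {
        Set<Integer> nondetAlts = new TreeSet<Integer>(Arrays.asList(1, 2, 3));
        Map<Integer, String> altToPred = new TreeMap<Integer, String>();
        altToPred.put(1, "p1");
        altToPred.put(3, "p3");        // alt 2 has no predicate: the "naked" alt

        if (altToPred.size() == nondetAlts.size() - 1) {
            Set<Integer> naked = new TreeSet<Integer>(nondetAlts);
            naked.removeAll(altToPred.keySet());
            int nakedAlt = naked.iterator().next();
            String pred;
            if (nakedAlt == Collections.max(nondetAlts)) {
                pred = "true";         // last alt just becomes the default clause
            } else {
                pred = "!(" + String.join("||", altToPred.values()) + ")";
            }
            altToPred.put(nakedAlt, pred);
        }
        System.out.println(altToPred); // {1=p1, 2=!(p1||p3), 3=p3}
    }
}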

View File

@ -72,13 +72,77 @@ import java.util.List;
*
* This case also catches infinite left recursion.
*/
public class NFAToExactDFAConverter extends NFAToApproxDFAConverter {
public class RecursionLimitedNFAToDFAConverter extends StackLimitedNFAToDFAConverter {
/** This is similar to Bermudez's m constant in his LAR(m) where
* you bound the stack so your states don't explode. The main difference
* is that I bound only recursion on the stack, not the simple stack size.
* This looser constraint will let the conversion roam further to find
* lookahead to resolve a decision.
*
* Bermudez's m operates differently as it is his LR stack depth.
* I'm pretty sure it therefore includes all stack symbols. Here I
* restrict the size of an NFA configuration to be finite because a
* stack component may mention the same NFA invocation state at
* most m times. Hence, the number of DFA states will not grow forever.
* With recursive rules like
*
* e : '(' e ')' | INT ;
*
* you could chase your tail forever if somebody said "s : e '.' | e ';' ;"
* This constant prevents new states from being created after a stack gets
* "too big". Actually (12/14/2007) I realize that this example is
* trapped by the non-LL(*) detector for recursion in > 1 alt. Here is
* an example that trips stack overflow:
*
* s : a Y | A A A A A X ; // force recursion past m=4
* a : A a | Q;
*
* If that were:
*
* s : a Y | A+ X ;
*
* it could loop forever.
*
* Imagine doing a depth-first search on the e DFA...as you chase an input
* sequence you can recurse to the same rule such as e above. You'd have a
* chain of ((((. When you get to some point, you have to give up. The
* states in the chain will have longer and longer NFA config stacks.
* Must limit size.
*
* max=0 implies you cannot ever jump to another rule during closure.
* max=1 implies you can make as many calls as you want--you just
* can't ever visit a state that is on your rule invocation stack.
* I.e., you cannot ever recurse.
* max=2 implies you are able to recurse once (i.e., call a rule twice
* from the same place).
*
* This tracks recursion to a rule specific to an invocation site!
* It does not detect multiple calls to a rule from different rule
* invocation states. We are guaranteed to terminate because the
* stack can only grow as big as the number of NFA states * max.
*
* I noticed that the Java grammar didn't work with max=1, but did with
* max=4. Let's set to 4. Recursion is sometimes needed to resolve some
* fixed lookahead decisions.
*/
public static int DEFAULT_MAX_SAME_RULE_INVOCATIONS_PER_NFA_CONFIG_STACK = 4;
/** Max recursion depth.
* The approximate converter sets stack size like Bermudez: m=1 in that case.
* The full algorithm limits recursion, not overall stack size. That's more
* like LL(k) analysis, which can have any stack size but will recurse
* a max of k times since it can only see k tokens; each recursion pumps
* another token. Limiting stack size to m={0,1} lets us convert
* recursion to loops. Use the r constant, not m, for recursion depth?
*/
public int r = DEFAULT_MAX_SAME_RULE_INVOCATIONS_PER_NFA_CONFIG_STACK;
/** Track, per alt, whether recursion is discovered during NFA-to-DFA
* conversion; recursion in >1 alt implies the decision is nonregular.
*/
public IntSet recursiveAltSet = new BitSet();
public NFAToExactDFAConverter(Grammar g, DecisionState nfaStartState) {
public RecursionLimitedNFAToDFAConverter(Grammar g, DecisionState nfaStartState) {
super(g, nfaStartState);
}
@ -86,23 +150,6 @@ public class NFAToExactDFAConverter extends NFAToApproxDFAConverter {
void reach(DFAState d) {
super.reach(d);
// if ( !d.isResolvedWithPredicates() && d.getNumberOfTransitions()==0 ) {
// //System.out.println("dangling DFA state "+d+"\nAfter reach / closures:\n"+dfa);
// // TODO: can fixed lookahead hit a dangling state case?
// // TODO: yes, with left recursion
// //System.err.println("dangling state alts: "+d.getAltSet());
// dfa.probe.reportDanglingState(d);
// // turn off all configurations except for those associated with
// // min alt number; somebody has to win else some input will not
// // predict any alt.
// int minAlt = Resolver.resolveByPickingMinAlt(d, null);
// // force it to be an accept state
// // don't call convertToAcceptState() which merges stop states.
// // other states point at us; don't want them pointing to dead states
// d.isAcceptState = true; // might be adding new accept state for alt
// dfa.defineAcceptState(minAlt, d);
// }
//
// // Check to see if we need to add any semantic predicate transitions
// if ( d.isResolvedWithPredicates() ) {
// addPredicateTransitions(d);
@ -131,13 +178,17 @@ public class NFAToExactDFAConverter extends NFAToApproxDFAConverter {
* from it that points to every possible following node. This case
* is conveniently handled then by the common closure case.
*/
void ruleStopStateClosure(NFAState s, int altNum, NFAContext context, List<NFAConfig> configs) {
if ( context.parent!=null ) {
void ruleStopStateClosure(NFAState s, int altNum, NFAContext context,
SemanticContext semanticContext,
boolean collectPredicates,
List<NFAConfig> configs)
{
if ( context != NFA_EMPTY_STACK_CONTEXT ) {
NFAContext newContext = context.parent; // "pop" invoking state
closure(context.returnState, altNum, newContext, configs);
closure(context.returnState, altNum, newContext, semanticContext, collectPredicates, configs);
}
else {
commonClosure(s, altNum, context, configs); // do global FOLLOW
commonClosure(s, altNum, context, semanticContext, collectPredicates, configs); // do global FOLLOW
}
}
@ -156,7 +207,11 @@ public class NFAToExactDFAConverter extends NFAToApproxDFAConverter {
*
* 3. Preds?
*/
void commonClosure(NFAState s, int altNum, NFAContext context, List<NFAConfig> configs) {
void commonClosure(NFAState s, int altNum, NFAContext context,
SemanticContext semanticContext,
boolean collectPredicates,
List<NFAConfig> configs)
{
int n = s.getNumberOfTransitions();
for (int i=0; i<n; i++) {
Transition t = s.transition(i);
@ -169,6 +224,11 @@ public class NFAToExactDFAConverter extends NFAToApproxDFAConverter {
throw new RuntimeException("recursion in >1 alt: "+recursiveAltSet);
}
}
// Detect an attempt to recurse too high
// if this context has hit the max recursions for p.stateNumber,
// don't allow it to enter p.stateNumber again
if ( depth >= r ) {
}
// first create a new context and push onto call tree,
// recording the fact that we are invoking a rule and
// from which state (case 2 below will get the following state
@ -176,10 +236,10 @@ public class NFAToExactDFAConverter extends NFAToApproxDFAConverter {
// pushed on the stack).
NFAContext newContext = new NFAContext(context, retState);
// traverse epsilon edge to new rule
closure(t.target, altNum, newContext, configs);
closure(t.target, altNum, newContext, semanticContext, collectPredicates, configs);
}
else if ( t.isEpsilon() ) {
closure(t.target, altNum, context, configs);
closure(t.target, altNum, context, semanticContext, collectPredicates, configs);
}
}
}
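
The bound r means: during closure, the same rule-invocation (return) state may appear at most r times on any one configuration's stack, so conversion cannot chase recursion forever. A minimal sketch of that guard over a linked context stack (names are illustrative, not the real NFAContext API):

// Minimal linked call stack with a per-return-state occurrence count; illustration only.
class ContextSketch {
    final ContextSketch parent;
    final int returnStateNumber;   // NFA state that follows the rule invocation
    ContextSketch(ContextSketch parent, int returnStateNumber) {
        this.parent = parent;
        this.returnStateNumber = returnStateNumber;
    }
    /** How many times does return state s already appear on this stack? */
    int occurrences(int s) {
        int n = 0;
        for (ContextSketch p = this; p != null; p = p.parent) {
            if (p.returnStateNumber == s) n++;
        }
        return n;
    }
}

class RecursionGuardSketch {
    static final int R = 4;   // same spirit as DEFAULT_MAX_SAME_RULE_INVOCATIONS_PER_NFA_CONFIG_STACK

    /** Return the pushed context, or null to signal "recursion too deep; stop here". */
    static ContextSketch push(ContextSketch context, int returnStateNumber) {
        if (context != null && context.occurrences(returnStateNumber) >= R) return null;
        return new ContextSketch(context, returnStateNumber);
    }
}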

View File

@ -12,6 +12,15 @@ import java.util.Set;
/** Code "module" that knows how to resolve LL(*) nondeterminisms. */
public class Resolver {
StackLimitedNFAToDFAConverter converter;
PredicateResolver semResolver;
public Resolver(StackLimitedNFAToDFAConverter converter) {
this.converter = converter;
semResolver = new PredicateResolver(converter);
}
/** Walk each NFA configuration in this DFA state looking for a conflict
* where (s|i|ctx) and (s|j|ctx) exist, indicating that state s with
* conflicting ctx predicts alts i and j. Return an Integer set
@ -25,7 +34,7 @@ public class Resolver {
* alt must be different or must have different contexts to avoid a
* conflict.
*/
public static Set<Integer> getNonDeterministicAlts(DFAState d, boolean approx) {
public Set<Integer> getNonDeterministicAlts(DFAState d) {
//System.out.println("getNondetAlts for DFA state "+stateNumber);
Set<Integer> nondeterministicAlts = new HashSet<Integer>();
@ -91,16 +100,15 @@ public class Resolver {
// Also a conflict if s.ctx or t.ctx is empty
boolean altConflict = s.alt != t.alt;
boolean ctxConflict = false;
if ( approx ) {
ctxConflict = s.context == t.context &&
s.context != NFAToApproxDFAConverter.NFA_EMPTY_STACK_CONTEXT;
if ( converter instanceof StackLimitedNFAToDFAConverter) {
ctxConflict = s.context.equals(t.context);
}
else {
ctxConflict = s.context.conflictsWith(t.context);
}
if ( altConflict && ctxConflict ) {
nondeterministicAlts.add(Utils.integer(s.alt));
nondeterministicAlts.add(Utils.integer(t.alt));
nondeterministicAlts.add(s.alt);
nondeterministicAlts.add(t.alt);
}
}
}
@ -110,12 +118,12 @@ public class Resolver {
return nondeterministicAlts;
}
public static void resolveNonDeterminisms(DFAState d, boolean approx) {
if ( NFAToApproxDFAConverter.debug ) {
public void resolveNonDeterminisms(DFAState d) {
if ( StackLimitedNFAToDFAConverter.debug ) {
System.out.println("resolveNonDeterminisms "+d.toString());
}
Set nondeterministicAlts = getNonDeterministicAlts(d, approx);
if ( NFAToApproxDFAConverter.debug && nondeterministicAlts!=null ) {
Set<Integer> nondeterministicAlts = getNonDeterministicAlts(d);
if ( StackLimitedNFAToDFAConverter.debug && nondeterministicAlts!=null ) {
System.out.println("nondet alts="+nondeterministicAlts);
}
@ -123,26 +131,40 @@ public class Resolver {
if ( nondeterministicAlts==null ) return;
// reportNondeterminism(d, nondeterministicAlts);
System.err.println("nondterministic alts "+nondeterministicAlts);
converter.nondeterministicStates.add(d);
// ATTEMPT TO RESOLVE WITH SEMANTIC PREDICATES
if ( !approx ) {
// boolean resolved =
// tryToResolveWithSemanticPredicates(d, nondeterministicAlts);
// if ( resolved ) {
// if ( debug ) {
// System.out.println("resolved DFA state "+d.stateNumber+" with pred");
// }
// d.resolvedWithPredicates = true;
// dfa.probe.reportNondeterminismResolvedWithSemanticPredicate(d);
// return;
// }
boolean resolved =
semResolver.tryToResolveWithSemanticPredicates(d, nondeterministicAlts);
if ( resolved ) {
if ( StackLimitedNFAToDFAConverter.debug ) {
System.out.println("resolved DFA state "+d.stateNumber+" with pred");
}
d.resolvedWithPredicates = true;
converter.resolvedWithSemanticPredicates.add(d);
return;
}
// RESOLVE SYNTACTIC CONFLICT BY REMOVING ALL BUT ONE ALT
resolveByPickingMinAlt(d, nondeterministicAlts);
}
public void resolveDanglingState(DFAState d) {
if ( d.resolvedWithPredicates || d.getNumberOfTransitions()>0 ) return;
System.err.println("dangling DFA state "+d+" after reach / closures");
converter.danglingStates.add(d);
// turn off all configurations except for those associated with
// min alt number; somebody has to win else some input will not
// predict any alt.
int minAlt = resolveByPickingMinAlt(d, null);
// force it to be an accept state
d.isAcceptState = true;
// might be adding new accept state for alt, but that's ok
converter.dfa.defineAcceptState(minAlt, d);
}
/** Turn off all configurations associated with the
* set of incoming nondeterministic alts except the min alt number.
* There may be many alts among the configurations but only turn off
@ -153,7 +175,7 @@ public class Resolver {
*
* Return the min alt found.
*/
static int resolveByPickingMinAlt(DFAState d, Set nondeterministicAlts) {
int resolveByPickingMinAlt(DFAState d, Set<Integer> nondeterministicAlts) {
int min = Integer.MAX_VALUE;
if ( nondeterministicAlts!=null ) {
min = getMinAlt(nondeterministicAlts);
@ -170,13 +192,13 @@ public class Resolver {
/** turn off all states associated with alts other than the good one
* (as long as they are one of the nondeterministic ones)
*/
static void turnOffOtherAlts(DFAState d, int min, Set<Integer> nondeterministicAlts) {
void turnOffOtherAlts(DFAState d, int min, Set<Integer> nondeterministicAlts) {
int numConfigs = d.nfaConfigs.size();
for (int i = 0; i < numConfigs; i++) {
NFAConfig configuration = d.nfaConfigs.get(i);
if ( configuration.alt!=min ) {
if ( nondeterministicAlts==null ||
nondeterministicAlts.contains(Utils.integer(configuration.alt)) )
nondeterministicAlts.contains(configuration.alt) )
{
configuration.resolved = true;
}
@ -184,7 +206,7 @@ public class Resolver {
}
}
static int getMinAlt(Set<Integer> nondeterministicAlts) {
public static int getMinAlt(Set<Integer> nondeterministicAlts) {
int min = Integer.MAX_VALUE;
for (Integer altI : nondeterministicAlts) {
int alt = altI.intValue();
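
When predicates cannot resolve a conflict, resolveByPickingMinAlt simply lets the lowest-numbered nondeterministic alt win and turns the competing configurations off. A toy version of that policy over plain alt/flag arrays (not the real NFAConfig list):

import java.util.*;

class MinAltSketch {
    public static void main(String[] args) {
        int[] alts = {1, 2, 2, 3};                 // alt of each NFA config in the DFA state
        boolean[] turnedOff = new boolean[alts.length];
        Set<Integer> nondet = new TreeSet<Integer>(Arrays.asList(2, 3)); // conflicting alts

        int min = Collections.min(nondet);
        for (int i = 0; i < alts.length; i++) {
            // turn off every config of a conflicting alt other than the minimum
            if (alts[i] != min && nondet.contains(alts[i])) turnedOff[i] = true;
        }
        System.out.println(min + " wins: " + Arrays.toString(turnedOff)); // 2 wins: [false, false, false, true]
    }
}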

View File

@ -64,7 +64,6 @@ public abstract class SemanticContext {
protected int constantValue = INVALID_PRED_VALUE;
public Predicate() {
predicateAST = new GrammarAST();
this.gated=false;
}
@ -87,14 +86,13 @@ public abstract class SemanticContext {
/** Two predicates are the same if they are literally the same
* text rather than the same node in the grammar's AST.
* Or, if they have the same constant value, return equal.
* TODO: As of July 2006 I'm not sure these are needed.
*/
public boolean equals(Object o) {
if ( !(o instanceof Predicate) ) {
return false;
}
return predicateAST.getText().equals(((Predicate)o).predicateAST.getText());
if ( !(o instanceof Predicate) ) return false;
Predicate p = (Predicate) o;
if ( predicateAST!=null && p.predicateAST!=null )
return predicateAST.getText().equals(p.predicateAST.getText());
return predicateAST==null && p.predicateAST==null;
}
public int hashCode() {

View File

@ -12,7 +12,7 @@ import java.util.*;
* per DFA (also required for thread safety if multiple conversions
* launched).
*/
public class NFAToApproxDFAConverter {
public class StackLimitedNFAToDFAConverter {
public static final NFAContext NFA_EMPTY_STACK_CONTEXT = new NFAContext(null, null);
Grammar g;
@ -22,31 +22,82 @@ public class NFAToApproxDFAConverter {
/** DFA we are creating */
DFA dfa;
/** Stack depth max; same as Bermudez's m */
int m = 1;
/** A list of DFA states we still need to process during NFA conversion */
List<DFAState> work = new LinkedList<DFAState>();
/** Each alt in an NFA derived from a grammar must have a DFA state that
* predicts it lest the parser not know what to do. Nondeterminisms can
* lead to this situation (assuming no semantic predicates can resolve
* the problem) and when for some reason, I cannot compute the lookahead
* (which might arise from an error in the algorithm or from
* left-recursion etc...). This list starts out with all alts contained
* and then in method doesStateReachAcceptState() I remove the alts I
* know to be uniquely predicted.
*/
public List<Integer> unreachableAlts;
/** Track all DFA states with nondeterministic alternatives.
* Reaching the same DFA state means that, for some input, the NFA can
* reach the same NFA state starting from more than one
* alternative's left edge. Later we may find that predicates
* resolve the issue, but we track the info anyway.
* Note that from the DFA state, you can ask for
* which alts are nondeterministic.
*/
public Set<DFAState> nondeterministicStates = new HashSet<DFAState>();
/** The set of states w/o emanating edges (and w/o resolving sem preds). */
public Set<DFAState> danglingStates = new HashSet<DFAState>();
/** Was a syntactic ambiguity resolved with predicates? Any DFA
* state that predicts more than one alternative, must be resolved
* with predicates or it should be reported to the user.
*/
Set<DFAState> resolvedWithSemanticPredicates = new HashSet<DFAState>();
/** Tracks DFA states whose alts are insufficiently covered by predicates.
* For example, p1||true gets reduced to true and so leaves the
* whole alt uncovered.
*/
Set<DFAState> incompletelyCoveredStates = new HashSet<DFAState>();
Set<DFAState> recursionOverflowStates = new HashSet<DFAState>();
/** Used to prevent the closure operation from looping to itself and
* hence looping forever. Sensitive to the NFA state, the alt, and
* the stack context.
*/
Set<NFAConfig> closureBusy;
Resolver resolver;
public static boolean debug = false;
public NFAToApproxDFAConverter(Grammar g, DecisionState nfaStartState) {
public StackLimitedNFAToDFAConverter(Grammar g, DecisionState nfaStartState) {
this.g = g;
this.nfaStartState = nfaStartState;
dfa = new DFA(g, nfaStartState);
dfa.converter = this;
resolver = new Resolver(this);
unreachableAlts = new ArrayList<Integer>();
for (int i = 1; i <= dfa.nAlts; i++) {
unreachableAlts.add(i);
}
}
public DFA createDFA() {
dfa.startState = computeStartState();
computeStartState();
dfa.addState(dfa.startState); // make sure dfa knows about this state
work.add(dfa.startState);
// while more DFA states to check, process them
while ( work.size()>0 ) {
reach( work.get(0) );
DFAState d = work.get(0);
reach(d);
resolver.resolveDanglingState(d);
work.remove(0); // we're done with this DFA state
}
@ -67,7 +118,8 @@ public class NFAToApproxDFAConverter {
System.out.println("DFA state after reach -" +
label.toString(g)+"->"+t);
}
// nothing was reached by label due to conflict resolution
// nothing was reached by label; when d was added to the work list we
// must have resolved all NFA configs in d that point at label
if ( t==null ) continue;
// if ( t.getUniqueAlt()==NFA.INVALID_ALT_NUMBER ) {
// // Only compute closure if a unique alt number is not known.
@ -83,6 +135,9 @@ public class NFAToApproxDFAConverter {
addTransition(d, label, t); // make d-label->t transition
}
// Add semantic predicate transitions if we resolved when added to work list
if ( d.resolvedWithPredicates ) addPredicateTransitions(d);
}
/** Add t if not in DFA yet, resolving nondet's and then make d-label->t */
@ -97,8 +152,7 @@ public class NFAToApproxDFAConverter {
// resolve any syntactic conflicts by choosing a single alt or
// by using semantic predicates if present.
boolean approx = this instanceof NFAToApproxDFAConverter;
Resolver.resolveNonDeterminisms(t, approx);
resolver.resolveNonDeterminisms(t);
// If deterministic, don't add this state; it's an accept state
// Just return as a valid DFA state
@ -136,17 +190,20 @@ public class NFAToApproxDFAConverter {
int n = c.state.getNumberOfTransitions();
for (int i=0; i<n; i++) { // for each transition
Transition t = c.state.transition(i);
// when we added this state as target of some other state,
// we tried to resolve any conflicts. Ignore anything we
// were able to fix previously
if ( c.resolved || c.resolvedWithPredicate) continue;
// found a transition with label; does it collide with label?
if ( !t.isEpsilon() && !t.label().and(label).isNil() ) {
// add NFA target to (potentially) new DFA state
labelTarget.addNFAConfig(t.target, c.alt, c.context);
labelTarget.addNFAConfig(t.target, c.alt, c.context, c.semanticContext);
}
}
}
if ( labelTarget.nfaConfigs.size()==0 ) {
System.err.println("why is this empty?");
}
// if we couldn't find any non-resolved edges to add, return nothing
if ( labelTarget.nfaConfigs.size()==0 ) return null;
return labelTarget;
}
@ -159,20 +216,20 @@ public class NFAToApproxDFAConverter {
* derived from this. At a stop state in the DFA, we can return this alt
* number, indicating which alt is predicted.
*/
public DFAState computeStartState() {
public void computeStartState() {
DFAState d = dfa.newState();
dfa.startState = d;
// add config for each alt start, then add closure for those states
for (int altNum=1; altNum<=dfa.nAlts; altNum++) {
Transition t = nfaStartState.transition(altNum-1);
NFAState altStart = t.target;
NFAContext initialContext = NFA_EMPTY_STACK_CONTEXT;
d.addNFAConfig(altStart, altNum, initialContext);
d.addNFAConfig(altStart, altNum,
NFA_EMPTY_STACK_CONTEXT,
SemanticContext.EMPTY_SEMANTIC_CONTEXT);
}
closure(d);
return d;
}
/** For all NFA states (configurations) merged in d,
@ -184,11 +241,16 @@ public class NFAToApproxDFAConverter {
System.out.println("closure("+d+")");
}
// Only the start state initiates pred collection; it may get turned
// off later by actions, hence we need a parameter to carry
// it forward
boolean collectPredicates = (d == dfa.startState);
closureBusy = new HashSet<NFAConfig>();
List<NFAConfig> configs = new ArrayList<NFAConfig>();
for (NFAConfig c : d.nfaConfigs) {
closure(c.state, c.alt, c.context, configs);
closure(c.state, c.alt, c.context, c.semanticContext, collectPredicates, configs);
}
d.nfaConfigs.addAll(configs); // Add new NFA configs to DFA state d
@ -219,9 +281,11 @@ public class NFAToApproxDFAConverter {
* TODO: remove altNum if we don't reorder for loopback nodes
*/
public void closure(NFAState s, int altNum, NFAContext context,
SemanticContext semanticContext,
boolean collectPredicates,
List<NFAConfig> configs)
{
NFAConfig proposedNFAConfig = new NFAConfig(s, altNum, context);
NFAConfig proposedNFAConfig = new NFAConfig(s, altNum, context, semanticContext);
if ( closureBusy.contains(proposedNFAConfig) ) return;
closureBusy.add(proposedNFAConfig);
@ -229,20 +293,27 @@ public class NFAToApproxDFAConverter {
// p itself is always in closure
configs.add(proposedNFAConfig);
// if we have context info and we're at rule stop state, do
// local follow for invokingRule and global follow for other links
if ( s instanceof RuleStopState ) {
ruleStopStateClosure(s, altNum, context, configs);
ruleStopStateClosure(s, altNum, context, semanticContext, collectPredicates, configs);
}
else {
commonClosure(s, altNum, context, configs);
commonClosure(s, altNum, context, semanticContext, collectPredicates, configs);
}
}
void ruleStopStateClosure(NFAState s, int altNum, NFAContext context, List<NFAConfig> configs) {
// if we have context info and we're at rule stop state, do
// local follow for invokingRule and global follow for other links
void ruleStopStateClosure(NFAState s, int altNum, NFAContext context,
SemanticContext semanticContext,
boolean collectPredicates,
List<NFAConfig> configs)
{
Rule invokingRule = null;
if ( context!=NFA_EMPTY_STACK_CONTEXT ) invokingRule = context.returnState.rule;
if ( context!=NFA_EMPTY_STACK_CONTEXT ) {
// if stack not empty, get invoking rule from top of stack
invokingRule = context.returnState.rule;
}
//System.out.println("FOLLOW of "+s+" context="+context);
// follow all static FOLLOW links
@ -254,32 +325,66 @@ public class NFAToApproxDFAConverter {
// else follow link to context state only
if ( t.target.rule != invokingRule ) {
//System.out.println("OFF TO "+t.target);
closure(t.target, altNum, context, configs);
closure(t.target, altNum, context, semanticContext, collectPredicates, configs);
}
else {
if ( t.target == context.returnState) {
else { // t.target is in invoking rule; only follow context's link
if ( t.target == context.returnState ) {
//System.out.println("OFF TO CALL SITE "+t.target);
// go only to specific call site; pop context
closure(t.target, altNum, NFA_EMPTY_STACK_CONTEXT, configs);
NFAContext newContext = context.parent; // "pop" invoking state
closure(t.target, altNum, newContext, semanticContext, collectPredicates, configs);
}
}
}
return;
}
void commonClosure(NFAState s, int altNum, NFAContext context, List<NFAConfig> configs) {
void commonClosure(NFAState s, int altNum, NFAContext context,
SemanticContext semanticContext, boolean collectPredicates,
List<NFAConfig> configs)
{
int n = s.getNumberOfTransitions();
for (int i=0; i<n; i++) {
Transition t = s.transition(i);
NFAContext newContext = context; // assume old context
if ( t instanceof RuleTransition) {
NFAContext newContext = context; // assume old context
NFAState retState = ((RuleTransition)t).followState;
if ( context==NFA_EMPTY_STACK_CONTEXT ) { // track first call return state only
if ( context.depth() < m ) { // track first call return state only
newContext = new NFAContext(context, retState);
}
closure(t.target, altNum, newContext, semanticContext, collectPredicates, configs);
}
if ( t.isEpsilon() ) {
closure(t.target, altNum, newContext, configs);
else if ( t instanceof ActionTransition ) {
continue;
}
else if ( t instanceof PredicateTransition ) {
SemanticContext labelContext = ((PredicateTransition)t).semanticContext;
SemanticContext newSemanticContext = semanticContext;
if ( collectPredicates ) {
// AND the previous semantic context with new pred
// int walkAlt =
// dfa.decisionNFAStartState.translateDisplayAltToWalkAlt(alt);
NFAState altLeftEdge = dfa.decisionNFAStartState.transition(altNum).target;
/*
System.out.println("state "+p.stateNumber+" alt "+alt+" walkAlt "+walkAlt+" trans to "+transition0.target);
System.out.println("DFA start state "+dfa.decisionNFAStartState.stateNumber);
System.out.println("alt left edge "+altLeftEdge.stateNumber+
", epsilon target "+
altLeftEdge.transition(0).target.stateNumber);
*/
// do not hoist syn preds from other rules; only collect them if we are
// in the starting state's rule (i.e., context is empty)
if ( !labelContext.isSyntacticPredicate() || s==altLeftEdge ) {
System.out.println("&"+labelContext+" enclosingRule="+s.rule);
newSemanticContext =
SemanticContext.and(semanticContext, labelContext);
}
}
closure(t.target, altNum, context, newSemanticContext, collectPredicates, configs);
}
else if ( t.isEpsilon() ) {
closure(t.target, altNum, context, semanticContext, collectPredicates, configs);
}
}
}
@ -407,4 +512,60 @@ public class NFAToApproxDFAConverter {
"reachableLabels="+reachableLabels.toString());
*/
}
/** For each NFA config in d, look for the "predicate required" sign we set
* during nondeterminism resolution.
*
* Add the predicate edges sorted by the alternative number; I'm fairly
* sure that I could walk the configs backwards so they are added to
* the predDFATarget in the right order, but it's best to make sure.
* Predicates succeed in the order they are specified. Alt i wins
* over alt i+1 if both predicates are true.
*/
protected void addPredicateTransitions(DFAState d) {
List<NFAConfig> configsWithPreds = new ArrayList<NFAConfig>();
// get a list of all configs with predicates
for (NFAConfig c : d.nfaConfigs) {
if ( c.resolvedWithPredicate) {
configsWithPreds.add(c);
}
}
// Sort ascending according to alt; alt i has higher precedence than i+1
Collections.sort(configsWithPreds,
new Comparator<NFAConfig>() {
public int compare(NFAConfig a, NFAConfig b) {
if ( a.alt < b.alt ) return -1;
else if ( a.alt > b.alt ) return 1;
return 0;
}
});
List<NFAConfig> predConfigsSortedByAlt = configsWithPreds;
// Now, we can add edges emanating from d for these preds in right order
for (NFAConfig c : predConfigsSortedByAlt) {
DFAState predDFATarget = dfa.altToAcceptState[c.alt];
if ( predDFATarget==null ) {
predDFATarget = dfa.newState(); // create if not there.
// new DFA state is a target of the predicate from d
predDFATarget.addNFAConfig(c.state,
c.alt,
c.context,
c.semanticContext);
predDFATarget.isAcceptState = true;
dfa.defineAcceptState(c.alt, predDFATarget);
// v3 checked if already there, but new state is an accept
// state and therefore can't be there yet; we just checked above
// DFAState existingState = dfa.addState(predDFATarget);
// if ( predDFATarget != existingState ) {
// // already there...use/return the existing DFA state that
// // is a target of this predicate. Make this state number
// // point at the existing state
// dfa.setState(predDFATarget.stateNumber, existingState);
// predDFATarget = existingState;
// }
}
// add a transition to pred target from d
d.addTransition(new PredicateEdge(c.semanticContext, predDFATarget));
}
}
}
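
createDFA()/reach() follow the classic worklist subset construction: close over the start configurations, then repeatedly take a DFA state, union its NFA targets per label, resolve the target, and enqueue it if new. A stripped-down skeleton of that loop over integer NFA states and character labels (no epsilon closure, predicates, or conflict resolution; not the converter above):

import java.util.*;

class SubsetConstructionSketch {
    // moves.get(s).get(label) = NFA states reachable from NFA state s on label
    static Map<Set<Integer>, Map<Character, Set<Integer>>> build(
            Set<Integer> startState,
            Map<Integer, Map<Character, Set<Integer>>> moves)
    {
        Map<Set<Integer>, Map<Character, Set<Integer>>> dfa = new LinkedHashMap<>();
        Deque<Set<Integer>> work = new ArrayDeque<>();
        dfa.put(startState, new LinkedHashMap<>());
        work.add(startState);
        while (!work.isEmpty()) {
            Set<Integer> d = work.remove();              // next DFA state to process
            Map<Character, Set<Integer>> edges = dfa.get(d);
            for (int s : d) {                            // "reach": union targets per label
                Map<Character, Set<Integer>> m =
                    moves.getOrDefault(s, Collections.<Character, Set<Integer>>emptyMap());
                for (Map.Entry<Character, Set<Integer>> e : m.entrySet()) {
                    edges.computeIfAbsent(e.getKey(), k -> new TreeSet<>()).addAll(e.getValue());
                }
            }
            for (Set<Integer> target : edges.values()) { // enqueue newly discovered DFA states
                if (!dfa.containsKey(target)) {
                    dfa.put(target, new LinkedHashMap<>());
                    work.add(target);
                }
            }
        }
        return dfa;
    }
}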

View File

@ -1,11 +1,9 @@
package org.antlr.v4.automata;
import org.antlr.v4.misc.Utils;
import org.antlr.v4.analysis.StackLimitedNFAToDFAConverter;
import org.antlr.v4.tool.Grammar;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** A DFA (converted from a grammar's NFA).
@ -44,17 +42,6 @@ public class DFA {
*/
//protected List<DFAState> states = new ArrayList<DFAState>();
/** Each alt in an NFA derived from a grammar must have a DFA state that
* predicts it lest the parser not know what to do. Nondeterminisms can
* lead to this situation (assuming no semantic predicates can resolve
* the problem) and when for some reason, I cannot compute the lookahead
* (which might arise from an error in the algorithm or from
* left-recursion etc...). This list starts out with all alts contained
* and then in method doesStateReachAcceptState() I remove the alts I
* know to be uniquely predicted.
*/
public List<Integer> unreachableAlts;
public int nAlts = 0;
/** We only want one accept state per predicted alt; track here */
@ -63,15 +50,13 @@ public class DFA {
/** Unique state numbers per DFA */
int stateCounter = 0;
public StackLimitedNFAToDFAConverter converter;
public DFA(Grammar g, DecisionState startState) {
this.g = g;
this.decisionNFAStartState = startState;
nAlts = startState.getNumberOfTransitions();
decision = startState.decision;
unreachableAlts = new ArrayList<Integer>();
for (int i = 1; i <= nAlts; i++) {
unreachableAlts.add(Utils.integer(i));
}
altToAcceptState = new DFAState[nAlts+1];
}
@ -82,6 +67,7 @@ public class DFA {
}
public void defineAcceptState(int alt, DFAState acceptState) {
if ( uniqueStates.get(acceptState)==null ) addState(acceptState);
altToAcceptState[alt] = acceptState;
}
@ -92,6 +78,17 @@ public class DFA {
return n;
}
public boolean isDeterministic() {
if ( converter.danglingStates.size()==0 &&
converter.nondeterministicStates.size()==0 &&
converter.unreachableAlts.size()==0 )
{
return true;
}
// ...
return false;
}
public String toString() {
if ( startState==null ) return "";
DFASerializer serializer = new DFASerializer(g, startState);

View File

@ -3,8 +3,8 @@ package org.antlr.v4.automata;
import org.antlr.v4.analysis.NFAConfig;
import org.antlr.v4.analysis.NFAContext;
import org.antlr.v4.analysis.Resolver;
import org.antlr.v4.analysis.SemanticContext;
import org.antlr.v4.misc.OrderedHashSet;
import org.antlr.v4.misc.Utils;
import java.util.ArrayList;
import java.util.HashSet;
@ -57,6 +57,15 @@ public class DFAState {
public OrderedHashSet<NFAConfig> nfaConfigs =
new OrderedHashSet<NFAConfig>();
/** Rather than recheck every NFA configuration in a DFA state (after
* resolving) in reach, just check this boolean. Saves a linear walk
* per DFA state creation. Every little bit helps.
*
* This indicates that at least 2 alts were resolved, but not necessarily
* all alts in DFA state configs.
*/
public boolean resolvedWithPredicates = false;
//int cachedUniquelyPredicatedAlt = NFA.INVALID_ALT_NUMBER;
public DFAState(DFA dfa) { this.dfa = dfa; }
@ -68,9 +77,10 @@ public class DFAState {
public NFAConfig addNFAConfig(NFAState state,
int alt,
NFAContext context)
NFAContext context,
SemanticContext semanticContext)
{
NFAConfig c = new NFAConfig(state, alt, context);
NFAConfig c = new NFAConfig(state, alt, context, semanticContext);
addNFAConfig(state, c);
return c;
}
@ -93,7 +103,7 @@ public class DFAState {
public Set<Integer> getAltSet() {
Set<Integer> alts = new HashSet<Integer>();
for (NFAConfig c : nfaConfigs) {
alts.add(Utils.integer(c.alt));
alts.add(c.alt);
}
if ( alts.size()==0 ) return null;
return alts;

View File

@ -8,8 +8,12 @@ public class Edge {
public IntervalSet label;
public DFAState target;
public Edge(DFAState target, IntervalSet label) {
public Edge(DFAState target) {
this.target = target;
}
public Edge(DFAState target, IntervalSet label) {
this(target);
this.label = label;
}

View File

@ -0,0 +1,16 @@
package org.antlr.v4.automata;
import org.antlr.v4.analysis.SemanticContext;
import org.antlr.v4.tool.Grammar;
public class PredicateEdge extends Edge {
SemanticContext semanticContext;
public PredicateEdge(SemanticContext semanticContext, DFAState target) {
super(target);
this.semanticContext = semanticContext;
}
public String toString(Grammar g) {
return semanticContext.toString();
}
}

View File

@ -10,7 +10,7 @@ import org.antlr.v4.tool.GrammarAST;
* multiple NFA configurations into a single DFA state.
*/
public class PredicateTransition extends Transition {
protected SemanticContext semanticContext;
public SemanticContext semanticContext;
public PredicateTransition(GrammarAST predicateASTNode, NFAState target) {
super(target);

View File

@ -62,7 +62,7 @@ public class DOTGenerator {
dot = stlib.getInstanceOf("dfa");
dot.add("startState", startState.stateNumber);
dot.add("useBox", Tool.internalOption_ShowNFAConfigsInDFA);
walkCreatingDFADOT(dot, (DFAState)startState);
walkCreatingDFADOT(dot,startState);
dot.add("rankdir", rankdir);
return dot.render();
}
@ -99,6 +99,13 @@ public class DOTGenerator {
" edge from s"+s.stateNumber+" ["+i+"] of "+s.getNumberOfTransitions());
*/
st = stlib.getInstanceOf("edge");
// SemanticContext preds = s.getGatedPredicatesInNFAConfigurations();
// if ( preds!=null ) {
// String predsStr = "";
// predsStr = "&&{"+preds.toString()+"}?";
// label += predsStr;
// }
st.add("label", getEdgeLabel(edge.toString(grammar)));
st.add("src", getStateLabel(s));
st.add("target", getStateLabel(edge.target));
@ -182,6 +189,9 @@ public class DOTGenerator {
if ( edge instanceof ActionTransition ) {
edgeST = stlib.getInstanceOf("action-edge");
}
else if ( edge instanceof PredicateTransition ) {
edgeST = stlib.getInstanceOf("edge");
}
else if ( edge.isEpsilon() ) {
edgeST = stlib.getInstanceOf("epsilon-edge");
}