forked from jasder/antlr

shelve to look back at sam's. added optimizeConfigs() stuff.

parent 00c4f98134
commit 1966379265
org/antlr/v4/runtime/atn/ATNConfigSet.java

@@ -156,16 +156,15 @@ public class ATNConfigSet implements Set<ATNConfig> {
 		configToContext.remove(new Key(c));
 	}
 
-	@Override
-	public String toString() {
-		StringBuilder buf = new StringBuilder();
-		buf.append(elements().toString());
-//		buf.append(super.toString());
-		if ( hasSemanticContext ) buf.append(",hasSemanticContext=").append(hasSemanticContext);
-		if ( uniqueAlt!=ATN.INVALID_ALT_NUMBER ) buf.append(",uniqueAlt=").append(uniqueAlt);
-		if ( conflictingAlts!=null ) buf.append(",conflictingAlts=").append(conflictingAlts);
-		if ( dipsIntoOuterContext ) buf.append(",dipsIntoOuterContext");
-		return buf.toString();
+	public void optimizeConfigs(ATNSimulator interpreter) {
+		if (configs.isEmpty()) {
+			return;
+		}
+
+		for (int i = 0; i < configs.size(); i++) {
+			ATNConfig config = configs.get(i);
+			config.context = interpreter.getCachedContext(config.context);
+		}
 	}
 
 	@Override
@@ -218,6 +217,18 @@ public class ATNConfigSet implements Set<ATNConfig> {
 		configToContext.clear();
 	}
 
+	@Override
+	public String toString() {
+		StringBuilder buf = new StringBuilder();
+		buf.append(elements().toString());
+//		buf.append(super.toString());
+		if ( hasSemanticContext ) buf.append(",hasSemanticContext=").append(hasSemanticContext);
+		if ( uniqueAlt!=ATN.INVALID_ALT_NUMBER ) buf.append(",uniqueAlt=").append(uniqueAlt);
+		if ( conflictingAlts!=null ) buf.append(",conflictingAlts=").append(conflictingAlts);
+		if ( dipsIntoOuterContext ) buf.append(",dipsIntoOuterContext");
+		return buf.toString();
+	}
+
 	// satisfy interface
 
 	@Override
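The net effect of the two ATNConfigSet hunks: toString() moves down the file unchanged, and the new optimizeConfigs() rewrites each config's context pointer through the simulator's cache so that equal context graphs collapse to a single shared instance. A minimal sketch of that interning pattern, with generic names that are not part of this commit:

	import java.util.HashMap;
	import java.util.Map;

	// Interning sketch: map every value to one canonical copy, by equals().
	class Interner<T> {
		private final Map<T, T> cache = new HashMap<T, T>();

		// Return the previously cached equal value if there is one;
		// otherwise cache the argument and return it unchanged.
		public T intern(T value) {
			T existing = cache.get(value);
			if (existing != null) return existing;
			cache.put(value, value);
			return value;
		}
	}

optimizeConfigs() is this pattern applied per config, with interpreter.getCachedContext() playing the role of intern().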
org/antlr/v4/runtime/atn/ATNSimulator.java

@@ -34,7 +34,10 @@ import org.antlr.v4.runtime.misc.IntervalSet;
 import org.antlr.v4.runtime.misc.NotNull;
 
 import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.IdentityHashMap;
 import java.util.List;
+import java.util.Map;
 
 public abstract class ATNSimulator {
 	/** Must distinguish between missing edge and edge we know leads nowhere */
@@ -43,6 +46,16 @@ public abstract class ATNSimulator {
 	@NotNull
 	public final ATN atn;
 
+	/** The context cache maps all PredictionContext objects that are equals()
+	 *  to a single cached copy. This cache is shared across all contexts
+	 *  in all ATNConfigs in all DFA states. We rebuild each ATNConfigSet
+	 *  to use only cached nodes/graphs in addDFAState(). We don't want to
+	 *  fill this during closure() since there are lots of contexts that
+	 *  pop up but are not used ever again. It also greatly slows down closure().
+	 */
+	protected final Map<PredictionContext, PredictionContext> contextCache =
+		new HashMap<PredictionContext, PredictionContext>();
+
 	static {
 		ERROR = new DFAState(new ATNConfigSet());
 		ERROR.stateNumber = Integer.MAX_VALUE;
@@ -54,6 +67,14 @@ public abstract class ATNSimulator {
 
 	public abstract void reset();
 
+	public PredictionContext getCachedContext(PredictionContext context) {
+		IdentityHashMap<PredictionContext, PredictionContext> visited =
+			new IdentityHashMap<PredictionContext, PredictionContext>();
+		return PredictionContext.getCachedContext(context,
+												  contextCache,
+												  visited);
+	}
+
 	public static ATN deserialize(@NotNull char[] data) {
 		ATN atn = new ATN();
 		List<IntervalSet> sets = new ArrayList<IntervalSet>();
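Note the two map types this method threads through PredictionContext.getCachedContext(): the long-lived contextCache deduplicates by equals() across all calls, while the IdentityHashMap allocated fresh per call memoizes by reference, so a single traversal never processes the same node twice even when the graph shares subtrees. A standalone illustration of the distinction (plain JDK, not ANTLR code):

	import java.util.HashMap;
	import java.util.IdentityHashMap;
	import java.util.Map;

	public class MapSemantics {
		public static void main(String[] args) {
			String a = new String("ctx");
			String b = new String("ctx"); // equal to a, but a distinct object

			Map<String, String> byEquals = new HashMap<String, String>();
			Map<String, String> byIdentity = new IdentityHashMap<String, String>();
			byEquals.put(a, a);
			byIdentity.put(a, a);

			System.out.println(byEquals.containsKey(b));   // true: equals()/hashCode() lookup
			System.out.println(byIdentity.containsKey(b)); // false: reference-identity lookup
		}
	}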
org/antlr/v4/runtime/atn/ParserATNSimulator.java

@@ -1439,23 +1439,13 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
 		DFAState newState = proposed;
 
 		newState.stateNumber = dfa.states.size();
+		configs.optimizeConfigs(this);
 		newState.configset = new ATNConfigSet(configs);
 		dfa.states.put(newState, newState);
 		if ( debug ) System.out.println("adding new DFA state: "+newState);
 		return newState;
 	}
 
-//	public void reportConflict(int startIndex, int stopIndex,
-//							   @NotNull IntervalSet alts,
-//							   @NotNull ATNConfigSet configs)
-//	{
-//		if ( debug || retry_debug ) {
-//			System.out.println("reportConflict "+alts+":"+configs+
-//							   ", input="+parser.getInputString(startIndex, stopIndex));
-//		}
-//		if ( parser!=null ) parser.getErrorHandler().reportConflict(parser, startIndex, stopIndex, alts, configs);
-//	}
-
 	public void reportAttemptingFullContext(DFA dfa, ATNConfigSet configs, int startIndex, int stopIndex) {
 		if ( debug || retry_debug ) {
 			Interval interval = Interval.of(startIndex, stopIndex);
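The call site is the point: canonicalization happens once, when a config set is frozen into a DFA state, not inside closure() where most contexts are transient (the rationale spelled out in the new contextCache comment above). Condensed, the ordering in addDFAState() amounts to this sketch (hypothetical helper name; DFAState's ATNConfigSet constructor appears in the hunks above):

	// Deferred canonicalization: pay for the dedup once per new DFA state.
	DFAState freeze(ATNConfigSet configs, ATNSimulator interpreter) {
		configs.optimizeConfigs(interpreter);           // rewrite contexts through the cache
		return new DFAState(new ATNConfigSet(configs)); // then snapshot the optimized set
	}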
org/antlr/v4/runtime/atn/PredictionContext.java

@@ -2,6 +2,7 @@ package org.antlr.v4.runtime.atn;
 
 import org.antlr.v4.runtime.Recognizer;
 import org.antlr.v4.runtime.RuleContext;
 import org.antlr.v4.runtime.misc.NotNull;
 import org.antlr.v4.runtime.misc.Nullable;
 
+import java.util.ArrayDeque;
@@ -479,6 +480,69 @@ public abstract class PredictionContext implements Iterable<SingletonPredictionContext> {
 		return buf.toString();
 	}
 
+	// From Sam
+	public static PredictionContext getCachedContext(
+		@NotNull PredictionContext context,
+		@NotNull Map<PredictionContext, PredictionContext> contextCache,
+		@NotNull IdentityHashMap<PredictionContext, PredictionContext> visited) {
+		if (context.isEmpty()) {
+			return context;
+		}
+
+		PredictionContext existing = visited.get(context);
+		if (existing != null) {
+			return existing;
+		}
+
+		existing = contextCache.get(context);
+		if (existing != null) {
+			visited.put(context, existing);
+			return existing;
+		}
+
+		boolean changed = false;
+		PredictionContext[] parents = new PredictionContext[context.size()];
+		for (int i = 0; i < parents.length; i++) {
+			PredictionContext parent = getCachedContext(context.getParent(i), contextCache, visited);
+			if (changed || parent != context.getParent(i)) {
+				if (!changed) {
+					parents = new PredictionContext[context.size()];
+					for (int j = 0; j < context.size(); j++) {
+						parents[j] = context.getParent(j);
+					}
+
+					changed = true;
+				}
+
+				parents[i] = parent;
+			}
+		}
+
+		if (!changed) {
+			contextCache.put(context, context);
+			visited.put(context, context);
+			return context;
+		}
+
+		PredictionContext updated;
+		if (parents.length == 0) {
+			updated = isEmptyLocal(context) ? EMPTY_LOCAL : EMPTY_FULL;
+		}
+		else if (parents.length == 1) {
+			updated = new SingletonPredictionContext(parents[0], context.getInvokingState(0));
+		}
+		else {
+			ArrayPredictionContext arrayPredictionContext = (ArrayPredictionContext)context;
+			updated = new ArrayPredictionContext(parents, arrayPredictionContext.invokingStates);
+		}
+
+		contextCache.put(updated, updated);
+		visited.put(updated, updated);
+		visited.put(context, updated);
+
+		return updated;
+	}
+
 	// extra structures, but cut/paste/morphed works, so leave it.
 	// seems to do a breadth-first walk
 	public static List<PredictionContext> getAllNodes(PredictionContext context) {
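Putting the pieces together, a hedged usage sketch, assuming SingletonPredictionContext's constructor and PredictionContext.EMPTY_FULL are accessible from the calling code and that equals()/hashCode() compare parent and invoking state: two equal context graphs come back as one physical object.

	Map<PredictionContext, PredictionContext> cache =
		new HashMap<PredictionContext, PredictionContext>();

	// 42 is an arbitrary invoking state chosen for illustration.
	PredictionContext a = new SingletonPredictionContext(PredictionContext.EMPTY_FULL, 42);
	PredictionContext b = new SingletonPredictionContext(PredictionContext.EMPTY_FULL, 42);

	PredictionContext ca = PredictionContext.getCachedContext(a, cache,
			new IdentityHashMap<PredictionContext, PredictionContext>());
	PredictionContext cb = PredictionContext.getCachedContext(b, cache,
			new IdentityHashMap<PredictionContext, PredictionContext>());

	// a.equals(b), so the second call hits the cache: ca == cb, and both == a.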