forked from jasder/antlr
Got integrated graph stacks to compile; committing so I can look at Sam's.
parent 3a79aa15ba
commit 7233177441
ParserRuleContext.java

@@ -63,8 +63,6 @@ import java.util.List;
  *  satisfy the superclass interface.
  */
 public class ParserRuleContext<Symbol extends Token> extends RuleContext {
-    public static final ParserRuleContext<Token> EMPTY = new ParserRuleContext<Token>();
-
     /** If we are debugging or building a parse tree for a visitor,
      *  we need to track all of the tokens and rule invocations associated
      *  with this rule's context. This is empty for normal parsing
RuleContext.java

@@ -59,6 +59,8 @@ import java.io.IOException;
  *  @see ParserRuleContext
  */
 public class RuleContext implements ParseTree.RuleNode {
+    public static final ParserRuleContext<Token> EMPTY = new ParserRuleContext<Token>();
+
     /** What context invoked this rule? */
     public RuleContext parent;
 
ATNConfig.java

@@ -30,7 +30,6 @@
 package org.antlr.v4.runtime.atn;
 
 import org.antlr.v4.runtime.Recognizer;
-import org.antlr.v4.runtime.RuleContext;
 import org.antlr.v4.runtime.misc.NotNull;
 import org.antlr.v4.runtime.misc.Nullable;
 

@@ -55,7 +54,7 @@ public class ATNConfig {
     *  execution of the ATN simulator.
     */
    @Nullable
-   public RuleContext context;
+   public PredictionContext context;
 
    /**
     * We cannot execute predicates dependent upon local context unless

@@ -78,14 +77,14 @@ public class ATNConfig {
 
    public ATNConfig(@NotNull ATNState state,
                     int alt,
-                    @Nullable RuleContext context)
+                    @Nullable PredictionContext context)
    {
        this(state, alt, context, SemanticContext.NONE);
    }
 
    public ATNConfig(@NotNull ATNState state,
                     int alt,
-                    @Nullable RuleContext context,
+                    @Nullable PredictionContext context,
                     @NotNull SemanticContext semanticContext)
    {
        this.state = state;

@@ -102,11 +101,11 @@ public class ATNConfig {
        this(c, state, c.context, semanticContext);
    }
 
-   public ATNConfig(@NotNull ATNConfig c, @NotNull ATNState state, @Nullable RuleContext context) {
+   public ATNConfig(@NotNull ATNConfig c, @NotNull ATNState state, @Nullable PredictionContext context) {
        this(c, state, context, c.semanticContext);
    }
 
-   public ATNConfig(@NotNull ATNConfig c, @NotNull ATNState state, @Nullable RuleContext context,
+   public ATNConfig(@NotNull ATNConfig c, @NotNull ATNState state, @Nullable PredictionContext context,
                     @NotNull SemanticContext semanticContext)
    {
        this.state = state;
ArrayPredictionContext.java

@@ -1,34 +1,55 @@
 package org.antlr.v4.runtime.atn;
 
 import java.util.Arrays;
+import java.util.Iterator;
 
 public class ArrayPredictionContext extends PredictionContext {
    public final PredictionContext[] parents;
    // sorted for merge sort, no duplicates
-   public final String[] payloads;
+   public final int[] invokingStates;
 
    public ArrayPredictionContext(SingletonPredictionContext a) {
        this.parents = new PredictionContext[] {a.parent};
-       this.payloads = new String[] {a.payload};
+       this.invokingStates = new int[] {a.invokingState};
    }
 
-   public ArrayPredictionContext(PredictionContext[] parents, String[] payloads) {
+   public ArrayPredictionContext(PredictionContext[] parents, int[] invokingStates) {
        this.parents = parents;
-       this.payloads = payloads;
+       this.invokingStates = invokingStates;
    }
 
    public ArrayPredictionContext(SingletonPredictionContext... nodes) {
        parents = new PredictionContext[nodes.length];
-       payloads = new String[nodes.length];
+       invokingStates = new int[nodes.length];
        for (int i=0; i<nodes.length; i++) {
            parents[i] = nodes[i].parent;
-           payloads[i] = nodes[i].payload;
+           invokingStates[i] = nodes[i].invokingState;
        }
    }
 
+   @Override
+   public Iterator<SingletonPredictionContext> iterator() {
+       return new Iterator<SingletonPredictionContext>() {
+           int i = 0;
+           @Override
+           public boolean hasNext() { return i < parents.length; }
+
+           @Override
+           public SingletonPredictionContext next() {
+               SingletonPredictionContext ctx =
+                   new SingletonPredictionContext(parents[i], invokingStates[i]);
+               i++;
+               return ctx;
+           }
+
+           @Override
+           public void remove() { throw new UnsupportedOperationException(); }
+       };
+   }
+
    @Override
    public int size() {
-       return payloads.length;
+       return invokingStates.length;
    }
 
    @Override

@@ -37,13 +58,13 @@ public class ArrayPredictionContext extends PredictionContext {
    }
 
    @Override
-   public String getPayload(int index) {
-       return payloads[index];
+   public int getInvokingState(int index) {
+       return invokingStates[index];
    }
 
    @Override
-   public int findPayload(String payload) {
-       return Arrays.binarySearch(payloads, payload);
+   public int findInvokingState(int invokingState) {
+       return Arrays.binarySearch(invokingStates, invokingState);
    }
 
    public ArrayPredictionContext trim() {

@@ -54,7 +75,7 @@ public class ArrayPredictionContext extends PredictionContext {
            int n = i+1;
            return new ArrayPredictionContext(
                Arrays.copyOf(parents, n),
-               Arrays.copyOf(payloads, n)
+               Arrays.copyOf(invokingStates, n)
            );
        }
        return this;

@@ -74,12 +95,12 @@ public class ArrayPredictionContext extends PredictionContext {
        }
 
        ArrayPredictionContext a = (ArrayPredictionContext)o;
-       if ( payloads.length != a.payloads.length ) {
+       if ( invokingStates.length != a.invokingStates.length ) {
            return false;
        }
 
-       for (int i=0; i<payloads.length; i++) {
-           if ( !payloads[i].equals(a.payloads[i]) ) return false;
+       for (int i=0; i< invokingStates.length; i++) {
+           if ( invokingStates[i]!=a.invokingStates[i] ) return false;
            if ( !parents[i].equals(a.parents[i]) ) return false;
        }
        return true;

@@ -92,6 +113,6 @@ public class ArrayPredictionContext extends PredictionContext {
 
    @Override
    public String toString() {
-       return Arrays.toString(payloads)+":"+id;
+       return Arrays.toString(invokingStates)+":"+id;
    }
 }
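
The array node is what turns the context from a tree into a graph: one node stands for several possible stack tops whose parents can be shared. A minimal usage sketch against the classes as they appear in this commit (class names, constructors, and fields are taken from the hunks above; the demo class itself is hypothetical):

import org.antlr.v4.runtime.atn.ArrayPredictionContext;
import org.antlr.v4.runtime.atn.PredictionContext;
import org.antlr.v4.runtime.atn.SingletonPredictionContext;

public class GraphStackSketch {
    public static void main(String[] args) {
        // $ -- the shared empty-stack root of every context graph
        PredictionContext root = PredictionContext.EMPTY;

        // two call stacks that differ only in the top invoking state: [12 $] and [21 $]
        SingletonPredictionContext a = new SingletonPredictionContext(root, 12);
        SingletonPredictionContext b = new SingletonPredictionContext(root, 21);

        // one array node represents both stacks at once; the parent stays shared
        ArrayPredictionContext both = new ArrayPredictionContext(a, b);

        // the Iterable added in this commit walks every possible stack top
        for (SingletonPredictionContext top : both) {
            System.out.println("top invoking state = " + top.invokingState
                    + ", parent empty = " + top.parent.isEmpty());
        }
    }
}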

EmptyPredictionContext.java

@@ -2,11 +2,13 @@ package org.antlr.v4.runtime.atn;
 
 public class EmptyPredictionContext extends SingletonPredictionContext {
    public EmptyPredictionContext() {
-       super(null,"$");
+       super(null,-1);
    }
 
    public boolean isEmpty() { return true; }
 
    @Override
-   public int findPayload(String payload) {
+   public int findInvokingState(int invokingState) {
        return 1;
    }
 

@@ -21,7 +23,12 @@ public class EmptyPredictionContext extends SingletonPredictionContext {
    }
 
    @Override
-   public String getPayload(int index) {
-       return payload;
+   public int getInvokingState(int index) {
+       return invokingState;
    }
 
+   @Override
+   public boolean equals(Object o) {
+       return this == o;
+   }
 }

LL1Analyzer.java

@@ -29,7 +29,6 @@
 
 package org.antlr.v4.runtime.atn;
 
-import org.antlr.v4.runtime.ParserRuleContext;
 import org.antlr.v4.runtime.RuleContext;
 import org.antlr.v4.runtime.Token;
 import org.antlr.v4.runtime.misc.IntervalSet;

@@ -63,7 +62,7 @@ public class LL1Analyzer {
            Set<ATNConfig> lookBusy = new HashSet<ATNConfig>();
            boolean seeThruPreds = false; // fail to get lookahead upon pred
            _LOOK(s.transition(alt - 1).target,
-                 ParserRuleContext.EMPTY,
+                 PredictionContext.EMPTY,
                  look[alt], lookBusy, seeThruPreds);
            if ( look[alt].size()==0 ) look[alt] = null;
        }

@@ -77,18 +76,19 @@ public class LL1Analyzer {
    public IntervalSet LOOK(@NotNull ATNState s, @Nullable RuleContext ctx) {
        IntervalSet r = new IntervalSet();
        boolean seeThruPreds = true; // ignore preds; get all lookahead
-       _LOOK(s, ctx, r, new HashSet<ATNConfig>(), seeThruPreds);
+       _LOOK(s, PredictionContext.fromRuleContext(ctx),
+             r, new HashSet<ATNConfig>(), seeThruPreds);
        return r;
    }
 
-   /** Computer set of tokens that can come next. If the context is EMPTY,
+   /** Compute set of tokens that can come next. If the context is EMPTY,
     *  then we don't go anywhere when we hit the end of the rule. We have
     *  the correct set. If the context is null, that means that we did not want
     *  any tokens following this rule--just the tokens that could be found within this
     *  rule. Add EPSILON to the set indicating we reached the end of the ruled out having
     *  to match a token.
     */
-   protected void _LOOK(@NotNull ATNState s, @Nullable RuleContext ctx,
+   protected void _LOOK(@NotNull ATNState s, @Nullable PredictionContext ctx,
                         @NotNull IntervalSet look,
                         @NotNull Set<ATNConfig> lookBusy,
                         boolean seeThruPreds)

@@ -102,22 +102,25 @@ public class LL1Analyzer {
                look.add(Token.EPSILON);
                return;
            }
-           if ( ctx.invokingState!=-1 ) {
-               ATNState invokingState = atn.states.get(ctx.invokingState);
-               RuleTransition rt = (RuleTransition)invokingState.transition(0);
-               ATNState retState = rt.followState;
-//             System.out.println("popping back to "+retState);
-               _LOOK(retState, ctx.parent, look, lookBusy, seeThruPreds);
-               return;
-           }
+           if ( ctx != PredictionContext.EMPTY ) {
+               // run thru all possible stack tops in ctx
+               for (SingletonPredictionContext p : ctx) {
+                   ATNState invokingState = atn.states.get(p.invokingState);
+                   RuleTransition rt = (RuleTransition)invokingState.transition(0);
+                   ATNState retState = rt.followState;
+//                 System.out.println("popping back to "+retState);
+                   _LOOK(retState, p.parent, look, lookBusy, seeThruPreds);
+               }
+               return;
+           }
        }
 
        int n = s.getNumberOfTransitions();
        for (int i=0; i<n; i++) {
            Transition t = s.transition(i);
            if ( t.getClass() == RuleTransition.class ) {
-               RuleContext newContext =
-                   new RuleContext(ctx, s.stateNumber);
+               PredictionContext newContext =
+                   new SingletonPredictionContext(ctx, s.stateNumber);
                _LOOK(t.target, newContext, look, lookBusy, seeThruPreds);
            }
            else if ( t.isEpsilon() && seeThruPreds ) {
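
LOOK now converts its RuleContext argument into a graph node up front via PredictionContext.fromRuleContext. A sketch of what that conversion does to a plain invocation chain, assuming the two-argument RuleContext constructor used by the old _LOOK code above is still available (state numbers are made up):

import org.antlr.v4.runtime.RuleContext;
import org.antlr.v4.runtime.atn.PredictionContext;

public class FromRuleContextSketch {
    public static void main(String[] args) {
        // a rule invocation chain [21 9 $]: invoked at state 9, then again at state 21
        RuleContext a = new RuleContext(null, 9);   // assumed (parent, invokingState) ctor
        RuleContext b = new RuleContext(a, 21);

        // fromRuleContext walks parent links and rebuilds the chain as singleton graph nodes
        PredictionContext ctx = PredictionContext.fromRuleContext(b);

        // getInvokingState(0)/getParent(0) read the single stack top of each node
        System.out.println(ctx.getInvokingState(0));              // 21
        System.out.println(ctx.getParent(0).getInvokingState(0)); // 9
    }
}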

LexerATNSimulator.java

@@ -29,7 +29,11 @@
 
 package org.antlr.v4.runtime.atn;
 
-import org.antlr.v4.runtime.*;
+import org.antlr.v4.runtime.CharStream;
+import org.antlr.v4.runtime.IntStream;
+import org.antlr.v4.runtime.Lexer;
+import org.antlr.v4.runtime.LexerNoViableAltException;
+import org.antlr.v4.runtime.Token;
 import org.antlr.v4.runtime.dfa.DFA;
 import org.antlr.v4.runtime.dfa.DFAState;
 import org.antlr.v4.runtime.misc.NotNull;

@@ -40,8 +44,6 @@ import java.io.OutputStream;
 
 /** "dup" of ParserInterpreter */
 public class LexerATNSimulator extends ATNSimulator {
-   public static final RuleContext EMPTY_LEXER_RULE_CONTEXT = new RuleContext();
-
    public static boolean debug = false;
    public static boolean dfa_debug = false;
    public static final int MAX_DFA_EDGE = 127; // forces unicode to stay in ATN

@@ -491,7 +493,7 @@ public class LexerATNSimulator extends ATNSimulator {
    protected ATNConfigSet computeStartState(@NotNull IntStream input,
                                             @NotNull ATNState p)
    {
-       RuleContext initialContext = EMPTY_LEXER_RULE_CONTEXT;
+       PredictionContext initialContext = PredictionContext.EMPTY;
        ATNConfigSet configs = new ATNConfigSet();
        for (int i=0; i<p.getNumberOfTransitions(); i++) {
            ATNState target = p.transition(i).target;

@@ -522,12 +524,15 @@ public class LexerATNSimulator extends ATNSimulator {
                configs.add(config);
                return;
            }
-           RuleContext newContext = config.context.parent; // "pop" invoking state
-           ATNState invokingState = atn.states.get(config.context.invokingState);
-           RuleTransition rt = (RuleTransition)invokingState.transition(0);
-           ATNState retState = rt.followState;
-           ATNConfig c = new ATNConfig(retState, config.alt, newContext);
-           closure(c, configs);
+           // run thru all possible stack tops in ctx
+           for (SingletonPredictionContext ctx : config.context) {
+               PredictionContext newContext = ctx.parent; // "pop" invoking state
+               ATNState invokingState = atn.states.get(ctx.invokingState);
+               RuleTransition rt = (RuleTransition)invokingState.transition(0);
+               ATNState retState = rt.followState;
+               ATNConfig c = new ATNConfig(retState, config.alt, newContext);
+               closure(c, configs);
+           }
            return;
        }
 

@@ -549,8 +554,8 @@ public class LexerATNSimulator extends ATNSimulator {
        ATNState p = config.state;
        ATNConfig c = null;
        if ( t.getClass() == RuleTransition.class ) {
-           RuleContext newContext =
-               new RuleContext(config.context, p.stateNumber);
+           PredictionContext newContext =
+               new SingletonPredictionContext(config.context, p.stateNumber);
            c = new ATNConfig(config, t.target, newContext);
        }
        else if ( t.getClass() == PredicateTransition.class ) {
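
The lexer now builds the same kind of stack: entering a rule pushes a SingletonPredictionContext, and reaching a rule stop state pops by walking back through parents. A stripped-down sketch of that push/pop discipline outside the simulator (state numbers are invented; only constructors and accessors shown in this commit are assumed):

import org.antlr.v4.runtime.atn.PredictionContext;
import org.antlr.v4.runtime.atn.SingletonPredictionContext;

public class LexerStackSketch {
    public static void main(String[] args) {
        // start state: empty stack, as computeStartState now does
        PredictionContext ctx = PredictionContext.EMPTY;

        // follow two rule transitions: push the invoking state each time
        ctx = new SingletonPredictionContext(ctx, 101);  // enter a fragment rule from state 101
        ctx = new SingletonPredictionContext(ctx, 205);  // enter a nested fragment from state 205

        // rule stop state: pop the stack top (here there is only one possible top)
        System.out.println("return past state " + ctx.getInvokingState(0)
                + "; remaining stack top = " + ctx.getParent(0).getInvokingState(0));
    }
}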

ParserATNSimulator.java

@@ -321,7 +321,12 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
            DecisionState decState = atn.getDecisionState(dfa.decision);
            boolean greedy = decState.isGreedy;
            boolean loopsSimulateTailRecursion = false;
-           ATNConfigSet s0_closure = computeStartState(dfa.atnStartState, ParserRuleContext.EMPTY, greedy, loopsSimulateTailRecursion);
+           boolean fullCtx = false;
+           ATNConfigSet s0_closure =
+               computeStartState(dfa.atnStartState,
+                                 ParserRuleContext.EMPTY,
+                                 greedy, loopsSimulateTailRecursion,
+                                 fullCtx);
            dfa.s0 = addDFAState(dfa, s0_closure);
 
            int alt = 0;

@@ -364,7 +369,11 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
        if ( s.isCtxSensitive ) {
            if ( dfa_debug ) System.out.println("ctx sensitive state "+outerContext+" in "+s);
            boolean loopsSimulateTailRecursion = true;
-           ATNConfigSet s0_closure = computeStartState(dfa.atnStartState, outerContext, greedy, loopsSimulateTailRecursion);
+           boolean fullCtx = false;
+           ATNConfigSet s0_closure =
+               computeStartState(dfa.atnStartState, outerContext,
+                                 greedy, loopsSimulateTailRecursion,
+                                 fullCtx);
            ATNConfigSet fullCtxSet =
                execATNWithFullContext(dfa, s, s0_closure,
                                       input, startIndex,

@@ -546,7 +555,12 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
            else {
                if ( debug ) System.out.println("RETRY with outerContext="+outerContext);
                loopsSimulateTailRecursion = true;
-               ATNConfigSet s0_closure = computeStartState(dfa.atnStartState, outerContext, greedy, loopsSimulateTailRecursion);
+               ATNConfigSet s0_closure =
+                   computeStartState(dfa.atnStartState,
+                                     outerContext,
+                                     greedy,
+                                     loopsSimulateTailRecursion,
+                                     fullCtx);
                fullCtxSet = execATNWithFullContext(dfa, D, s0_closure,
                                                    input, startIndex,
                                                    outerContext,

@@ -718,9 +732,12 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
    @NotNull
    public ATNConfigSet computeStartState(@NotNull ATNState p,
                                          @Nullable RuleContext ctx,
-                                         boolean greedy, boolean loopsSimulateTailRecursion)
+                                         boolean greedy,
+                                         boolean loopsSimulateTailRecursion,
+                                         boolean fullCtx)
    {
-       RuleContext initialContext = ctx; // always at least the implicit call to start rule
+       // always at least the implicit call to start rule
+       PredictionContext initialContext = PredictionContext.fromRuleContext(ctx);
        ATNConfigSet configs = new ATNConfigSet();
 
        for (int i=0; i<p.getNumberOfTransitions(); i++) {

@@ -932,17 +949,20 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
            }
            // We hit rule end. If we have context info, use it
            if ( config.context!=null && !config.context.isEmpty() ) {
-               RuleContext newContext = config.context.parent; // "pop" invoking state
-               ATNState invokingState = atn.states.get(config.context.invokingState);
-               RuleTransition rt = (RuleTransition)invokingState.transition(0);
-               ATNState retState = rt.followState;
-               ATNConfig c = new ATNConfig(retState, config.alt, newContext, config.semanticContext);
-               // While we have context to pop back from, we may have
-               // gotten that context AFTER having falling off a rule.
-               // Make sure we track that we are now out of context.
-               c.reachesIntoOuterContext = config.reachesIntoOuterContext;
-               assert depth > Integer.MIN_VALUE;
-               closure(c, configs, closureBusy, collectPredicates, greedy, loopsSimulateTailRecursion, depth - 1);
+               // run thru all possible stack tops in ctx
+               for (SingletonPredictionContext ctx : config.context) {
+                   PredictionContext newContext = ctx.parent; // "pop" invoking state
+                   ATNState invokingState = atn.states.get(ctx.invokingState);
+                   RuleTransition rt = (RuleTransition)invokingState.transition(0);
+                   ATNState retState = rt.followState;
+                   ATNConfig c = new ATNConfig(retState, config.alt, newContext, config.semanticContext);
+                   // While we have context to pop back from, we may have
+                   // gotten that context AFTER having falling off a rule.
+                   // Make sure we track that we are now out of context.
+                   c.reachesIntoOuterContext = config.reachesIntoOuterContext;
+                   assert depth > Integer.MIN_VALUE;
+                   closure(c, configs, closureBusy, collectPredicates, greedy, loopsSimulateTailRecursion, depth - 1);
+               }
                return;
            }
            else {

@@ -953,18 +973,24 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
        }
        else if ( loopsSimulateTailRecursion ) {
            if ( config.state.getClass()==StarLoopbackState.class ||
-                config.state.getClass()==PlusLoopbackState.class )
+               config.state.getClass()==PlusLoopbackState.class )
            {
-               config.context = new RuleContext(config.context, config.state.stateNumber);
+               config.context =
+                   new SingletonPredictionContext(config.context, config.state.stateNumber);
                // alter config; it's ok, since all calls to closure pass in a fresh config for us to chase
                if ( debug ) System.out.println("Loop back; push "+config.state.stateNumber+", stack="+config.context);
            }
            else if ( config.state.getClass()==LoopEndState.class ) {
                if ( debug ) System.out.println("Loop end; pop, stack="+config.context);
-               RuleContext p = config.context;
-               LoopEndState end = (LoopEndState) config.state;
-               while ( !p.isEmpty() && p.invokingState == end.loopBackStateNumber ) {
-                   p = config.context = config.context.parent; // "pop"
+               // run thru all possible stack tops in ctx
+               for (SingletonPredictionContext ctx : config.context) {
+                   SingletonPredictionContext p = ctx;
+                   LoopEndState end = (LoopEndState) config.state;
+                   // pop all the way back until we don't see the loopback state anymore
+                   while ( !p.isEmpty() && p.invokingState == end.loopBackStateNumber ) {
+                       // TODO: BROKEN. can't figure out how to leave config.context
+//                     p = config.context = config.context.parent; // "pop"
+                   }
+               }
            }
        }

@@ -1080,8 +1106,8 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
                           ", ctx="+config.context);
        }
        ATNState p = config.state;
-       RuleContext newContext =
-           new RuleContext(config.context, p.stateNumber);
+       PredictionContext newContext =
+           new SingletonPredictionContext(config.context, p.stateNumber);
        return new ATNConfig(config, t.target, newContext);
    }
 
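
The recurring pattern in closure() is now "one pop per possible stack top" instead of a single parent dereference. A sketch of that enumeration in isolation, using only the prediction-context classes from this commit (the states 9 and 12 are invented):

import org.antlr.v4.runtime.atn.ArrayPredictionContext;
import org.antlr.v4.runtime.atn.PredictionContext;
import org.antlr.v4.runtime.atn.SingletonPredictionContext;

public class PopAllTopsSketch {
    public static void main(String[] args) {
        PredictionContext root = PredictionContext.EMPTY;

        // one config context that says "we might have been invoked from state 9 or state 12"
        ArrayPredictionContext ctx = new ArrayPredictionContext(
                new SingletonPredictionContext(root, 9),
                new SingletonPredictionContext(root, 12));

        // what closure() now does at a rule stop: one pop per possible stack top
        for (SingletonPredictionContext top : ctx) {
            PredictionContext remaining = top.parent;   // "pop" the invoking state
            System.out.println("pop " + top.invokingState
                    + "; rest of stack empty? " + remaining.isEmpty());
        }
    }
}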

PredictionContext.java

@@ -1,15 +1,22 @@
 package org.antlr.v4.runtime.atn;
 
 import org.antlr.v4.runtime.ParserRuleContext;
 import org.antlr.v4.runtime.Recognizer;
 import org.antlr.v4.runtime.RuleContext;
 import org.antlr.v4.runtime.misc.Nullable;
 
 import java.util.ArrayDeque;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Deque;
 import java.util.IdentityHashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-public abstract class PredictionContext {
+public abstract class PredictionContext implements Iterable<SingletonPredictionContext> {
    public static final EmptyPredictionContext EMPTY = new EmptyPredictionContext();
    public static final int EMPTY_FULL_INVOKING_STATE = Integer.MAX_VALUE;
 
    public static int globalNodeCount = 0;
    public final int id;

@@ -18,13 +25,151 @@ public abstract class PredictionContext {
        id = globalNodeCount++;
    }
 
+   public static PredictionContext fromRuleContext(RuleContext outerContext) {
+       if ( outerContext==null ) outerContext = RuleContext.EMPTY;
+       if ( outerContext==RuleContext.EMPTY ) {
+           return PredictionContext.EMPTY;
+       }
+
+       PredictionContext parent = EMPTY;
+       if ( outerContext.parent != null ) {
+           parent = PredictionContext.fromRuleContext(outerContext.parent);
+       }
+
+       return new SingletonPredictionContext(parent, outerContext.invokingState);
+   }
+
+   @Override
+   public abstract Iterator<SingletonPredictionContext> iterator();
+
    public abstract int size();
 
    public abstract PredictionContext getParent(int index);
 
-   public abstract String getPayload(int index);
+   public abstract int getInvokingState(int index);
 
-   public abstract int findPayload(String payload);
+   public abstract int findInvokingState(int invokingState);
 
    public boolean isEmpty() { return false; }
 
    /** Two contexts conflict() if they are equals() or one is a stack suffix
     *  of the other. For example, contexts [21 12 $] and [21 9 $] do not
     *  conflict, but [21 $] and [21 12 $] do conflict. Note that I should
     *  probably not show the $ in this case. There is a dummy node for each
     *  stack that just means empty; $ is a marker that's all.
     *
     *  This is used in relation to checking conflicts associated with a
     *  single NFA state's configurations within a single DFA state.
     *  If there are configurations s and t within a DFA state such that
     *  s.state=t.state && s.alt != t.alt && s.ctx conflicts t.ctx then
     *  the DFA state predicts more than a single alt--it's nondeterministic.
     *  Two contexts conflict if they are the same or if one is a suffix
     *  of the other.
     *
     *  When comparing contexts, if one context has a stack and the other
     *  does not then they should be considered the same context. The only
     *  way for an NFA state p to have an empty context and a nonempty context
     *  is the case when closure falls off end of rule without a call stack
     *  and re-enters the rule with a context. This resolves the issue I
     *  discussed with Sriram Srinivasan Feb 28, 2005 about not terminating
     *  fast enough upon nondeterminism.
     *
     *  UPDATE FOR GRAPH STACK; no suffix
     */
    public boolean conflictsWith(PredictionContext other) {
        return this.equals(other);
    }
 
    @Override
    public String toString() {
        return toString(null);
    }
 
    public String toString(@Nullable Recognizer<?,?> recog) {
        return toString(recog, ParserRuleContext.EMPTY);
    }
 
    // recog null unless ParserRuleContext, in which case we use subclass toString(...)
    public String toString(@Nullable Recognizer<?,?> recog, RuleContext stop) {
        StringBuilder buf = new StringBuilder();
        PredictionContext p = this;
        buf.append("[");
//      while ( p != null && p != stop ) {
//          if ( !p.isEmpty() ) buf.append(p.invokingState);
//          if ( p.parent != null && !p.parent.isEmpty() ) buf.append(" ");
//          p = p.parent;
//      }
        buf.append("]");
        return buf.toString();
    }
 
    public String[] toStrings(Recognizer<?, ?> recognizer, int currentState) {
        return toStrings(recognizer, EMPTY, currentState);
    }
 
    // FROM SAM
    public String[] toStrings(Recognizer<?, ?> recognizer, PredictionContext stop, int currentState) {
        List<String> result = new ArrayList<String>();
 
        outer:
        for (int perm = 0; ; perm++) {
            int offset = 0;
            boolean last = true;
            PredictionContext p = this;
            int stateNumber = currentState;
            StringBuilder localBuffer = new StringBuilder();
            localBuffer.append("[");
            while ( !p.isEmpty() && p != stop ) {
                int index = 0;
                if (p.size() > 0) {
                    int bits = 1;
                    while ((1 << bits) < p.size()) {
                        bits++;
                    }
 
                    int mask = (1 << bits) - 1;
                    index = (perm >> offset) & mask;
                    last &= index >= p.size() - 1;
                    if (index >= p.size()) {
                        continue outer;
                    }
                    offset += bits;
                }
 
                if ( recognizer!=null ) {
                    if (localBuffer.length() > 1) {
                        // first char is '[', if more than that this isn't the first rule
                        localBuffer.append(' ');
                    }
 
                    ATN atn = recognizer.getATN();
                    ATNState s = atn.states.get(stateNumber);
                    String ruleName = recognizer.getRuleNames()[s.ruleIndex];
                    localBuffer.append(ruleName);
                }
                else if ( p.getInvokingState(index)!=EMPTY_FULL_INVOKING_STATE ) {
                    if ( !p.isEmpty() ) {
                        if (localBuffer.length() > 1) {
                            // first char is '[', if more than that this isn't the first rule
                            localBuffer.append(' ');
                        }
 
                        localBuffer.append(p.getInvokingState(index));
                    }
                }
                stateNumber = p.getInvokingState(index);
                p = p.getParent(index);
            }
            localBuffer.append("]");
            result.add(localBuffer.toString());
 
            if (last) {
                break;
            }
        }
 
        return result.toArray(new String[result.size()]);
    }
 
    // dispatch
    public static PredictionContext merge(PredictionContext a, PredictionContext b, boolean rootIsWildcard) {

@@ -47,19 +192,19 @@ public abstract class PredictionContext {
            if ( a == EMPTY ) return a;
            if ( b == EMPTY ) return b;
        }
-       if ( a.payload.equals(b.payload) ) { // a == b
+       if ( a.invokingState==b.invokingState ) { // a == b
            PredictionContext parent = merge(a.parent, b.parent, rootIsWildcard);
            if ( parent == a.parent ) return a;
            if ( parent == b.parent ) return b;
            // new joined parent so create new singleton pointing to it
-           return new SingletonPredictionContext(parent, a.payload);
+           return new SingletonPredictionContext(parent, a.invokingState);
        }
        else { // a != b payloads differ
            // parents differ, join them; nothing to reuse
            // sort payloads
-           String[] payloads = {a.payload, b.payload};
-           if ( a.payload.compareTo(b.payload) > 0 ) {
-               payloads = new String[] {b.payload, a.payload};
+           int[] payloads = {a.invokingState, b.invokingState};
+           if ( a.invokingState > b.invokingState ) {
+               payloads = new int[] {b.invokingState, a.invokingState};
            }
            if ( a.parent.equals(b.parent) ) {
                // parents are equal, pick left one as parent to reuse

@@ -84,13 +229,13 @@ public abstract class PredictionContext {
        int i = 0; // walks a
        int j = 0; // walks b
        int k = 0; // walks M target array
-       String[] mergedPayloads = new String[a.payloads.length + b.payloads.length];
-       PredictionContext[] mergedParents = new PredictionContext[a.payloads.length + b.payloads.length];
+       int[] mergedPayloads = new int[a.invokingStates.length + b.invokingStates.length];
+       PredictionContext[] mergedParents = new PredictionContext[a.invokingStates.length + b.invokingStates.length];
        ArrayPredictionContext M = new ArrayPredictionContext(mergedParents, mergedPayloads);
-       while ( i<a.payloads.length && j<b.payloads.length ) {
-           if ( a.payloads[i].equals(b.payloads[j]) ) {
+       while ( i<a.invokingStates.length && j<b.invokingStates.length ) {
+           if ( a.invokingStates[i]==b.invokingStates[j] ) {
                // same payload; stack tops are equal
-               String payload = a.payloads[i];
+               int payload = a.invokingStates[i];
                SingletonPredictionContext a_ = new SingletonPredictionContext(a.parents[i], payload);
                SingletonPredictionContext b_ = new SingletonPredictionContext(b.parents[j], payload);
                // if same stack tops, must yield merged singleton

@@ -98,43 +243,43 @@ public abstract class PredictionContext {
                // if r is same as a_ or b_, we get to keep existing, else new
                if ( r==a_ ) {
                    M.parents[k] = a.parents[i];
-                   M.payloads[k] = a.payloads[i];
+                   M.invokingStates[k] = a.invokingStates[i];
                }
                else if ( r==b_ ) {
                    M.parents[k] = b.parents[j];
-                   M.payloads[k] = b.payloads[j];
+                   M.invokingStates[k] = b.invokingStates[j];
                }
                else {
                    M.parents[k] = r.parent;
-                   M.payloads[k] = r.payload;
+                   M.invokingStates[k] = r.invokingState;
                }
                i++; // hop over left one as usual
                j++; // but also skip one in right side since we merge
            }
-           else if ( a.payloads[i].compareTo(b.payloads[j]) < 0 ) {
+           else if ( a.invokingStates[i]<b.invokingStates[j] ) {
                M.parents[k] = a.parents[i];
-               M.payloads[k] = a.payloads[i];
+               M.invokingStates[k] = a.invokingStates[i];
                i++;
            }
            else {
                M.parents[k] = a.parents[j];
-               M.payloads[k] = b.payloads[j];
+               M.invokingStates[k] = b.invokingStates[j];
                j++;
            }
            k++;
        }
        // copy over any payloads remaining in either array
-       if (i < a.payloads.length) {
-           for (int p = i; p < a.payloads.length; p++) {
+       if (i < a.invokingStates.length) {
+           for (int p = i; p < a.invokingStates.length; p++) {
                M.parents[k] = a.parents[p];
-               M.payloads[k] = a.payloads[p];
+               M.invokingStates[k] = a.invokingStates[p];
                k++;
            }
        }
        else {
-           for (int p = j; p < b.payloads.length; p++) {
+           for (int p = j; p < b.invokingStates.length; p++) {
                M.parents[k] = b.parents[p];
-               M.payloads[k] = b.payloads[p];
+               M.invokingStates[k] = b.invokingStates[p];
                k++;
            }
        }

@@ -162,13 +307,13 @@ public abstract class PredictionContext {
        if ( current instanceof SingletonPredictionContext ) {
            String s = String.valueOf(current.id);
            buf.append(" s").append(s);
-           buf.append(" [label=\"").append(current.getPayload(0)).append("\"];\n");
+           buf.append(" [label=\"").append(current.getInvokingState(0)).append("\"];\n");
            continue;
        }
        ArrayPredictionContext arr = (ArrayPredictionContext)current;
        buf.append(" s").append(arr.id);
        buf.append(" [label=\"");
-       buf.append(Arrays.toString(arr.payloads));
+       buf.append(Arrays.toString(arr.invokingStates));
        buf.append("\"];\n");
    }
 
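
merge is the heart of the graph stack: equal stack tops collapse into one node over merged parents, while unequal tops become a sorted array node. A sketch of how the entry point above is meant to be called; since this is a work-in-progress commit, the exact merged shape is not guaranteed, only the calling convention and the intended rules:

import org.antlr.v4.runtime.atn.PredictionContext;
import org.antlr.v4.runtime.atn.SingletonPredictionContext;

public class MergeSketch {
    public static void main(String[] args) {
        PredictionContext root = PredictionContext.EMPTY;

        // [21 9 $] and [21 12 $]: same top, different second element
        SingletonPredictionContext x =
                new SingletonPredictionContext(new SingletonPredictionContext(root, 9), 21);
        SingletonPredictionContext y =
                new SingletonPredictionContext(new SingletonPredictionContext(root, 12), 21);

        // equal tops (21) are collapsed into one node; the differing parents get merged
        // underneath it (for unequal tops, merge instead builds a sorted array node)
        PredictionContext merged = PredictionContext.merge(x, y, true);

        // expected per the rules above: a single node with top state 21 over a merged parent
        System.out.println("merged top state = " + merged.getInvokingState(0));
    }
}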

SingletonPredictionContext.java

@@ -1,12 +1,30 @@
 package org.antlr.v4.runtime.atn;
 
+import java.util.Iterator;
+
 public class SingletonPredictionContext extends PredictionContext {
    public final PredictionContext parent;
-   public final String payload;
+   public final int invokingState;
 
-   public SingletonPredictionContext(PredictionContext parent, String payload) {
+   public SingletonPredictionContext(PredictionContext parent, int invokingState) {
        this.parent = parent;
-       this.payload = payload;
+       this.invokingState = invokingState;
    }
 
+   @Override
+   public Iterator<SingletonPredictionContext> iterator() {
+       final SingletonPredictionContext self = this;
+       return new Iterator<SingletonPredictionContext>() {
+           int i = 0;
+           @Override
+           public boolean hasNext() { return i>0; }
+
+           @Override
+           public SingletonPredictionContext next() { i++; return self; }
+
+           @Override
+           public void remove() { throw new UnsupportedOperationException(); }
+       };
+   }
+
    @Override

@@ -21,14 +39,14 @@ public class SingletonPredictionContext extends PredictionContext {
    }
 
    @Override
-   public String getPayload(int index) {
+   public int getInvokingState(int index) {
        assert index == 0;
-       return payload;
+       return invokingState;
    }
 
    @Override
-   public int findPayload(String payload) {
-       return this.payload.equals(payload) ? 0 : -1;
+   public int findInvokingState(int invokingState) {
+       return this.invokingState == invokingState ? 0 : -1;
    }
 
    @Override

@@ -45,7 +63,7 @@ public class SingletonPredictionContext extends PredictionContext {
        }
 
        SingletonPredictionContext s = (SingletonPredictionContext)o;
-       return payload.equals(s.payload) && parent.equals(s.parent);
+       return invokingState == s.invokingState && parent.equals(s.parent);
    }
 
    @Override

@@ -55,6 +73,6 @@ public class SingletonPredictionContext extends PredictionContext {
 
    @Override
    public String toString() {
-       return payload+":"+id;
+       return invokingState +":"+id;
    }
 }
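
Equality is structural over (invokingState, parent), which is what allows equal stacks to be detected and merged rather than duplicated. A small sketch (distinct objects, equal values):

import org.antlr.v4.runtime.atn.PredictionContext;
import org.antlr.v4.runtime.atn.SingletonPredictionContext;

public class SharingSketch {
    public static void main(String[] args) {
        PredictionContext root = PredictionContext.EMPTY;

        // two separately built stacks with the same shape: [42 $]
        SingletonPredictionContext p = new SingletonPredictionContext(root, 42);
        SingletonPredictionContext q = new SingletonPredictionContext(root, 42);

        System.out.println(p == q);      // false: distinct nodes (each gets its own id)
        System.out.println(p.equals(q)); // true: same invoking state, same parent
    }
}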

TestGraphNodes.java

@@ -364,7 +364,8 @@ public class TestGraphNodes extends TestCase {
    }
 
    public SingletonPredictionContext createSingleton(PredictionContext parent, String payload) {
-       SingletonPredictionContext a = new SingletonPredictionContext(parent, payload);
-       return a;
+//     SingletonPredictionContext a = new SingletonPredictionContext(parent, payload);
+//     return a;
+       return null;
    }
 }