moved empty context check for retry

altered args on reporting methods
reporting methods moved from parser to error strategy
fixed full ctx retry in DFA (used to only work in ATN)
record conflictSet, not just a boolean, in DFA
added debugging support to BaseRecognizer
added DiagnosticErrorStrategy

[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 9549]
parrt 2011-12-10 16:24:41 -08:00
parent b682450e29
commit b1c7edb8d8
8 changed files with 484 additions and 322 deletions
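Taken together, the pieces wire up like this: install the new DiagnosticErrorStrategy on a generated parser so the prediction-reporting callbacks surface through notifyListeners(), then use the new BaseRecognizer debugging helpers to inspect the decision DFAs. A minimal sketch, not part of the commit; the generated TParser class, its start rule s, and the token-stream setup are assumptions:

import org.antlr.v4.runtime.*;

public class DiagDemo {
    // Hypothetical driver; TParser and rule `s` stand in for any generated parser/start rule.
    public static void parseWithDiagnostics(TokenStream tokens) throws RecognitionException {
        TParser parser = new TParser(tokens);
        // New in this commit: the report* callbacks live on the error strategy, and
        // DiagnosticErrorStrategy forwards them to the parser's listeners.
        parser.setErrorHandler(new DiagnosticErrorStrategy());
        parser.s();        // run the start rule
        parser.dumpDFA();  // new BaseRecognizer helper: print the DFA built for each decision
    }
}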

ANTLRErrorStrategy.java

@@ -1,5 +1,13 @@
 package org.antlr.v4.runtime;
+
+import org.antlr.v4.runtime.atn.ATNConfig;
+import org.antlr.v4.runtime.atn.SemanticContext;
+import org.antlr.v4.runtime.dfa.DFA;
+import org.antlr.v4.runtime.misc.IntervalSet;
+import org.antlr.v4.runtime.misc.NotNull;
+import org.antlr.v4.runtime.misc.Nullable;
+import org.antlr.v4.runtime.misc.OrderedHashSet;
+
 /** The interface for defining strategies to deal with syntax errors
  *  encountered during a parse by ANTLR-generated parsers and tree parsers.
  *  We distinguish between three different kinds of errors:
@@ -23,8 +31,8 @@ package org.antlr.v4.runtime;
  */
 public interface ANTLRErrorStrategy<Symbol> {
     /** Report any kind of RecognitionException. */
-    void reportError(BaseRecognizer<Symbol> recognizer,
-                     RecognitionException e)
+    void reportError(@NotNull BaseRecognizer<Symbol> recognizer,
+                     @Nullable RecognitionException e)
         throws RecognitionException;

     /** When matching elements within alternative, use this method
@@ -43,7 +51,7 @@ public interface ANTLRErrorStrategy<Symbol> {
      *  "inserting" tokens, we need to specify what that implicitly created
      *  token is. We use object, because it could be a tree node.
      */
-    Symbol recoverInline(BaseRecognizer<Symbol> recognizer)
+    Symbol recoverInline(@NotNull BaseRecognizer<Symbol> recognizer)
         throws RecognitionException;

     /** Resynchronize the parser by consuming tokens until we find one
@@ -51,8 +59,8 @@ public interface ANTLRErrorStrategy<Symbol> {
      *  the current rule. The exception contains info you might want to
      *  use to recover better.
      */
-    void recover(BaseRecognizer<Symbol> recognizer,
-                 RecognitionException e);
+    void recover(@NotNull BaseRecognizer<Symbol> recognizer,
+                 @Nullable RecognitionException e);

     /** Make sure that the current lookahead symbol is consistent with
      *  what were expecting at this point in the ATN. You can call this
@@ -81,14 +89,14 @@
      *  turn off this functionality by simply overriding this method as
      *  a blank { }.
      */
-    void sync(BaseRecognizer<Symbol> recognizer);
+    void sync(@NotNull BaseRecognizer<Symbol> recognizer);

     /** Notify handler that parser has entered an error state. The
      *  parser currently doesn't call this--the handler itself calls this
      *  in report error methods. But, for symmetry with endErrorCondition,
      *  this method is in the interface.
      */
-    void beginErrorCondition(BaseRecognizer<Symbol> recognizer);
+    void beginErrorCondition(@NotNull BaseRecognizer<Symbol> recognizer);

     /** Is the parser in the process of recovering from an error? Upon
      *  a syntax error, the parser enters recovery mode and stays there until
@@ -96,11 +104,52 @@
      *  avoid sending out spurious error messages. We only want one error
      *  message per syntax error
      */
-    boolean inErrorRecoveryMode(BaseRecognizer<Symbol> recognizer);
+    boolean inErrorRecoveryMode(@NotNull BaseRecognizer<Symbol> recognizer);

     /** Reset the error handler. Call this when the parser
      *  matches a valid token (indicating no longer in recovery mode)
      *  and from its own reset method.
      */
-    void endErrorCondition(BaseRecognizer<Symbol> recognizer);
+    void endErrorCondition(@NotNull BaseRecognizer<Symbol> recognizer);
+
+    /** Called when the parser detects a true ambiguity: an input sequence can be matched
+     *  literally by two or more pass through the grammar. ANTLR resolves the ambiguity in
+     *  favor of the alternative appearing first in the grammar. The start and stop index are
+     *  zero-based absolute indices into the token stream. ambigAlts is a set of alternative numbers
+     *  that can match the input sequence. This method is only called when we are parsing with
+     *  full context.
+     */
+    void reportAmbiguity(@NotNull BaseRecognizer<Symbol> recognizer,
+                         int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
+                         @NotNull OrderedHashSet<ATNConfig> configs);
+
+    /** Called by the parser when it detects an input sequence that can be matched by two paths
+     *  through the grammar. The difference between this and the reportAmbiguity method lies in
+     *  the difference between Strong LL parsing and LL parsing. If we are not parsing with context,
+     *  we can't be sure if a conflict is an ambiguity or simply a weakness in the Strong LL parsing
+     *  strategy. If we are parsing with full context, this method is never called.
+     */
+    void reportConflict(@NotNull BaseRecognizer<Symbol> recognizer,
+                        int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
+                        @NotNull OrderedHashSet<ATNConfig> configs);
+
+    /** Called by the parser when it find a conflict that is resolved by retrying the parse
+     *  with full context. This is not a warning; it simply notifies you that your grammar
+     *  is more complicated than Strong LL can handle. The parser moved up to full context
+     *  parsing for that input sequence.
+     */
+    void reportContextSensitivity(@NotNull BaseRecognizer<Symbol> recognizer,
+                                  @NotNull DFA dfa,
+                                  int startIndex, int stopIndex,
+                                  @NotNull OrderedHashSet<ATNConfig> configs);
+
+    /** Called by the parser when it finds less than n-1 predicates for n ambiguous alternatives.
+     *  If there are n-1, we assume that the missing predicate is !(the "or" of the other predicates).
+     *  If there are fewer than n-1, then we don't know which make it alternative to protect
+     *  if the predicates fail.
+     */
+    void reportInsufficientPredicates(@NotNull BaseRecognizer<Symbol> recognizer,
+                                      int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
+                                      @NotNull SemanticContext[] altToPred,
+                                      @NotNull OrderedHashSet<ATNConfig> configs);
 }
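DefaultErrorStrategy (below) now stubs these four callbacks out, and the new DiagnosticErrorStrategy overrides all of them to log. A custom strategy only needs to override the ones it cares about; a minimal sketch, with the AmbiguityLogger class name invented here for illustration:

import org.antlr.v4.runtime.BaseRecognizer;
import org.antlr.v4.runtime.DefaultErrorStrategy;
import org.antlr.v4.runtime.atn.ATNConfig;
import org.antlr.v4.runtime.misc.IntervalSet;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.OrderedHashSet;

public class AmbiguityLogger<Symbol> extends DefaultErrorStrategy<Symbol> {
    @Override
    public void reportAmbiguity(@NotNull BaseRecognizer<Symbol> recognizer,
                                int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
                                @NotNull OrderedHashSet<ATNConfig> configs)
    {
        // getInputString(start, stop) is the helper this commit moves onto BaseRecognizer
        recognizer.notifyListeners("ambiguous alts " + ambigAlts + " on input " +
                                   recognizer.getInputString(startIndex, stopIndex));
    }
}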

BaseRecognizer.java

@@ -28,11 +28,14 @@
  */
 package org.antlr.v4.runtime;

-import org.antlr.v4.runtime.atn.*;
+import org.antlr.v4.runtime.atn.ATN;
+import org.antlr.v4.runtime.atn.ATNState;
+import org.antlr.v4.runtime.atn.ParserATNSimulator;
+import org.antlr.v4.runtime.atn.RuleTransition;
+import org.antlr.v4.runtime.dfa.DFA;
 import org.antlr.v4.runtime.misc.IntervalSet;
+import org.antlr.v4.runtime.misc.NotNull;
 import org.antlr.v4.runtime.misc.Nullable;
-import org.antlr.v4.runtime.misc.OrderedHashSet;
+import org.antlr.v4.runtime.tree.ASTNodeStream;
 import org.antlr.v4.runtime.tree.ParseTreeListener;

 import java.util.ArrayList;
@@ -164,19 +167,34 @@ public abstract class BaseRecognizer<Symbol> extends Recognizer<Symbol, ParserAT
         return syntaxErrors;
     }

     @Override
     public abstract SymbolStream<Symbol> getInputStream();

+    public String getInputString(int start) {
+        return getInputString(start, getInputStream().index());
+    }
+
+    public String getInputString(int start, int stop) {
+        SymbolStream<Symbol> input = getInputStream();
+        if ( input instanceof TokenStream ) {
+            return ((TokenStream)input).toString(start,stop);
+        }
+        else if ( input instanceof ASTNodeStream) {
+            return ((ASTNodeStream<Symbol>)input).toString(input.get(start),input.get(stop));
+        }
+        return "n/a";
+    }
+
     /** Match needs to return the current input symbol, which gets put
      *  into the label for the associated token ref; e.g., x=ID. Token
      *  and tree parsers need to return different objects. Rather than test
      *  for input stream type or change the IntStream interface, I use
      *  a simple method to ask the recognizer to tell me what the current
      *  input symbol is.
      */
     public abstract Symbol getCurrentInputSymbol();

     public void notifyListeners(String msg) {
         notifyListeners(getCurrentInputSymbol(), msg, null);
     }
@@ -370,6 +388,30 @@ public abstract class BaseRecognizer<Symbol> extends Recognizer<Symbol, ParserAT
         return null;
     }

+    /** For debugging and other purposes */
+    public List<String> getDFAStrings() {
+        List<String> s = new ArrayList<String>();
+        for (int d = 0; d < _interp.decisionToDFA.length; d++) {
+            DFA dfa = _interp.decisionToDFA[d];
+            s.add( dfa.toString(getTokenNames()) );
+        }
+        return s;
+    }
+
+    /** For debugging and other purposes */
+    public void dumpDFA() {
+        boolean seenOne = false;
+        for (int d = 0; d < _interp.decisionToDFA.length; d++) {
+            DFA dfa = _interp.decisionToDFA[d];
+            if ( dfa!=null ) {
+                if ( seenOne ) System.out.println();
+                System.out.println("Decision " + dfa.decision + ":");
+                System.out.print(dfa.toString(getTokenNames()));
+                seenOne = true;
+            }
+        }
+    }
+
     public abstract String getSourceName();

     /** A convenience method for use most often with template rewrites.
@@ -396,26 +438,4 @@ public abstract class BaseRecognizer<Symbol> extends Recognizer<Symbol, ParserAT
         _ctx.s = atnState;
         if ( traceATNStates ) _ctx.trace(atnState);
     }
-
-    public void reportConflict(int startIndex, int stopIndex, IntervalSet alts,
-                               OrderedHashSet<ATNConfig> configs) {}
-
-    public void reportContextSensitivity(int startIndex, int stopIndex,
-                                         IntervalSet alts,
-                                         OrderedHashSet<ATNConfig> configs) {}
-
-    /** If context sensitive parsing, we know it's ambiguity not conflict */
-    public void reportAmbiguity(int startIndex, int stopIndex,
-                                @NotNull IntervalSet ambigAlts,
-                                @NotNull OrderedHashSet<ATNConfig> configs) {
-    }
-
-    public void reportInsufficientPredicates(int startIndex, int stopIndex,
-                                             @NotNull IntervalSet ambigAlts,
-                                             @NotNull SemanticContext[] altToPred,
-                                             @NotNull OrderedHashSet<ATNConfig> configs)
-    {
-    }
 }
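The two new helpers give tests and tools a way to inspect the prediction DFAs after a parse; the rewritten test at the bottom of this commit calls dumpDFA() from a grammar @after action. A small sketch of calling them from ordinary client code (the wrapper class is illustrative only):

import java.util.List;
import org.antlr.v4.runtime.BaseRecognizer;

final class DFADump {
    // Print every decision DFA built during the last parse of any BaseRecognizer subclass.
    static <S> void printDFAs(BaseRecognizer<S> parser) {
        List<String> dfas = parser.getDFAStrings();
        for (int d = 0; d < dfas.size(); d++) {
            System.out.println("decision " + d + ":\n" + dfas.get(d));
        }
        // parser.dumpDFA() prints only the decisions that actually acquired a DFA
    }
}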

DefaultErrorStrategy.java

@@ -30,8 +30,10 @@
 package org.antlr.v4.runtime;

 import org.antlr.v4.runtime.atn.*;
+import org.antlr.v4.runtime.dfa.DFA;
 import org.antlr.v4.runtime.misc.IntervalSet;
 import org.antlr.v4.runtime.misc.NotNull;
+import org.antlr.v4.runtime.misc.OrderedHashSet;
 import org.antlr.v4.runtime.tree.AST;

 /** This is the default error handling mechanism for ANTLR parsers
@@ -545,10 +547,38 @@ public class DefaultErrorStrategy<Symbol> implements ANTLRErrorStrategy<Symbol>
 //        System.err.println("consumeUntil("+set.toString(recognizer.getTokenNames())+")");
         int ttype = recognizer.getInputStream().LA(1);
         while (ttype != Token.EOF && !set.contains(ttype) ) {
             //System.out.println("consume during recover LA(1)="+getTokenNames()[input.LA(1)]);
 //            recognizer.getInputStream().consume();
             recognizer.consume();
             ttype = recognizer.getInputStream().LA(1);
         }
     }

+    @Override
+    public void reportAmbiguity(@NotNull BaseRecognizer<Symbol> recognizer,
+                                int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
+                                @NotNull OrderedHashSet<ATNConfig> configs)
+    {
+    }
+
+    @Override
+    public void reportConflict(@NotNull BaseRecognizer<Symbol> recognizer,
+                               int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
+                               @NotNull OrderedHashSet<ATNConfig> configs)
+    {
+    }
+
+    @Override
+    public void reportContextSensitivity(@NotNull BaseRecognizer<Symbol> recognizer, @NotNull DFA dfa,
+                                         int startIndex, int stopIndex, @NotNull OrderedHashSet<ATNConfig> configs)
+    {
+    }
+
+    @Override
+    public void reportInsufficientPredicates(@NotNull BaseRecognizer<Symbol> recognizer,
+                                             int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
+                                             @NotNull SemanticContext[] altToPred,
+                                             @NotNull OrderedHashSet<ATNConfig> configs)
+    {
+    }
 }

DiagnosticErrorStrategy.java (new file)

@@ -0,0 +1,75 @@
/*
[The "BSD license"]
Copyright (c) 2011 Terence Parr
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr.v4.runtime;
import org.antlr.v4.runtime.atn.ATNConfig;
import org.antlr.v4.runtime.atn.SemanticContext;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntervalSet;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.OrderedHashSet;
import java.util.Arrays;
public class DiagnosticErrorStrategy<Symbol> extends DefaultErrorStrategy<Symbol> {
@Override
public void reportAmbiguity(@NotNull BaseRecognizer<Symbol> recognizer,
int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
@NotNull OrderedHashSet<ATNConfig> configs)
{
recognizer.notifyListeners("reportAmbiguity " + ambigAlts + ":" + configs + ", input=" +
recognizer.getInputString(startIndex, stopIndex));
}
@Override
public void reportConflict(@NotNull BaseRecognizer<Symbol> recognizer,
int startIndex, int stopIndex, IntervalSet ambigAlts, OrderedHashSet<ATNConfig> configs) {
recognizer.notifyListeners("reportConflict " + ambigAlts + ":" + configs + ", input=" +
recognizer.getInputString(startIndex, stopIndex));
}
@Override
public void reportContextSensitivity(@NotNull BaseRecognizer<Symbol> recognizer, @NotNull DFA dfa,
int startIndex, int stopIndex, @NotNull OrderedHashSet<ATNConfig> configs)
{
recognizer.notifyListeners("reportContextSensitivity: " + configs + ", input=" +
recognizer.getInputString(startIndex, stopIndex));
}
@Override
public void reportInsufficientPredicates(@NotNull BaseRecognizer<Symbol> recognizer,
int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
@NotNull SemanticContext[] altToPred,
@NotNull OrderedHashSet<ATNConfig> configs)
{
recognizer.notifyListeners("reportInsufficientPredicates " + ambigAlts + ":" + Arrays.toString(altToPred) +
", " + configs + ", input=" + recognizer.getInputString(startIndex, stopIndex));
}
}

ParserATNSimulator.java

@@ -65,7 +65,8 @@ public class ParserATNSimulator<Symbol> extends ATNSimulator {
     public final DFA[] decisionToDFA;

     /** By default we do full context-sensitive LL(*) parsing not
-     *  Strong LL(*) parsing. That means we use context information
+     *  Strong LL(*) parsing. If we fail with Strong LL(*) we
+     *  try full LL(*). That means we rewind and use context information
      *  when closure operations fall off the end of the rule that
      *  holds the decision were evaluating.
      */
@@ -78,7 +79,7 @@
     *  value is reset upon prediction call to adaptivePredict() or the
     *  predictATN/DFA methods.
     *
-    *  The full stack at any moment is [config.outerContext + config.context].
+    *  The full stack at any moment is [outerContext + config.context].
     */
    @NotNull
    protected ParserRuleContext outerContext = ParserRuleContext.EMPTY;
@@ -182,10 +183,7 @@
    {
        DFA dfa = new DFA(startState);
        ParserRuleContext ctx = ParserRuleContext.EMPTY;
-       OrderedHashSet<ATNConfig> s0_closure =
-           computeStartState(dfa.decision, startState, ctx);
        return predictATN(dfa, input, ctx, false);
-//     return execATN(input, dfa, input.index(), s0_closure, false);
    }

    public int execDFA(@NotNull SymbolStream<Symbol> input, @NotNull DFA dfa,
@@ -204,20 +202,49 @@
        int t = input.LA(1);
    loop:
        while ( true ) {
-           if ( dfa_debug ) System.out.println("DFA state "+s.stateNumber+" LA(1)=="+t);
+           if ( dfa_debug ) System.out.println("DFA state "+s.stateNumber+" LA(1)=="+getLookaheadName(input));
            // TODO: ctxSensitive
            if ( s.isCtxSensitive ) {
                Integer predI = s.ctxToPrediction.get(outerContext);
                if ( dfa_debug ) System.out.println("ctx sensitive state "+outerContext+"->"+predI+
                                                    " in "+s);
                if ( predI!=null ) return predI;
-//              System.out.println("start all over with ATN; can't use DFA");
-               // start all over with ATN; can't use DFA
-               input.seek(startIndex);
-               DFA throwAwayDFA = new DFA(dfa.atnStartState);
-               int alt = execATN(input, throwAwayDFA, startIndex, s0.configs, true);
-               s.ctxToPrediction.put(outerContext, alt);
-               return alt;
+               // TODO: this was cut / pasted from retryWithContext. refactor somehow to call retryWithContext
+               int old_k = input.index();
+               input.seek(startIndex);
+               DFA ctx_dfa = new DFA(dfa.atnStartState);
+               int ctx_alt = predictATN(ctx_dfa, input, outerContext, true);
+               if ( retry_debug ) System.out.println("retry from DFA predicts "+ctx_alt+
+                                                     " with conflict="+(ctx_dfa.conflictSet!=null) +
+                                                     " full ctx dfa="+ctx_dfa.toString(parser.getTokenNames()));
+               if ( ctx_dfa.conflictSet!=null ) {
+                   reportAmbiguity(startIndex, input.index(), getAmbiguousAlts(ctx_dfa.conflictSet), ctx_dfa.conflictSet);
+               }
+               else {
+                   if ( old_k != input.index() ) {
+                       if ( retry_debug ) System.out.println("used diff amount of k; old="+(old_k-startIndex+1)+", new="+(input.index()-startIndex+1));
+                   }
+                   retry_with_context_indicates_no_conflict++;
+                   reportContextSensitivity(dfa, ctx_dfa.conflictSet, startIndex, input.index());
+               }
+               // END cut/paste from retryWithContext
+               s.ctxToPrediction.put(outerContext, ctx_alt);
+               if ( retry_debug ) System.out.println("updated DFA:\n"+dfa.toString(parser.getTokenNames()));
+               return ctx_alt;
+
+////            System.out.println("start all over with ATN; can't use DFA");
+//             // start all over with ATN; can't use DFA
+//             input.seek(startIndex);
+//             DFA throwAwayDFA = new DFA(dfa.atnStartState);
+//             int alt = execATN(input, throwAwayDFA, startIndex, s0.configs, true);
+//             if ( dfa_debug ) {
+//                 System.out.print("back from DFA update for ctx sensitive state; DFA=\n" + dfa.toString(parser.getTokenNames()));
+//             }
+//             s.ctxToPrediction.put(outerContext, alt);
+//             return alt;
            }
            if ( s.isAcceptState ) {
                if ( s.predicates!=null ) {
@@ -235,11 +262,11 @@
            }
            // if no edge, pop over to ATN interpreter, update DFA and return
            if ( s.edges == null || t >= s.edges.length || t < -1 || s.edges[t+1] == null ) {
-               if ( dfa_debug ) System.out.println("no edge for "+t);
+               if ( dfa_debug ) System.out.println("no edge for "+parser.getTokenNames()[t]);
                int alt = -1;
                if ( dfa_debug ) {
                    System.out.println("ATN exec upon "+
-                                      getInputString(input, startIndex) +
+                                      parser.getInputString(startIndex) +
                                       " at DFA state "+s.stateNumber);
                }
                try {
@@ -299,20 +326,6 @@
        return prevAcceptState.prediction;
    }

-   public String getInputString(@NotNull SymbolStream<Symbol> input, int start) {
-       return getInputString(input, start, input.index());
-   }
-
-   public String getInputString(@NotNull SymbolStream<Symbol> input, int start, int stop) {
-       if ( input instanceof TokenStream ) {
-           return ((TokenStream)input).toString(start,stop);
-       }
-       else if ( input instanceof ASTNodeStream) {
-           return ((ASTNodeStream<Symbol>)input).toString(input.get(start),input.get(stop));
-       }
-       return "n/a";
-   }
-
    public int execATN(@NotNull SymbolStream<Symbol> input,
                       @NotNull DFA dfa,
                       int startIndex,
@@ -368,11 +381,11 @@
            }
            String rname = getRuleName(i);
            System.out.println("AMBIG dec "+dfa.decision+" in "+rname+" for alt "+ambigAlts+" upon "+
-                              getInputString(input, startIndex));
+                              parser.getInputString(startIndex));
            System.out.println("REACH="+reach);
        }
 //     System.out.println("AMBIG dec "+dfa.decision+" for alt "+ambigAlts+" upon "+
-//                        getInputString(input, startIndex));
+//                        parser.getInputString(startIndex));
 //     System.out.println("userWantsCtxSensitive="+userWantsCtxSensitive);

        // can we resolve with predicates?
@@ -401,16 +414,28 @@
            return uniqueAlt;
        }

-       dfa.conflict = true; // at least one DFA state is ambiguous
-       if ( !userWantsCtxSensitive ) {
-           reportConflict(startIndex, input.index(), ambigAlts, reach);
-       }
+       boolean resolveConflict = false;
+       dfa.conflictSet = (OrderedHashSet<ATNConfig>)reach.clone(); // most recent set with conflict
+       if ( !userWantsCtxSensitive ) {
+           reportConflict(startIndex, input.index(), ambigAlts, reach);
+           resolveConflict = true;
+       }
+       else {
+           // TODO: add optimization to avoid retry if no config dips into outer config
+           if ( outerContext==ParserRuleContext.EMPTY ) { // TODO: or no configs dip into outer ctx
+               if ( retry_debug ) System.out.println("ctx empty; no need to retry");
+               // no point in retrying with ctx since it's same.
+               // this implies that we have a true ambiguity
+               reportAmbiguity(startIndex, input.index(), ambigAlts, reach);
+               resolveConflict = true;
+           }
+       }

-       if ( !userWantsCtxSensitive || useContext ) {
+       if ( resolveConflict || useContext ) {
            // resolve ambiguity
            if ( decState!=null && decState.isGreedy ) {
                // if greedy, resolve in favor of alt coming first
                resolveToMinAlt(reach, ambigAlts);
            }
            else {
                // if nongreedy loop, always pick exit branch to match
@@ -618,26 +643,26 @@
        retry_with_context++;
        int old_k = input.index();
        // retry using context, if any; if none, kill all but min as before
-       if ( retry_debug ) System.out.println("RETRY '"+ getInputString(input, startIndex) +
+       if ( retry_debug ) System.out.println("RETRY '"+ parser.getInputString(startIndex) +
                                              "' with ctx="+ originalContext);
-       int min = ambigAlts.getMinElement();
-       if ( originalContext==ParserRuleContext.EMPTY ) {
-           if ( retry_debug ) System.out.println("ctx empty; no need to retry");
-           // no point in retrying with ctx since it's same.
-           // this implies that we have a true ambiguity
-           reportAmbiguity(startIndex, input.index(), ambigAlts, reach);
-           return min;
-       }
+//     int min = ambigAlts.getMinElement();
+//     if ( originalContext==ParserRuleContext.EMPTY ) {
+//         if ( retry_debug ) System.out.println("ctx empty; no need to retry");
+//         // no point in retrying with ctx since it's same.
+//         // this implies that we have a true ambiguity
+//         reportAmbiguity(startIndex, input.index(), ambigAlts, reach);
+//         return min;
+//     }

        // otherwise we have to retry with context, filling in tmp DFA.
        // if it comes back with conflict, we have a true ambiguity
        input.seek(startIndex); // rewind
        DFA ctx_dfa = new DFA(dfa.atnStartState);
        int ctx_alt = predictATN(ctx_dfa, input, originalContext, true);
        if ( retry_debug ) System.out.println("retry predicts "+ctx_alt+" vs "+ambigAlts.getMinElement()+
-                                             " with conflict="+ctx_dfa.conflict+
+                                             " with conflict="+(ctx_dfa.conflictSet!=null) +
                                              " full ctx dfa="+ctx_dfa.toString(parser.getTokenNames()));
-       if ( ctx_dfa.conflict ) {
+       if ( ctx_dfa.conflictSet!=null ) {
 //         System.out.println("retry gives ambig for "+input.toString(startIndex, input.index()));
            reportAmbiguity(startIndex, input.index(), ambigAlts, reach);
        }
@@ -645,10 +670,11 @@
 //         System.out.println("NO ambig for "+input.toString(startIndex, input.index()));
 //         System.out.println(ctx_dfa.toString(parser.getTokenNames()));
            if ( old_k != input.index() ) {
-               if ( retry_debug ) System.out.println("used diff amount of k; old="+(old_k-startIndex+1)+", new="+(input.index()-startIndex+1));
+               if ( retry_debug ) System.out.println("used diff amount of k; old="+(old_k-startIndex+1)+
+                                                     ", new="+(input.index()-startIndex+1));
            }
            retry_with_context_indicates_no_conflict++;
-           reportContextSensitivity(startIndex, input.index(), ambigAlts, reach);
+           reportContextSensitivity(dfa, reach, startIndex, input.index());
        }
        // it's not context-sensitive; true ambig. fall thru to strip dead alts
@@ -896,63 +922,61 @@
    {
        if ( debug || retry_debug ) {
            System.out.println("reportConflict "+alts+":"+configs+
-                              ", input="+getInputString(parser.getInputStream(), startIndex, stopIndex));
+                              ", input="+parser.getInputString(startIndex, stopIndex));
        }
-       if ( parser!=null ) parser.reportConflict(startIndex, stopIndex, alts, configs);
+       if ( parser!=null ) parser.getErrorHandler().reportConflict(parser, startIndex, stopIndex, alts, configs);
    }

-   public void reportContextSensitivity(int startIndex, int stopIndex,
-                                        @NotNull IntervalSet alts,
-                                        @NotNull OrderedHashSet<ATNConfig> configs)
-   {
-       if ( debug || retry_debug ) {
-           System.out.println("reportContextSensitivity "+alts+":"+configs+
-                              ", input="+getInputString(parser.getInputStream(), startIndex, stopIndex));
-       }
-       if ( parser!=null ) parser.reportContextSensitivity(startIndex, stopIndex, alts, configs);
-   }
+   public void reportContextSensitivity(DFA dfa, OrderedHashSet<ATNConfig> configs, int startIndex, int stopIndex) {
+       if ( debug || retry_debug ) {
+           System.out.println("reportContextSensitivity decision="+dfa.decision+":"+configs+
+                              ", input="+parser.getInputString(startIndex, stopIndex));
+       }
+       if ( parser!=null ) parser.getErrorHandler().reportContextSensitivity(parser, dfa, startIndex, stopIndex, configs);
+   }

    /** If context sensitive parsing, we know it's ambiguity not conflict */
    public void reportAmbiguity(int startIndex, int stopIndex,
                                @NotNull IntervalSet ambigAlts,
                                @NotNull OrderedHashSet<ATNConfig> configs)
    {
        if ( debug || retry_debug ) {
            System.out.println("reportAmbiguity "+
                               ambigAlts+":"+configs+
-                              ", input="+getInputString(parser.getInputStream(), startIndex, stopIndex));
+                              ", input="+parser.getInputString(startIndex, stopIndex));
        }
-       if ( parser!=null ) parser.reportAmbiguity(startIndex, stopIndex, ambigAlts, configs);
+       if ( parser!=null ) parser.getErrorHandler().reportAmbiguity(parser, startIndex, stopIndex,
+                                                                    ambigAlts, configs);
    }

    public void reportInsufficientPredicates(int startIndex, int stopIndex,
                                             @NotNull IntervalSet ambigAlts,
                                             @NotNull SemanticContext[] altToPred,
                                             @NotNull OrderedHashSet<ATNConfig> configs)
    {
        if ( debug || retry_debug ) {
            System.out.println("reportInsufficientPredicates "+
                               ambigAlts+":"+Arrays.toString(altToPred)+
-                              getInputString(parser.getInputStream(), startIndex, stopIndex));
+                              parser.getInputString(startIndex, stopIndex));
        }
        if ( parser!=null ) {
-           parser.reportInsufficientPredicates(startIndex, stopIndex, ambigAlts,
-                                               altToPred, configs);
+           parser.getErrorHandler().reportInsufficientPredicates(parser, startIndex, stopIndex, ambigAlts,
+                                                                 altToPred, configs);
        }
    }

    public static int getUniqueAlt(@NotNull Collection<ATNConfig> configs) {
        int alt = ATN.INVALID_ALT_NUMBER;
        for (ATNConfig c : configs) {
            if ( alt == ATN.INVALID_ALT_NUMBER ) {
                alt = c.alt; // found first alt
            }
            else if ( c.alt!=alt ) {
                return ATN.INVALID_ALT_NUMBER;
            }
        }
        return alt;
    }

    @Nullable
    public ATNConfig configWithAltAtStopState(@NotNull Collection<ATNConfig> configs, int alt) {

DFA.java

@@ -28,9 +28,11 @@
  */
 package org.antlr.v4.runtime.dfa;

+import org.antlr.v4.runtime.atn.ATNConfig;
 import org.antlr.v4.runtime.atn.ATNState;
 import org.antlr.v4.runtime.misc.NotNull;
 import org.antlr.v4.runtime.misc.Nullable;
+import org.antlr.v4.runtime.misc.OrderedHashSet;

 import java.util.LinkedHashMap;
 import java.util.Map;
@@ -49,10 +51,10 @@ public class DFA {
     @NotNull
     public final ATNState atnStartState;

-    /** Does at least one state have a conflict? Mainly used as return value
-     *  from predictATN()
+    /** Set of configs for a DFA state with at least one conflict? Mainly used as "return value"
+     *  from predictATN() for retry.
      */
-    public boolean conflict;
+    public OrderedHashSet<ATNConfig> conflictSet;

     public DFA(@NotNull ATNState atnStartState) { this.atnStartState = atnStartState; }

BaseTest.java

@@ -357,9 +357,6 @@ public abstract class BaseTest {
         writeFile(tmpdir, fileName, grammarStr);
         try {
             final List<String> options = new ArrayList<String>();
-            if ( debug ) {
-                options.add("-debug");
-            }
             Collections.addAll(options, extraOptions);
             options.add("-o");
             options.add(tmpdir);
@@ -1036,18 +1033,12 @@
             " }\n" +
             "}"
             );
-        ST createParserST =
-            new ST(
-            "class Profiler2 extends Profiler {\n" +
-            " public void terminate() { ; }\n" +
-            "}\n"+
-            " Profiler2 profiler = new Profiler2();\n"+
-            " <parserName> parser = new <parserName>(tokens,profiler);\n" +
-            " profiler.setParser(parser);\n");
-        if ( !debug ) {
+        ST createParserST = new ST(" <parserName> parser = new <parserName>(tokens);\n");
+        if ( debug ) {
             createParserST =
                 new ST(
-                " <parserName> parser = new <parserName>(tokens);\n");
+                " <parserName> parser = new <parserName>(tokens);\n" +
+                " parser.setErrorHandler(new DiagnosticErrorStrategy());\n");
         }
         outputFileST.add("createParser", createParserST);
         outputFileST.add("parserName", parserName);
@@ -1086,7 +1077,6 @@
         ST outputFileST = new ST(
             "import org.antlr.v4.runtime.*;\n" +
             "import org.antlr.v4.runtime.tree.*;\n" +
-//            "import org.antlr.v4.runtime.debug.*;\n" +
             "\n" +
             "public class Test {\n" +
             " public static void main(String[] args) throws Exception {\n" +
@@ -1109,18 +1099,12 @@
             " }\n" +
             "}"
             );
-        ST createParserST =
-            new ST(
-            "class Profiler2 extends Profiler {\n" +
-            " public void terminate() { ; }\n" +
-            "}\n"+
-            " Profiler2 profiler = new Profiler2();\n"+
-            " <parserName> parser = new <parserName>(tokens,profiler);\n" +
-            " profiler.setParser(parser);\n");
-        if ( !debug ) {
+        ST createParserST = new ST(" <parserName> parser = new <parserName>(tokens);\n");
+        if ( debug ) {
             createParserST =
                 new ST(
-                " <parserName> parser = new <parserName>(tokens);\n");
+                " <parserName> parser = new <parserName>(tokens);\n" +
+                " parser.setErrorHandler(new DiagnosticErrorStrategy());\n");
         }
         outputFileST.add("createParser", createParserST);
         outputFileST.add("parserName", parserName);
@@ -1142,7 +1126,6 @@
         ST outputFileST = new ST(
             "import org.antlr.v4.runtime.*;\n" +
             "import org.antlr.v4.runtime.tree.*;\n" +
-//            "import org.antlr.v4.runtime.debug.*;\n" +
             "\n" +
             "public class Test {\n" +
             " public static void main(String[] args) throws Exception {\n" +
@@ -1161,18 +1144,12 @@
             " }\n" +
             "}"
             );
-        ST createParserST =
-            new ST(
-            "class Profiler2 extends Profiler {\n" +
-            " public void terminate() { ; }\n" +
-            "}\n"+
-            " Profiler2 profiler = new Profiler2();\n"+
-            " <parserName> parser = new <parserName>(tokens,profiler);\n" +
-            " profiler.setParser(parser);\n");
-        if ( !debug ) {
+        ST createParserST = new ST(" <parserName> parser = new <parserName>(tokens);\n");
+        if ( debug ) {
             createParserST =
                 new ST(
-                " <parserName> parser = new <parserName>(tokens);\n");
+                " <parserName> parser = new <parserName>(tokens);\n" +
+                " parser.setErrorHandler(new DiagnosticErrorStrategy());\n");
         }
         outputFileST.add("createParser", createParserST);
         outputFileST.add("parserName", parserName);
@@ -1192,7 +1169,6 @@
             "import org.antlr.v4.runtime.*;\n" +
             "import org.antlr.v4.stringtemplate.*;\n" +
             "import org.antlr.v4.stringtemplate.language.*;\n" +
-//            "import org.antlr.v4.runtime.debug.*;\n" +
             "import java.io.*;\n" +
             "\n" +
             "public class Test {\n" +
@@ -1216,18 +1192,12 @@
             " }\n" +
             "}"
             );
-        ST createParserST =
-            new ST(
-            "class Profiler2 extends Profiler {\n" +
-            " public void terminate() { ; }\n" +
-            "}\n"+
-            " Profiler2 profiler = new Profiler2();\n"+
-            " <parserName> parser = new <parserName>(tokens,profiler);\n" +
-            " profiler.setParser(parser);\n");
-        if ( !debug ) {
+        ST createParserST = new ST(" <parserName> parser = new <parserName>(tokens);\n");
+        if ( debug ) {
             createParserST =
                 new ST(
-                " <parserName> parser = new <parserName>(tokens);\n");
+                " <parserName> parser = new <parserName>(tokens);\n" +
+                " parser.setErrorHandler(new DiagnosticErrorStrategy());\n");
         }
         outputFileST.add("createParser", createParserST);
         outputFileST.add("parserName", parserName);

TestATNParserPrediction.java

@@ -36,7 +36,6 @@ import org.antlr.v4.runtime.ParserRuleContext;
 import org.antlr.v4.runtime.TokenStream;
 import org.antlr.v4.runtime.atn.*;
 import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.misc.Utils;
 import org.antlr.v4.tool.DOTGenerator;
 import org.antlr.v4.tool.Grammar;
 import org.antlr.v4.tool.LexerGrammar;
@@ -321,14 +320,16 @@ public class TestATNParserPrediction extends BaseTest {
        ParserRuleContext a_e_ctx = new ParserRuleContext(a_ctx, a_e_invoke.stateNumber, bStart.stateNumber);
        ParserRuleContext b_e_ctx = new ParserRuleContext(b_ctx, b_e_invoke.stateNumber, bStart.stateNumber);

-       ParserATNSimulator interp = new ParserATNSimulator(atn);
+       List<Integer> types = getTokenTypesViaATN("ab", lexInterp);
+       System.out.println(types);
+       TokenStream input = new IntTokenStream(types);
+//     ParserATNSimulator interp = new ParserATNSimulator(atn);
+       ParserInterpreter interp = new ParserInterpreter(g, input);
 //     interp.setContextSensitive(true); the default
-       List<Integer> types = getTokenTypesViaATN("ab", lexInterp);
-       System.out.println(types);
-       TokenStream input = new IntTokenStream(types);
        int alt = interp.adaptivePredict(input, 0, b_e_ctx);
        assertEquals(alt, 2);
-       DFA dfa = interp.decisionToDFA[0];
+       DFA dfa = interp.getATNSimulator().decisionToDFA[0];
        String expecting =
            "s0-'a'->s1\n" +
            "s1-'b'->s2\n" +
@@ -393,121 +394,112 @@
        assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));
    }

-   @Test public void testFullContextIF_THEN_ELSEParse() throws Exception {
-       LexerGrammar lg = new LexerGrammar(
-           "lexer grammar L;\n" +
-           "LC : '{' ;\n" +
-           "RC : '}' ;\n" +
-           "IF : 'if' ;\n" +
-           "ELSE : 'else' ;\n" +
-           "BREAK : 'break' ;\n" +
-           "RETURN : 'return' ;\n" +
-           "THEN : 'then' ;\n" +
-           "WS : (' '|'\\t'|'\\n')+ {skip();} ;\n");
-       // AB predicted in both alts of e but in diff contexts.
-       Grammar g = new Grammar(
-           "parser grammar T;\n"+
-           "tokens {LC; RC; IF; ELSE; BREAK; RETURN; THEN;}\n" +
-           "s : LC stat* RC ;\n" +
-           "stat: IF ID THEN stat (ELSE stat)?\n" +
-           " | BREAK\n" +
-           " | RETURN\n" +
-           " ;");
-
-       ATN lexatn = createATN(lg);
-       LexerATNSimulator lexInterp = new LexerATNSimulator(lexatn);
-
-       semanticProcess(lg);
-       g.importVocab(lg);
-       semanticProcess(g);
-
-       ParserATNFactory f = new ParserATNFactory(g);
-       ATN atn = f.createATN();
-
-       ATNState sStart = atn.ruleToStartState[g.getRule("s").index];
-       if ( sStart.transition(0).target instanceof BlockStartState ) {
-           sStart = sStart.transition(0).target;
-       }
-       DecisionState decState = (DecisionState)sStart;
-
-       DOTGenerator dot = new DOTGenerator(g);
-       System.out.println(dot.getDOT(atn.ruleToStartState[g.getRule("s").index]));
-
-       ParserATNSimulator interp = new ParserATNSimulator(atn);
-       List<Integer> types = getTokenTypesViaATN("{break}", lexInterp);
-       int WS = lg.getTokenType("WS");
-       Utils.removeAllElements(types, WS);
-       System.out.println(types);
-       TokenStream input = new IntTokenStream(types);
-
-       int alt = interp.matchATN(input, decState);
-//     int alt = interp.adaptivePredict(input, 0, ParserRuleContext.EMPTY);
-       assertEquals(alt, 1);
-       DFA dfa = interp.decisionToDFA[0];
-       String expecting =
-           "s0-'a'->s1\n" +
-           "s1-'b'->s2\n" +
-           "s2-EOF->:s3@{[10]=2}\n";
-       assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));
-
-//     alt = interp.adaptivePredict(input, 0, ParserRuleContext.EMPTY);
-//     assertEquals(alt, 2);
-//     expecting =
-//         "s0-'a'->s1\n" +
-//         "s1-'b'->s2\n" +
-//         "s2-EOF->:s3@{[10]=2}\n";
-//     assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));
-//
-//     alt = interp.adaptivePredict(input, 0, ParserRuleContext.EMPTY);
-//     assertEquals(alt, 1);
-//     expecting =
-//         "s0-'a'->s1\n" +
-//         "s1-'b'->s2\n" +
-//         "s2-EOF->:s3@{[10]=2, [6]=1}\n";
-//     assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));
-//
-//     alt = interp.adaptivePredict(input, 0, ParserRuleContext.EMPTY); // cached
-//     assertEquals(alt, 2);
-//     expecting =
-//         "s0-'a'->s1\n" +
-//         "s1-'b'->s2\n" +
-//         "s2-EOF->:s3@{[10]=2, [6]=1}\n";
-//     assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));
-//
-//     alt = interp.adaptivePredict(input, 0, ParserRuleContext.EMPTY); // cached
-//     assertEquals(alt, 1);
-//     expecting =
-//         "s0-'a'->s1\n" +
-//         "s1-'b'->s2\n" +
-//         "s2-EOF->:s3@{[10]=2, [6]=1}\n";
-//     assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));
-//
-//     types = getTokenTypesViaATN("b", lexInterp);
-//     System.out.println(types);
-//     input = new IntTokenStream(types);
-//     alt = interp.adaptivePredict(input, 0, null); // ctx irrelevant
-//     assertEquals(alt, 2);
-//     expecting =
-//         "s0-'a'->s1\n" +
-//         "s0-'b'->:s4=>2\n" +
-//         "s1-'b'->s2\n" +
-//         "s2-EOF->:s3@{[10]=2, [6]=1}\n";
-//     assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));
-//
-//     types = getTokenTypesViaATN("aab", lexInterp);
-//     System.out.println(types);
-//     input = new IntTokenStream(types);
-//     alt = interp.adaptivePredict(input, 0, null);
-//     assertEquals(alt, 1);
-//     expecting =
-//         "s0-'a'->s1\n" +
-//         "s0-'b'->:s4=>2\n" +
-//         "s1-'a'->:s5=>1\n" +
-//         "s1-'b'->s2\n" +
-//         "s2-EOF->:s3@{[10]=2, [6]=1}\n";
-//     assertEquals(expecting, dfa.toString(g.getTokenDisplayNames()));
-   }
+   @Test public void testFullContextIF_THEN_ELSEParse() {
+       String grammar =
+           "grammar T;\n"+
+           "s" +
+           "@after {dumpDFA();}\n" +
+           " : '{' stat* '}'" +
+           " ;\n" +
+           "stat: 'if' ID 'then' stat ('else' stat)?\n" +
+           " | 'break'\n" +
+           " | 'return'\n" +
+           " ;" +
+           "ID : 'a'..'z'+ ;\n"+
+           "WS : (' '|'\\t'|'\\n')+ {skip();} ;\n";
+       String input = "{ if x then break }";
+       String result = execParser("T.g", grammar, "TParser", "TLexer", "s",
+           input, true);
+       String expecting =
+           "Decision 0:\n" +
+           "s0-'if'->:s1=>1\n" +
+           "s0-'}'->:s2=>2\n" +
+           "\n" +
+           "Decision 1:\n" +
+           "s0-'}'->:s1=>2\n";
+       assertEquals(expecting, result);
+       assertEquals(null, this.stderrDuringParse);
+
+       input = "{ if x then break else return }";
+       result = execParser("T.g", grammar, "TParser", "TLexer", "s",
+           input, true);
+       expecting =
+           "Decision 0:\n" +
+           "s0-'if'->:s1=>1\n" +
+           "s0-'}'->:s2=>2\n" +
+           "\n" +
+           "Decision 1:\n" +
+           "s0-'else'->:s1@{[6]=1}\n";
+       assertEquals(expecting, result);
+       assertEquals("line 1:18 reportContextSensitivity: [15|1|[25], 29|1|[25], 31|1|[25], 15|2|[25]|up=1, 29|2|[25]|up=1, 31|2|[25]|up=1], input=else\n",
+           this.stderrDuringParse);
+
+       input = "{ if x then break else return }";
+       result = execParser("T.g", grammar, "TParser", "TLexer", "s",
+           input, true);
+       expecting =
+           "Decision 0:\n" +
+           "s0-'if'->:s1=>1\n" +
+           "s0-'}'->:s2=>2\n" +
+           "\n" +
+           "Decision 1:\n" +
+           "s0-'else'->:s1@{[6]=1}\n";
+       assertEquals(expecting, result);
+       assertEquals("line 1:18 reportContextSensitivity: [15|1|[25], 29|1|[25], 31|1|[25], 15|2|[25]|up=1, 29|2|[25]|up=1, 31|2|[25]|up=1], input=else\n",
+           this.stderrDuringParse);
+
+       input =
+           "{ if x then break else return\n" +
+           "if x then if y then break else return }";
+       result = execParser("T.g", grammar, "TParser", "TLexer", "s",
+           input, true);
+       expecting =
+           "Decision 0:\n" +
+           "s0-'if'->:s1=>1\n" +
+           "s0-'}'->:s2=>2\n" +
+           "\n" +
+           "Decision 1:\n" +
+           "s0-'else'->:s1@{[6]=1, [21 6]=1}\n" +
+           "s0-'}'->:s2=>2\n";
+       assertEquals(expecting, result);
+       assertEquals("line 1:18 reportContextSensitivity: [15|1|[25], 29|1|[25], 31|1|[25], 15|2|[25]|up=1, 29|2|[25]|up=1, 31|2|[25]|up=1], input=else\n" +
+           "line 2:26 reportAmbiguity {1..2}:[1|1|[], 1|2|[]], input=else\n",
+           this.stderrDuringParse);
+
+       input =
+           "{ if x then break else return\n" +
+           "if x then if y then break else return }";
+       result = execParser("T.g", grammar, "TParser", "TLexer", "s",
+           input, true);
+       expecting =
+           "Decision 0:\n" +
+           "s0-'if'->:s1=>1\n" +
+           "s0-'}'->:s2=>2\n" +
+           "\n" +
+           "Decision 1:\n" +
+           "s0-'else'->:s1@{[6]=1, [21 6]=1}\n" +
+           "s0-'}'->:s2=>2\n";
+       assertEquals(expecting, result);
+       assertEquals("line 1:18 reportContextSensitivity: [15|1|[25], 29|1|[25], 31|1|[25], 15|2|[25]|up=1, 29|2|[25]|up=1, 31|2|[25]|up=1], input=else\n" +
+           "line 2:26 reportAmbiguity {1..2}:[1|1|[], 1|2|[]], input=else\n",
+           this.stderrDuringParse);
+
+       input =
+           "{ if x then if y then break else break }";
+       result = execParser("T.g", grammar, "TParser", "TLexer", "s",
+           input, true);
+       expecting =
+           "Decision 0:\n" +
+           "s0-'if'->:s1=>1\n" +
+           "s0-'}'->:s2=>2\n" +
+           "\n" +
+           "Decision 1:\n" +
+           "s0-'else'->:s1@{[21 6]=1}\n" +
+           "s0-'}'->:s2=>2\n";
+       assertEquals(expecting, result);
+       assertEquals("line 1:28 reportAmbiguity {1..2}:[15|1|[25], 29|1|[25], 31|1|[25], 15|2|[25]|up=1, 29|2|[25]|up=1, 31|2|[25]|up=1], input=else\n",
+           this.stderrDuringParse);
+   }

    @Test public void testRecursiveLeftPrefix() throws Exception {
        LexerGrammar lg = new LexerGrammar(