Merge pull request #51 from parrt/master

a bunch of new updates
Terence Parr 2012-03-28 10:37:43 -07:00
commit e2d9ffc767
43 changed files with 663 additions and 341 deletions

View File

@ -29,6 +29,10 @@
package org.antlr.v4.runtime;
import org.antlr.v4.runtime.atn.ATNConfigSet;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntervalSet;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.Nullable;
/** How to emit recognition errors */
@ -72,4 +76,30 @@ public interface ANTLRErrorListener<Symbol> {
int charPositionInLine,
String msg,
@Nullable RecognitionException e);
/** Called when the parser detects a true ambiguity: an input sequence can be matched
* literally by two or more passes through the grammar. ANTLR resolves the ambiguity in
* favor of the alternative appearing first in the grammar. The start and stop indices are
* zero-based absolute indices into the token stream. ambigAlts is a set of alternative numbers
* that can match the input sequence. This method is only called when we are parsing with
* full context.
*/
void reportAmbiguity(@NotNull Parser recognizer,
DFA dfa, int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
@NotNull ATNConfigSet configs);
void reportAttemptingFullContext(@NotNull Parser recognizer,
@NotNull DFA dfa,
int startIndex, int stopIndex,
@NotNull ATNConfigSet configs);
/** Called by the parser when it finds a conflict that is resolved by retrying the parse
* with full context. This is not a warning; it simply notifies you that your grammar
* is more complicated than Strong LL can handle. The parser moved up to full context
* parsing for that input sequence.
*/
void reportContextSensitivity(@NotNull Parser recognizer,
@NotNull DFA dfa,
int startIndex, int stopIndex,
@NotNull ATNConfigSet configs);
}

View File

@ -1,10 +1,5 @@
package org.antlr.v4.runtime;
import org.antlr.v4.runtime.atn.ATNConfigSet;
import org.antlr.v4.runtime.atn.DecisionState;
import org.antlr.v4.runtime.atn.SemanticContext;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntervalSet;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.Nullable;
@ -114,30 +109,4 @@ public interface ANTLRErrorStrategy {
void reportError(@NotNull Parser recognizer,
@Nullable RecognitionException e)
throws RecognitionException;
/** Called when the parser detects a true ambiguity: an input sequence can be matched
* literally by two or more passes through the grammar. ANTLR resolves the ambiguity in
* favor of the alternative appearing first in the grammar. The start and stop indices are
* zero-based absolute indices into the token stream. ambigAlts is a set of alternative numbers
* that can match the input sequence. This method is only called when we are parsing with
* full context.
*/
void reportAmbiguity(@NotNull Parser recognizer,
DFA dfa, int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
@NotNull ATNConfigSet configs);
void reportAttemptingFullContext(@NotNull Parser recognizer,
@NotNull DFA dfa,
int startIndex, int stopIndex,
@NotNull ATNConfigSet configs);
/** Called by the parser when it finds a conflict that is resolved by retrying the parse
* with full context. This is not a warning; it simply notifies you that your grammar
* is more complicated than Strong LL can handle. The parser moved up to full context
* parsing for that input sequence.
*/
void reportContextSensitivity(@NotNull Parser recognizer,
@NotNull DFA dfa,
int startIndex, int stopIndex,
@NotNull ATNConfigSet configs);
}

View File

@ -0,0 +1,78 @@
/*
[The "BSD license"]
Copyright (c) 2012 Terence Parr
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr.v4.runtime;
import org.antlr.v4.runtime.atn.ATNConfigSet;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntervalSet;
/**
*
* @author Sam Harwell
*/
public class BaseErrorListener<Symbol> implements ANTLRErrorListener<Symbol> {
@Override
public <T extends Symbol> void error(Recognizer<T, ?> recognizer,
T offendingSymbol,
int line,
int charPositionInLine,
String msg,
RecognitionException e)
{
}
@Override
public void reportAmbiguity(Parser recognizer,
DFA dfa,
int startIndex,
int stopIndex,
IntervalSet ambigAlts,
ATNConfigSet configs)
{
}
@Override
public void reportAttemptingFullContext(Parser recognizer,
DFA dfa,
int startIndex,
int stopIndex,
ATNConfigSet configs)
{
}
@Override
public void reportContextSensitivity(Parser recognizer,
DFA dfa,
int startIndex,
int stopIndex,
ATNConfigSet configs)
{
}
}
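
With this new base class, client code only overrides the callbacks it needs instead of implementing every method of ANTLRErrorListener. A minimal sketch of such a subclass (the AmbiguityLogger name is illustrative, not part of this commit):

import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.atn.ATNConfigSet;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntervalSet;

/** Hypothetical listener: log ambiguities, inherit the no-op bodies for everything else. */
public class AmbiguityLogger extends BaseErrorListener<Token> {
    @Override
    public void reportAmbiguity(Parser recognizer, DFA dfa, int startIndex, int stopIndex,
                                IntervalSet ambigAlts, ATNConfigSet configs)
    {
        System.err.println("ambiguity at tokens "+startIndex+".."+stopIndex+": alts="+ambigAlts);
    }
}

It is registered through Recognizer.addErrorListener(), exactly as the TestRig and TestR changes below do with DiagnosticErrorListener.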

View File

@ -32,7 +32,7 @@ package org.antlr.v4.runtime;
*
* @author Sam Harwell
*/
public class ConsoleErrorListener implements ANTLRErrorListener<Object> {
public class ConsoleErrorListener extends BaseErrorListener<Object> {
public static final ConsoleErrorListener INSTANCE = new ConsoleErrorListener();
@Override

View File

@ -29,8 +29,14 @@
package org.antlr.v4.runtime;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.atn.BlockStartState;
import org.antlr.v4.runtime.atn.PlusBlockStartState;
import org.antlr.v4.runtime.atn.PlusLoopbackState;
import org.antlr.v4.runtime.atn.RuleTransition;
import org.antlr.v4.runtime.atn.StarLoopEntryState;
import org.antlr.v4.runtime.atn.StarLoopbackState;
import org.antlr.v4.runtime.misc.IntervalSet;
import org.antlr.v4.runtime.misc.NotNull;
@ -549,25 +555,4 @@ public class DefaultErrorStrategy implements ANTLRErrorStrategy {
ttype = recognizer.getInputStream().LA(1);
}
}
@Override
public void reportAmbiguity(@NotNull Parser recognizer,
DFA dfa, int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
@NotNull ATNConfigSet configs)
{
}
@Override
public void reportAttemptingFullContext(@NotNull Parser recognizer,
@NotNull DFA dfa,
int startIndex, int stopIndex,
@NotNull ATNConfigSet configs)
{
}
@Override
public void reportContextSensitivity(@NotNull Parser recognizer, @NotNull DFA dfa,
int startIndex, int stopIndex, @NotNull ATNConfigSet configs)
{
}
}

View File

@ -34,13 +34,14 @@ import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntervalSet;
import org.antlr.v4.runtime.misc.NotNull;
public class DiagnosticErrorStrategy extends DefaultErrorStrategy {
public class DiagnosticErrorListener extends BaseErrorListener<Token> {
@Override
public void reportAmbiguity(@NotNull Parser recognizer,
DFA dfa, int startIndex, int stopIndex, @NotNull IntervalSet ambigAlts,
@NotNull ATNConfigSet configs)
{
recognizer.notifyErrorListeners("reportAmbiguity d=" + dfa.decision + ": ambigAlts=" + ambigAlts + ":" + configs + ", input='" +
recognizer.notifyErrorListeners("reportAmbiguity d=" + dfa.decision +
": ambigAlts=" + ambigAlts + ", input='" +
recognizer.getInputString(startIndex, stopIndex) + "'");
}
@ -50,7 +51,8 @@ public class DiagnosticErrorStrategy extends DefaultErrorStrategy {
int startIndex, int stopIndex,
@NotNull ATNConfigSet configs)
{
recognizer.notifyErrorListeners("reportAttemptingFullContext d=" + dfa.decision + ": " + configs + ", input='" +
recognizer.notifyErrorListeners("reportAttemptingFullContext d=" +
dfa.decision + ", input='" +
recognizer.getInputString(startIndex, stopIndex) + "'");
}
@ -58,7 +60,8 @@ public class DiagnosticErrorStrategy extends DefaultErrorStrategy {
public void reportContextSensitivity(@NotNull Parser recognizer, @NotNull DFA dfa,
int startIndex, int stopIndex, @NotNull ATNConfigSet configs)
{
recognizer.notifyErrorListeners("reportContextSensitivity d=" + dfa.decision + ": " + configs + ", input='" +
recognizer.notifyErrorListeners("reportContextSensitivity d=" +
dfa.decision + ", input='" +
recognizer.getInputString(startIndex, stopIndex) + "'");
}
}
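
The trimmed messages above drop the raw ATNConfigSet dump and keep just the decision number and offending input. Attaching the listener is a one-liner (sketch; TParser and its start rule prog stand in for whatever recognizer the application generates):

TParser parser = new TParser(tokens);
parser.addErrorListener(new DiagnosticErrorListener());
parser.prog();   // ambiguity / full-context reports now arrive via the listener chain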

View File

@ -309,10 +309,9 @@ public abstract class Lexer extends Recognizer<Integer, LexerATNSimulator>
public void notifyListeners(LexerNoViableAltException e) {
String msg = "token recognition error at: '"+
_input.substring(_tokenStartCharIndex, _input.index())+"'";
List<? extends ANTLRErrorListener<? super Integer>> listeners = getErrorListeners();
for (ANTLRErrorListener<? super Integer> listener : listeners) {
listener.error(this, null, _tokenStartLine, _tokenStartCharPositionInLine, msg, e);
}
ANTLRErrorListener<? super Integer> listener = getErrorListenerDispatch();
listener.error(this, null, _tokenStartLine, _tokenStartCharPositionInLine, msg, e);
}
public String getCharErrorDisplay(int c) {

View File

@ -45,18 +45,18 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator<Token>
public class TraceListener implements ParseListener<Token> {
@Override
public void enterNonLRRule(ParserRuleContext<Token> ctx) {
System.out.println("enter " + getRuleNames()[ctx.ruleIndex] + ", LT(1)=" + _input.LT(1).getText());
System.out.println("enter " + getRuleNames()[ctx.getRuleIndex()] + ", LT(1)=" + _input.LT(1).getText());
}
@Override
public void exitEveryRule(ParserRuleContext<Token> ctx) {
System.out.println("exit "+getRuleNames()[ctx.ruleIndex]+", LT(1)="+_input.LT(1).getText());
System.out.println("exit "+getRuleNames()[ctx.getRuleIndex()]+", LT(1)="+_input.LT(1).getText());
}
@Override
public void visitTerminal(ParserRuleContext<Token> parent, Token token) {
System.out.println("consume "+token+" rule "+
getRuleNames()[parent.ruleIndex]+
getRuleNames()[parent.getRuleIndex()]+
" alt="+parent.altNum);
}
}
@ -327,10 +327,9 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator<Token>
line = ((Token) offendingToken).getLine();
charPositionInLine = ((Token) offendingToken).getCharPositionInLine();
}
List<? extends ANTLRErrorListener<? super Token>> listeners = getErrorListeners();
for (ANTLRErrorListener<? super Token> listener : listeners) {
listener.error(this, offendingToken, line, charPositionInLine, msg, e);
}
ANTLRErrorListener<? super Token> listener = getErrorListenerDispatch();
listener.error(this, offendingToken, line, charPositionInLine, msg, e);
}
/** Consume the current symbol and return it. E.g., given the following
@ -381,12 +380,12 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator<Token>
public void enterRule(ParserRuleContext<Token> localctx, int ruleIndex) {
_ctx = localctx;
_ctx.start = _input.LT(1);
_ctx.ruleIndex = ruleIndex;
if (_buildParseTrees) addContextToParseTree();
if ( _parseListeners != null) triggerEnterRuleEvent();
}
public void exitRule() {
_ctx.stop = _input.LT(-1);
// trigger event on _ctx, before it reverts to parent
if ( _parseListeners != null) triggerExitRuleEvent();
_ctx = (ParserRuleContext<Token>)_ctx.parent;
@ -408,10 +407,10 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator<Token>
public void pushNewRecursionContext(ParserRuleContext<Token> localctx, int ruleIndex) {
_ctx = localctx;
_ctx.start = _input.LT(1);
_ctx.ruleIndex = ruleIndex;
}
public void unrollRecursionContexts(ParserRuleContext<Token> _parentctx) {
_ctx.stop = _input.LT(-1);
ParserRuleContext<Token> retctx = _ctx; // save current ctx (return value)
// unroll so _ctx is as it was before call to recursive method

View File

@ -30,6 +30,7 @@ package org.antlr.v4.runtime;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.Nullable;
import org.antlr.v4.runtime.tree.ParseTree;
@ -102,12 +103,15 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
public Symbol start, stop;
/** Set during parsing to identify which rule the parser is in. */
public int ruleIndex;
/** Set during parsing to identify which alt of the rule the parser is in. */
public int altNum;
/**
* The exception which forced this rule to return. If the rule successfully
* completed, this is {@code null}.
*/
public RecognitionException exception;
public ParserRuleContext() { }
/** COPY a ctx (I'm deliberately not using copy constructor) */
@ -119,7 +123,6 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
this.start = ctx.start;
this.stop = ctx.stop;
this.ruleIndex = ctx.ruleIndex;
}
public ParserRuleContext(@Nullable ParserRuleContext<Symbol> parent, int invokingStateNumber, int stateNumber) {
@ -289,7 +292,20 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
public int getChildCount() { return children!=null ? children.size() : 0; }
@Override
public int getRuleIndex() { return ruleIndex; }
public Interval getSourceInterval() {
if ( start==null || stop==null ) return Interval.INVALID;
return Interval.of(start.getTokenIndex(), stop.getTokenIndex());
}
/** Return the text matched by this context and below in the parse
* tree. It includes tokens from this.start .. this.stop inclusive.
* It includes hidden-channel tokens between start and stop; the
* edge tokens are always on-channel tokens.
*/
public String getText(TokenStream tokens) {
Interval range = getSourceInterval();
return range==Interval.INVALID ? null : tokens.toString(range.a, range.b);
}
public Symbol getStart() { return start; }
public Symbol getStop() { return stop; }
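
These accessors make it straightforward to recover exactly what a rule matched. A usage sketch, assuming ctx is a ParserRuleContext<Token> and tokens is the TokenStream that was parsed:

Interval span = ctx.getSourceInterval();   // token-index range, or Interval.INVALID if unknown
String matched = ctx.getText(tokens);      // text of start..stop incl. hidden-channel tokens, or null
Token first = ctx.getStart();
Token last  = ctx.getStop();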

View File

@ -0,0 +1,97 @@
/*
[The "BSD license"]
Copyright (c) 2012 Terence Parr
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr.v4.runtime;
import org.antlr.v4.runtime.atn.ATNConfigSet;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntervalSet;
import java.util.Collection;
/**
*
* @author Sam Harwell
*/
public class ProxyErrorListener<Symbol> implements ANTLRErrorListener<Symbol> {
private final Collection<? extends ANTLRErrorListener<? super Symbol>> delegates;
public ProxyErrorListener(Collection<? extends ANTLRErrorListener<? super Symbol>> delegates) {
this.delegates = delegates;
}
@Override
public <T extends Symbol> void error(Recognizer<T, ?> recognizer,
T offendingSymbol,
int line,
int charPositionInLine,
String msg,
RecognitionException e)
{
for (ANTLRErrorListener<? super Symbol> listener : delegates) {
listener.error(recognizer, offendingSymbol, line, charPositionInLine, msg, e);
}
}
@Override
public void reportAmbiguity(Parser recognizer,
DFA dfa,
int startIndex,
int stopIndex,
IntervalSet ambigAlts,
ATNConfigSet configs)
{
for (ANTLRErrorListener<? super Symbol> listener : delegates) {
listener.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, ambigAlts, configs);
}
}
@Override
public void reportAttemptingFullContext(Parser recognizer,
DFA dfa,
int startIndex,
int stopIndex,
ATNConfigSet configs)
{
for (ANTLRErrorListener<? super Symbol> listener : delegates) {
listener.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, configs);
}
}
@Override
public void reportContextSensitivity(Parser recognizer,
DFA dfa,
int startIndex,
int stopIndex,
ATNConfigSet configs)
{
for (ANTLRErrorListener<? super Symbol> listener : delegates) {
listener.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, configs);
}
}
}

View File

@ -120,6 +120,10 @@ public abstract class Recognizer<Symbol, ATNInterpreter extends ATNSimulator> {
return new ArrayList<ANTLRErrorListener<? super Symbol>>(_listeners);
}
public ANTLRErrorListener<? super Symbol> getErrorListenerDispatch() {
return new ProxyErrorListener<Symbol>(getErrorListeners());
}
// subclass needs to override these if there are sempreds or actions
// that the ATN interp needs to execute
public boolean sempred(@Nullable RuleContext _localctx, int ruleIndex, int actionIndex) {
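
getErrorListenerDispatch() wraps the registered listeners in the new ProxyErrorListener, so the Lexer, Parser, and ParserATNSimulator changes in this commit can make a single reporting call that fans out to every listener. From the application side nothing changes beyond registration (sketch; AmbiguityLogger is the hypothetical listener sketched earlier):

parser.addErrorListener(new DiagnosticErrorListener());
parser.addErrorListener(new AmbiguityLogger());

// Inside the runtime, one call now reaches both listeners:
ANTLRErrorListener<? super Token> dispatch = parser.getErrorListenerDispatch();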

View File

@ -208,7 +208,12 @@ public class RuleContext implements ParseTree.RuleNode {
return invokingState == -1;
}
// satisfy the ParseTree interface
// satisfy the ParseTree / SyntaxTree interface
@Override
public Interval getSourceInterval() {
return Interval.INVALID;
}
@Override
public RuleContext getRuleContext() { return this; }
@ -231,14 +236,6 @@ public class RuleContext implements ParseTree.RuleNode {
return 0;
}
@Override
public Interval getSourceInterval() {
if ( getChildCount()==0 ) return Interval.INVALID;
int start = getChild(0).getSourceInterval().a;
int stop = getChild(getChildCount()-1).getSourceInterval().b;
return new Interval(start, stop);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) { return visitor.visitChildren(this); }

View File

@ -60,6 +60,10 @@ public abstract class ATNSimulator {
int p = 0;
atn.grammarType = toInt(data[p++]);
atn.maxTokenType = toInt(data[p++]);
//
// STATES
//
int nstates = toInt(data[p++]);
for (int i=1; i<=nstates; i++) {
int stype = toInt(data[p++]);
@ -75,6 +79,10 @@ public abstract class ATNSimulator {
}
atn.addState(s);
}
//
// RULES
//
int nrules = toInt(data[p++]);
if ( atn.grammarType == ATN.LEXER ) {
atn.ruleToTokenType = new int[nrules];
@ -92,11 +100,19 @@ public abstract class ATNSimulator {
atn.ruleToActionIndex[i] = actionIndex;
}
}
//
// MODES
//
int nmodes = toInt(data[p++]);
for (int i=0; i<nmodes; i++) {
int s = toInt(data[p++]);
atn.modeToStartState.add((TokensStartState)atn.states.get(s));
}
//
// SETS
//
int nsets = toInt(data[p++]);
for (int i=1; i<=nsets; i++) {
int nintervals = toInt(data[p]);
@ -108,6 +124,10 @@ public abstract class ATNSimulator {
p += 2;
}
}
//
// EDGES
//
int nedges = toInt(data[p++]);
for (int i=1; i<=nedges; i++) {
int src = toInt(data[p]);
@ -125,6 +145,10 @@ public abstract class ATNSimulator {
srcState.addTransition(trans);
p += 6;
}
//
// DECISIONS
//
int ndecisions = toInt(data[p++]);
for (int i=1; i<=ndecisions; i++) {
int s = toInt(data[p++]);
@ -134,6 +158,7 @@ public abstract class ATNSimulator {
decState.decision = i-1;
decState.isGreedy = isGreedy==1;
}
return atn;
}

View File

@ -241,7 +241,8 @@ public class LexerATNSimulator extends ATNSimulator {
t = input.LA(1);
}
return failOrAccept(prevAccept, input, prevAccept.state.configset, t);
ATNConfigSet reach = prevAccept.state != null ? prevAccept.state.configset : null;
return failOrAccept(prevAccept, input, reach, t);
}
protected int execATN(@NotNull CharStream input, @NotNull ATNConfigSet s0, @Nullable DFAState ds0) {

View File

@ -1408,7 +1408,7 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
System.out.println("reportAttemptingFullContext decision="+dfa.decision+":"+configs+
", input="+parser.getInputString(startIndex, stopIndex));
}
if ( parser!=null ) parser.getErrorHandler().reportAttemptingFullContext(parser, dfa, startIndex, stopIndex, configs);
if ( parser!=null ) parser.getErrorListenerDispatch().reportAttemptingFullContext(parser, dfa, startIndex, stopIndex, configs);
}
public void reportContextSensitivity(DFA dfa, ATNConfigSet configs, int startIndex, int stopIndex) {
@ -1416,7 +1416,7 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
System.out.println("reportContextSensitivity decision="+dfa.decision+":"+configs+
", input="+parser.getInputString(startIndex, stopIndex));
}
if ( parser!=null ) parser.getErrorHandler().reportContextSensitivity(parser, dfa, startIndex, stopIndex, configs);
if ( parser!=null ) parser.getErrorListenerDispatch().reportContextSensitivity(parser, dfa, startIndex, stopIndex, configs);
}
/** If context sensitive parsing, we know it's ambiguity not conflict */
@ -1445,7 +1445,7 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
ambigAlts+":"+configs+
", input="+parser.getInputString(startIndex, stopIndex));
}
if ( parser!=null ) parser.getErrorHandler().reportAmbiguity(parser, dfa, startIndex, stopIndex,
if ( parser!=null ) parser.getErrorListenerDispatch().reportAmbiguity(parser, dfa, startIndex, stopIndex,
ambigAlts, configs);
}
}

View File

@ -91,7 +91,13 @@ public abstract class Transition {
@NotNull
public ATNState target;
protected Transition(@NotNull ATNState target) { this.target = target; }
protected Transition(@NotNull ATNState target) {
if (target == null) {
throw new NullPointerException("target cannot be null.");
}
this.target = target;
}
public int getSerializationType() { return 0; }

View File

@ -52,8 +52,7 @@ public class Interval {
* Interval object with a..a in it. On Java.g, 218623 IntervalSets
* have a..a (set with 1 element).
*/
public static Interval create(int a, int b) {
//return new Interval(a,b);
public static Interval of(int a, int b) {
// cache just a..a
if ( a!=b || a<0 || a>INTERVAL_POOL_MAX_VALUE ) {
return new Interval(a,b);
@ -64,6 +63,14 @@ public class Interval {
return cache[a];
}
/** Return the number of elements between a and b, inclusive. x..x has length 1;
* if b < a, the length is 0. 9..10 has length 2.
*/
public int length() {
if ( b<a ) return 0;
return b-a+1;
}
@Override
public boolean equals(Object o) {
if ( o==null ) {
@ -112,12 +119,12 @@ public class Interval {
/** Return the interval computed from combining this and other */
public Interval union(Interval other) {
return Interval.create(Math.min(a,other.a), Math.max(b,other.b));
return Interval.of(Math.min(a, other.a), Math.max(b, other.b));
}
/** Return the interval in common between this and other */
public Interval intersection(Interval other) {
return Interval.create(Math.max(a,other.a), Math.min(b,other.b));
return Interval.of(Math.max(a, other.a), Math.min(b, other.b));
}
/** Return the interval with elements from this not in other;
@ -129,13 +136,13 @@ public class Interval {
Interval diff = null;
// other.a to left of this.a (or same)
if ( other.startsBeforeNonDisjoint(this) ) {
diff = Interval.create(Math.max(this.a,other.b+1),
this.b);
diff = Interval.of(Math.max(this.a, other.b + 1),
this.b);
}
// other.a to right of this.a
else if ( other.startsAfterNonDisjoint(this) ) {
diff = Interval.create(this.a, other.a-1);
diff = Interval.of(this.a, other.a - 1);
}
return diff;
}
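
The renamed factory and new length() helper behave as the code above implies; for illustration:

Interval i = Interval.of(3, 7);    // replaces Interval.create(3, 7)
int n = i.length();                // 5, i.e. b - a + 1; 0 whenever b < a
Interval x = Interval.of(5, 5);
Interval y = Interval.of(5, 5);    // single-element intervals within the pool range are cached,
                                   // so x == y refers to the same object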

View File

@ -28,9 +28,15 @@
*/
package org.antlr.v4.runtime.misc;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Token;
import java.util.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
/** A set of integers that relies on ranges being common to do
* "run-length-encoded" like compression (if you view an IntSet like
@ -111,7 +117,7 @@ public class IntervalSet implements IntSet {
* {1..5, 6..7, 10..20}. Adding 4..8 yields {1..8, 10..20}.
*/
public void add(int a, int b) {
add(Interval.create(a,b));
add(Interval.of(a, b));
}
// copy on write so we can cache a..a intervals and sets of that

View File

@ -29,7 +29,15 @@
package org.antlr.v4.runtime.misc;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.DiagnosticErrorListener;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import java.io.FileInputStream;
import java.io.InputStream;
@ -147,7 +155,7 @@ public class TestRig {
Constructor<Parser> parserCtor = parserClass.getConstructor(TokenStream.class);
Parser parser = parserCtor.newInstance(tokens);
parser.setErrorHandler(new DiagnosticErrorStrategy());
parser.addErrorListener(new DiagnosticErrorListener());
if ( printTree || gui || psFile!=null ) {
parser.setBuildParseTree(true);

View File

@ -41,7 +41,7 @@ public interface SyntaxTree extends Tree {
* node is a leaf, then the interval represents a single token.
*
* If source interval is unknown, this does not return null.
* It returns an interval of length 0.
* It returns Interval.INVALID.
*/
Interval getSourceInterval();
}

View File

@ -1,6 +1,8 @@
grammar T;
s : f f EOF;
f : | x;
x : 'a' 'b';
s : e ';' ;
e : e '*' e
| ID
| INT
;
INT : '0'..'9'+;
WS : (' '|'\n') {skip();} ;

View File

@ -29,7 +29,7 @@
import org.antlr.v4.runtime.ANTLRFileStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.DiagnosticErrorStrategy;
import org.antlr.v4.runtime.DiagnosticErrorListener;
public class TestR {
public static void main(String[] args) throws Exception {
@ -41,7 +41,7 @@ public class TestR {
// }
RParser p = new RParser(tokens);
p.setBuildParseTree(true);
p.setErrorHandler(new DiagnosticErrorStrategy());
p.addErrorListener(new DiagnosticErrorListener());
p.prog();
}
}

View File

@ -37,7 +37,7 @@ recRule(ruleName, precArgDef, argName, primaryAlts, opAlts, setResultAction,
userRetvals, leftRecursiveRuleRefLabels) ::=
<<
<ruleName>[<precArgDef>]<if(userRetvals)> returns [<userRetvals>]<endif>
: ( <primaryAlts:{alt | <alt.altText> }; separator="\n | ">
: ( {} <primaryAlts:{alt | <alt.altText> }; separator="\n | ">
)
( <opAlts; separator="\n | ">
)*

View File

@ -117,7 +117,7 @@ Parser(parser, funcs, atn, sempredFuncs, superclass) ::= <<
Parser_(parser, funcs, atn, sempredFuncs, ctor, extras, superclass) ::= <<
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class <parser.name> extends <superclass> {
public <if(parser.abstractRecognizer)>abstract <endif>class <parser.name> extends <superclass> {
<if(parser.tokens)>
public static final int
<parser.tokens:{k | <k>=<parser.tokens.(k)>}; separator=", ", wrap, anchor>;
@ -229,11 +229,11 @@ RuleFunction(currentRule,code,locals,ruleCtx,altLabelCtxs,namedActions,finallyAc
int _alt;
<endif>
<code>
_localctx.stop = _input.LT(-1);
<postamble; separator="\n">
<namedActions.after>
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
@ -265,11 +265,11 @@ LeftRecursiveRuleFunction(currentRule,code,locals,ruleCtx,altLabelCtxs,
int _alt;
<endif>
<code>
_localctx.stop = _input.LT(-1);
<postamble; separator="\n">
<namedActions.after>
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
@ -608,6 +608,7 @@ public static class <struct.name> extends <superClass><if(interfaces)> implement
super(parent, state);
<struct.ctorAttrs:{a | this.<a.name> = <a.name>;}; separator="\n">
}
@Override public int getRuleIndex() { return RULE_<struct.derivedFromName>; }
<if(struct.provideCopyFrom)> <! don't need copy unless we have subclasses !>
public <struct.name>() { }
public void copyFrom(<struct.name> ctx) {
@ -713,7 +714,7 @@ import org.antlr.v4.runtime.misc.*;
Lexer(lexer, atn, actionFuncs, sempredFuncs) ::= <<
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class <lexer.name> extends Lexer {
public <if(lexer.abstractRecognizer)>abstract <endif>class <lexer.name> extends Lexer {
public static final int
<lexer.tokens:{k | <k>=<lexer.tokens.(k)>}; separator=", ", wrap, anchor>;
<rest(lexer.modes):{m| public static final int <m> = <i>;}; separator="\n">

View File

@ -116,6 +116,7 @@ public class Tool {
public boolean gen_listener = true;
public boolean gen_parse_listener = false;
public boolean gen_visitor = false;
public boolean abstract_recognizer = false;
public static Option[] optionDefs = {
new Option("outputDirectory", "-o", OptionArgType.STRING, "specify output directory where all output is generated"),
@ -133,6 +134,7 @@ public class Tool {
new Option("gen_parse_listener", "-no-parse-listener", "don't generate parse listener (default)"),
new Option("gen_visitor", "-visitor", "generate parse tree visitor"),
new Option("gen_visitor", "-no-visitor", "don't generate parse tree visitor (default)"),
new Option("abstract_recognizer", "-abstract", "generate abstract recognizer classes"),
new Option("saveLexer", "-Xsave-lexer", "save temp lexer file created for combined grammars"),
new Option("launch_ST_inspector", "-XdbgST", "launch StringTemplate visualizer on generated code"),

View File

@ -133,6 +133,11 @@ public class ATNSerializer {
if ( s==null ) continue; // might be optimized away
for (int i=0; i<s.getNumberOfTransitions(); i++) {
Transition t = s.transition(i);
if (atn.states.get(t.target.stateNumber) == null) {
throw new IllegalStateException("Cannot serialize a transition to a removed state.");
}
int src = s.stateNumber;
int trg = t.target.stateNumber;
int edgeType = Transition.serializationTypes.get(t.getClass());

View File

@ -49,6 +49,7 @@ public class Lexer extends OutputModelObject {
public String[] tokenNames;
public Set<String> ruleNames;
public Collection<String> modes;
public boolean abstractRecognizer;
@ModelElement public SerializedATN atn;
@ModelElement public LinkedHashMap<Rule, RuleActionFunction> actionFuncs =
@ -89,6 +90,7 @@ public class Lexer extends OutputModelObject {
}
}
ruleNames = g.rules.keySet();
abstractRecognizer = g.isAbstract();
}
}

View File

@ -47,6 +47,7 @@ public class Parser extends OutputModelObject {
public Set<String> ruleNames;
public Collection<Rule> rules;
public ParserFile file;
public boolean abstractRecognizer;
@ModelElement public List<RuleFunction> funcs = new ArrayList<RuleFunction>();
@ModelElement public SerializedATN atn;
@ -89,5 +90,7 @@ public class Parser extends OutputModelObject {
} else {
superclass = new DefaultParserSuperClass();
}
abstractRecognizer = g.isAbstract();
}
}

View File

@ -74,6 +74,7 @@ public class BasicSemanticChecks extends GrammarTreeVisitor {
add("TokenLabelType");
add("superClass");
add("filter");
add("abstract");
}
};
@ -83,6 +84,7 @@ public class BasicSemanticChecks extends GrammarTreeVisitor {
add("tokenVocab");
add("TokenLabelType");
add("superClass");
add("abstract");
}
};

View File

@ -362,6 +362,11 @@ public class Grammar implements AttributeResolver {
return parent.getOutermostGrammar();
}
public boolean isAbstract() {
return Boolean.parseBoolean(getOptionString("abstract"))
|| (tool != null && tool.abstract_recognizer);
}
/** Get the name of the generated recognizer; may or may not be same
* as grammar name.
* Recognizer is TParser and TLexer from T if combined, else
@ -377,9 +382,16 @@ public class Grammar implements AttributeResolver {
buf.append(g.name);
buf.append('_');
}
if (isAbstract()) {
buf.append("Abstract");
}
buf.append(name);
qualifiedName = buf.toString();
}
else if (isAbstract()) {
qualifiedName = "Abstract" + name;
}
if ( isCombined() || (isLexer() && implicitLexer!=null) )
{
suffix = Grammar.getGrammarTypeToFileNameSuffix(getType());
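
Combined with the new -abstract tool option and the abstractRecognizer flag threaded through the code-generation templates above, an abstract grammar now yields an abstract recognizer whose class name carries an Abstract prefix. Roughly, for a combined grammar T generated with -abstract (a sketch of the expected shape, not verbatim generated code):

// Generated:
public abstract class AbstractTParser extends Parser { /* ... */ }
public abstract class AbstractTLexer  extends Lexer  { /* ... */ }

// Hand-written concrete subclass supplying whatever the application adds:
public class TParser extends AbstractTParser {
    public TParser(TokenStream input) { super(input); }
}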

View File

@ -317,7 +317,14 @@ public class GrammarTransformPipeline {
(GrammarAST)adaptor.create(ANTLRParser.RULES, "RULES");
lexerAST.addChild(lexerRulesRoot);
List<GrammarAST> rulesWeMoved = new ArrayList<GrammarAST>();
GrammarASTWithOptions[] rules = ((List<?>)combinedRulesRoot.getChildren()).toArray(new GrammarASTWithOptions[0]);
GrammarASTWithOptions[] rules;
if (combinedRulesRoot.getChildCount() > 0) {
rules = ((List<?>)combinedRulesRoot.getChildren()).toArray(new GrammarASTWithOptions[0]);
}
else {
rules = new GrammarASTWithOptions[0];
}
if ( rules!=null ) {
for (GrammarASTWithOptions r : rules) {
String ruleName = r.getChild(0).getText();

View File

@ -36,7 +36,15 @@ import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.misc.Utils;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.WritableToken;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.atn.DecisionState;
@ -44,7 +52,12 @@ import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.Nullable;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.tool.*;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarSemanticsMessage;
import org.antlr.v4.tool.LexerGrammar;
import org.antlr.v4.tool.Rule;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@ -56,13 +69,29 @@ import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import java.io.*;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.PrintStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
@ -610,29 +639,10 @@ public abstract class BaseTest {
msg = msg.replaceAll("\r","\\\\r");
msg = msg.replaceAll("\t","\\\\t");
// ignore error number
if ( expect!=null ) expect = stripErrorNum(expect);
actual = stripErrorNum(actual);
assertEquals("error in: "+msg,expect,actual);
}
}
// can be multi-line
//error(29): A.g:2:11: unknown attribute reference a in $a
//error(29): A.g:2:11: unknown attribute reference a in $a
String stripErrorNum(String errs) {
String[] lines = errs.split("\n");
for (int i=0; i<lines.length; i++) {
String s = lines[i];
int lp = s.indexOf("error(");
int rp = s.indexOf(')', lp);
if ( lp>=0 && rp>=0 ) {
lines[i] = s.substring(0, lp) + s.substring(rp+1, s.length());
}
}
return Utils.join(lines, "\n");
}
public String getFilenameFromFirstLineOfGrammar(String line) {
String fileName = "<string>";
int grIndex = line.lastIndexOf("grammar");
@ -895,7 +905,7 @@ public abstract class BaseTest {
createParserST =
new ST(
" <parserName> parser = new <parserName>(tokens);\n" +
" parser.setErrorHandler(new DiagnosticErrorStrategy());\n");
" parser.addErrorListener(new DiagnosticErrorListener());\n");
}
outputFileST.add("createParser", createParserST);
outputFileST.add("parserName", parserName);

View File

@ -50,18 +50,18 @@ public class TestBasicSemanticErrors extends BaseTest {
"b : ( options { ick=bar; greedy=true; } : ID )+ ;\n" +
"c : ID<blue> ID<x=y> ;",
// YIELDS
"warning(47): U.g:2:10: illegal option foo\n" +
"warning(47): U.g:2:19: illegal option k\n" +
": U.g:4:8: token names must start with an uppercase letter: f\n" +
": U.g:4:8: can't assign string value to token name f in non-combined grammar\n" +
": U.g:5:8: can't assign string value to token name S in non-combined grammar\n" +
"warning(47): U.g:8:10: illegal option x\n" +
": U.g:8:0: repeated grammar prequel spec (option, token, or import); please merge\n" +
": U.g:7:0: repeated grammar prequel spec (option, token, or import); please merge\n" +
"warning(47): U.g:11:10: illegal option blech\n" +
"warning(47): U.g:11:21: illegal option greedy\n" +
"warning(47): U.g:14:16: illegal option ick\n" +
"warning(47): U.g:15:16: illegal option x\n",
"warning(48): U.g:2:10: illegal option foo\n" +
"warning(48): U.g:2:19: illegal option k\n" +
"error(26): U.g:4:8: token names must start with an uppercase letter: f\n" +
"error(25): U.g:4:8: can't assign string value to token name f in non-combined grammar\n" +
"error(25): U.g:5:8: can't assign string value to token name S in non-combined grammar\n" +
"warning(48): U.g:8:10: illegal option x\n" +
"error(20): U.g:8:0: repeated grammar prequel spec (option, token, or import); please merge\n" +
"error(20): U.g:7:0: repeated grammar prequel spec (option, token, or import); please merge\n" +
"warning(48): U.g:11:10: illegal option blech\n" +
"warning(48): U.g:11:21: illegal option greedy\n" +
"warning(48): U.g:14:16: illegal option ick\n" +
"warning(48): U.g:15:16: illegal option x\n",
};
@Test public void testU() { super.testErrors(U, false); }

View File

@ -55,7 +55,7 @@ public class TestFullContextParsing extends BaseTest {
"Decision 0:\n" +
"s0-ID->:s1=>1\n"; // not ctx sensitive
assertEquals(expecting, result);
assertEquals("line 1:0 reportAmbiguity d=0: ambigAlts={1..2}:[(1,1,[]), (1,2,[])],conflictingAlts={1..2}, input='abc'\n",
assertEquals("line 1:0 reportAmbiguity d=0: ambigAlts={1..2}, input='abc'\n",
this.stderrDuringParse);
}
@ -77,8 +77,8 @@ public class TestFullContextParsing extends BaseTest {
"s0-INT->s1\n" +
"s1-ID->s2^\n";
assertEquals(expecting, result);
assertEquals("line 1:5 reportAttemptingFullContext d=1: [(28,1,[18 10]), (20,2,[10])], input='34abc'\n" +
"line 1:2 reportContextSensitivity d=1: [(20,1,[10])],uniqueAlt=1, input='34'\n",
assertEquals("line 1:5 reportAttemptingFullContext d=1, input='34abc'\n" +
"line 1:2 reportContextSensitivity d=1, input='34'\n",
this.stderrDuringParse);
result = execParser("T.g", grammar, "TParser", "TLexer", "s",
@ -88,8 +88,8 @@ public class TestFullContextParsing extends BaseTest {
"s0-INT->s1\n" +
"s1-ID->s2^\n";
assertEquals(expecting, result);
assertEquals("line 1:5 reportAttemptingFullContext d=1: [(28,1,[22 14]), (24,2,[14])], input='34abc'\n" +
"line 1:5 reportContextSensitivity d=1: [(1,2,[])],uniqueAlt=2, input='34abc'\n",
assertEquals("line 1:5 reportAttemptingFullContext d=1, input='34abc'\n" +
"line 1:5 reportContextSensitivity d=1, input='34abc'\n",
this.stderrDuringParse);
}
@ -107,19 +107,14 @@ public class TestFullContextParsing extends BaseTest {
String result = execParser("T.g", grammar, "TParser", "TLexer", "s",
"$ 34 abc @ 34 abc", true);
String expecting =
"Decision 1:\n" +
"s0-EOF->:s3=>2\n" +
"s0-'@'->:s2=>1\n" +
"s0-'$'->:s1=>1\n" +
"\n" +
"Decision 2:\n" +
"s0-INT->s1\n" +
"s1-ID->s2^\n";
assertEquals(expecting, result);
assertEquals("line 1:5 reportAttemptingFullContext d=2: [(30,1,[20 10]), (22,2,[10])], input='34abc'\n" +
"line 1:2 reportContextSensitivity d=2: [(22,1,[10])],uniqueAlt=1, input='34'\n" +
"line 1:14 reportAttemptingFullContext d=2: [(30,1,[24 14]), (26,2,[14])], input='34abc'\n" +
"line 1:14 reportContextSensitivity d=2: [(8,2,[18]), (12,2,[18]), (1,2,[])],uniqueAlt=2, input='34abc'\n",
assertEquals("line 1:5 reportAttemptingFullContext d=2, input='34abc'\n" +
"line 1:2 reportContextSensitivity d=2, input='34'\n" +
"line 1:14 reportAttemptingFullContext d=2, input='34abc'\n" +
"line 1:14 reportContextSensitivity d=2, input='34abc'\n",
this.stderrDuringParse);
}
@ -139,10 +134,6 @@ public class TestFullContextParsing extends BaseTest {
String result = execParser("T.g", grammar, "TParser", "TLexer", "s",
input, true);
String expecting =
"Decision 0:\n" +
"s0-'if'->:s1=>1\n" +
"s0-'}'->:s2=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-'}'->:s1=>2\n";
assertEquals(expecting, result);
@ -153,27 +144,20 @@ public class TestFullContextParsing extends BaseTest {
result = execParser("T.g", grammar, "TParser", "TLexer", "s",
input, true);
expecting =
"Decision 0:\n" +
"s0-'if'->:s1=>1\n" +
"s0-'}'->:s2=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-'else'->:s1=>1\n" +
"s0-'else'->s1^\n" +
"s0-'}'->:s2=>2\n";
assertEquals(expecting, result);
assertEquals("line 1:29 reportAmbiguity d=1: ambigAlts={1..2}:[(25,1,[]), (25,2,[],up=1)],conflictingAlts={1..2},dipsIntoOuterContext, input='else'\n",
assertEquals("line 1:29 reportAttemptingFullContext d=1, input='else'\n" +
"line 1:38 reportAmbiguity d=1: ambigAlts={1..2}, input='elsefoo}'\n",
this.stderrDuringParse);
input = "{ if x then return else foo }";
result = execParser("T.g", grammar, "TParser", "TLexer", "s",
input, true);
expecting =
"Decision 0:\n" +
"s0-'if'->:s1=>1\n" +
"s0-'}'->:s2=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-'else'->:s1=>1\n";
"s0-'else'->s1^\n";
assertEquals(expecting, result);
// Technically, this input sequence is not ambiguous because else
// uniquely predicts going into the optional subrule. else cannot
@ -181,21 +165,19 @@ public class TestFullContextParsing extends BaseTest {
// the start of a stat. But, we are using the theory that
// SLL(1)=LL(1) and so we are avoiding full context parsing
// by declaring all else clause parsing to be ambiguous.
assertEquals("line 1:19 reportAmbiguity d=1: ambigAlts={1..2}:[(25,1,[]), (25,2,[],up=1)],conflictingAlts={1..2},dipsIntoOuterContext, input='else'\n",
assertEquals("line 1:19 reportAttemptingFullContext d=1, input='else'\n" +
"line 1:19 reportContextSensitivity d=1, input='else'\n",
this.stderrDuringParse);
input = "{ if x then return else foo }";
result = execParser("T.g", grammar, "TParser", "TLexer", "s",
input, true);
expecting =
"Decision 0:\n" +
"s0-'if'->:s1=>1\n" +
"s0-'}'->:s2=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-'else'->:s1=>1\n";
"s0-'else'->s1^\n";
assertEquals(expecting, result);
assertEquals("line 1:19 reportAmbiguity d=1: ambigAlts={1..2}:[(25,1,[]), (25,2,[],up=1)],conflictingAlts={1..2},dipsIntoOuterContext, input='else'\n",
assertEquals("line 1:19 reportAttemptingFullContext d=1, input='else'\n" +
"line 1:19 reportContextSensitivity d=1, input='else'\n",
this.stderrDuringParse);
input =
@ -204,15 +186,14 @@ public class TestFullContextParsing extends BaseTest {
result = execParser("T.g", grammar, "TParser", "TLexer", "s",
input, true);
expecting =
"Decision 0:\n" +
"s0-'if'->:s1=>1\n" +
"s0-'}'->:s2=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-'else'->:s1=>1\n" +
"s0-'else'->s1^\n" +
"s0-'}'->:s2=>2\n";
assertEquals(expecting, result);
assertEquals("line 1:19 reportAmbiguity d=1: ambigAlts={1..2}:[(25,1,[]), (25,2,[],up=1)],conflictingAlts={1..2},dipsIntoOuterContext, input='else'\n",
assertEquals("line 1:19 reportAttemptingFullContext d=1, input='else'\n" +
"line 1:19 reportContextSensitivity d=1, input='else'\n" +
"line 2:27 reportAttemptingFullContext d=1, input='else'\n" +
"line 2:36 reportAmbiguity d=1: ambigAlts={1..2}, input='elsefoo}'\n",
this.stderrDuringParse);
input =
@ -221,15 +202,14 @@ public class TestFullContextParsing extends BaseTest {
result = execParser("T.g", grammar, "TParser", "TLexer", "s",
input, true);
expecting =
"Decision 0:\n" +
"s0-'if'->:s1=>1\n" +
"s0-'}'->:s2=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-'else'->:s1=>1\n" +
"s0-'else'->s1^\n" +
"s0-'}'->:s2=>2\n";
assertEquals(expecting, result);
assertEquals("line 1:19 reportAmbiguity d=1: ambigAlts={1..2}:[(25,1,[]), (25,2,[],up=1)],conflictingAlts={1..2},dipsIntoOuterContext, input='else'\n",
assertEquals("line 1:19 reportAttemptingFullContext d=1, input='else'\n" +
"line 1:19 reportContextSensitivity d=1, input='else'\n" +
"line 2:27 reportAttemptingFullContext d=1, input='else'\n" +
"line 2:36 reportAmbiguity d=1: ambigAlts={1..2}, input='elsefoo}'\n",
this.stderrDuringParse);
}
@ -259,10 +239,10 @@ public class TestFullContextParsing extends BaseTest {
assertEquals("pass.\n", found);
String expecting =
"line 1:4 reportAttemptingFullContext d=1: [(35,1,[27 15 8]), (41,1,[27 15 8]), (49,1,[27 15 8]), (35,2,[27 21 8]), (41,2,[27 21 8]), (49,2,[27 21 8])], input='a(i)<-'\n" +
"line 1:7 reportContextSensitivity d=1: [(53,2,[])],uniqueAlt=2, input='a(i)<-x'\n" +
"line 1:3 reportAttemptingFullContext d=3: [(35,1,[27 21 8]), (41,2,[27 21 8]), (49,3,[27 21 8])], input='a(i)'\n" +
"line 1:7 reportAmbiguity d=3: ambigAlts={2..3}:[(53,2,[]), (53,3,[])],conflictingAlts={2..3}, input='a(i)<-x'\n";
"line 1:4 reportAttemptingFullContext d=1, input='a(i)<-'\n" +
"line 1:7 reportContextSensitivity d=1, input='a(i)<-x'\n" +
"line 1:3 reportAttemptingFullContext d=3, input='a(i)'\n" +
"line 1:7 reportAmbiguity d=3: ambigAlts={2..3}, input='a(i)<-x'\n";
assertEquals(expecting, this.stderrDuringParse);
}

View File

@ -276,6 +276,67 @@ public class TestLeftRecursion extends BaseTest {
runTests(grammar, tests, "s");
}
@Test
public void testAmbigLR() throws Exception {
String grammar =
"// START: g\n" +
"grammar Expr;\n" +
"// END: g\n" +
"\n" +
"// START:stat\n" +
"prog: stat ;\n" +
"\n" +
"stat: expr NEWLINE -> printExpr\n" +
" | ID '=' expr NEWLINE -> assign\n" +
" | NEWLINE -> blank\n" +
" ;\n" +
"// END:stat\n" +
"\n" +
"// START:expr\n" +
"expr: expr ('*'|'/') expr -> MulDiv\n" +
" | expr ('+'|'-') expr -> AddSub\n" +
" | INT -> int\n" +
" | ID -> id\n" +
" | '(' expr ')' -> parens\n" +
" ;\n" +
"// END:expr\n" +
"\n" +
"// show marginal cost of adding a clear/wipe command for memory\n" +
"\n" +
"// START:tokens\n" +
"MUL : '*' ; // assigns token name to '*' used above in grammar\n" +
"DIV : '/' ;\n" +
"ADD : '+' ;\n" +
"SUB : '-' ;\n" +
"ID : [a-zA-Z]+ ; // match identifiers\n" +
"INT : [0-9]+ ; // match integers\n" +
"NEWLINE:'\\r'? '\\n' ; // return newlines to parser (is end-statement signal)\n" +
"WS : [ \\t]+ -> skip ; // toss out whitespace\n" +
"// END:tokens\n";
String result = execParser("Expr.g4", grammar, "ExprParser", "ExprLexer", "prog", "1\n", true);
assertNull(stderrDuringParse);
result = execParser("Expr.g4", grammar, "ExprParser", "ExprLexer", "prog", "a = 5\n", true);
assertNull(stderrDuringParse);
result = execParser("Expr.g4", grammar, "ExprParser", "ExprLexer", "prog", "b = 6\n", true);
assertNull(stderrDuringParse);
result = execParser("Expr.g4", grammar, "ExprParser", "ExprLexer", "prog", "a+b*2\n", true);
assertEquals("line 1:1 reportAttemptingFullContext d=3, input='+'\n" +
"line 1:1 reportContextSensitivity d=3, input='+'\n" +
"line 1:3 reportAttemptingFullContext d=3, input='*'\n" +
"line 1:3 reportAmbiguity d=3: ambigAlts={1..2}, input='*'\n",
stderrDuringParse);
result = execParser("Expr.g4", grammar, "ExprParser", "ExprLexer", "prog", "(1+2)*3\n", true);
assertEquals("line 1:2 reportAttemptingFullContext d=3, input='+'\n" +
"line 1:2 reportContextSensitivity d=3, input='+'\n" +
"line 1:5 reportAttemptingFullContext d=3, input='*'\n" +
"line 1:5 reportContextSensitivity d=3, input='*'\n",
stderrDuringParse);
}
public void runTests(String grammar, String[] tests, String startRule) {
rawGenerateAndBuildRecognizer("T.g", grammar, "TParser", "TLexer");
writeRecognizerAndCompile("TParser",

View File

@ -142,4 +142,29 @@ public class TestLexerErrors extends BaseTest {
// TEST RECOVERY
/**
* This is a regression test for #46 "NullPointerException in LexerATNSimulator.execDFA".
* https://github.com/antlr/antlr4/issues/46
*/
@Test
public void testLexerExecDFA() throws Exception {
String grammar =
"grammar T;\n" +
"start : ID ':' expr;\n" +
"expr : primary expr? {} | expr '->' ID;\n" +
"primary : ID;\n" +
"ID : [a-z]+;\n" +
"\n";
String result = execLexer("T.g", grammar, "TLexer", "x : x", false);
String expecting =
"[@0,0:0='x',<5>,1:0]\n" +
"[@1,2:2=':',<4>,1:2]\n" +
"[@2,4:4='x',<5>,1:4]\n" +
"[@3,5:4='<EOF>',<-1>,1:5]\n";
assertEquals(expecting, result);
assertEquals("line 1:1 token recognition error at: ' '\n" +
"line 1:3 token recognition error at: ' '\n",
this.stderrDuringParse);
}
}

View File

@ -192,7 +192,7 @@ public class TestListeners extends BaseTest {
"1\n" +
"2\n" +
"3\n" +
"1 [14 6]\n";
"1 [16 6]\n";
assertEquals(expecting, result);
}
}

View File

@ -180,23 +180,16 @@ public class TestNonGreedyLoops extends BaseTest {
"x", true);
assertEquals("alt 1\n" +
"Decision 0:\n" +
"s0-ID->:s1=>1\n" +
"\n" +
"Decision 1:\n" +
"s0-ID->:s1=>2\n", found);
assertEquals("line 1:0 extraneous input 'x' expecting <EOF>\n", this.stderrDuringParse);
"s0-ID->:s1=>1\n", found);
assertNull(this.stderrDuringParse);
found = execParser("T.g", grammar, "TParser", "TLexer", "s",
"34", true);
assertEquals("alt 1\n" +
"Decision 0:\n" +
"s0-INT->s1\n" +
"s1-EOF->:s2=>1\n" +
"\n" +
"Decision 1:\n" +
"s0-INT->:s1=>2\n", found); // resolves INT EOF to alt 1 from s since ambig 'tween a and b
assertEquals("line 1:2 reportAmbiguity d=0: ambigAlts={1..2}:[(1,1,[]), (1,2,[])],conflictingAlts={1..2}, input='34'\n" +
"line 1:0 extraneous input '34' expecting <EOF>\n",
"s1-EOF->:s2=>1\n", found); // resolves INT EOF to alt 1 from s since ambig 'tween a and b
assertEquals("line 1:2 reportAmbiguity d=0: ambigAlts={1..2}, input='34'\n",
this.stderrDuringParse);
}
@ -360,10 +353,7 @@ public class TestNonGreedyLoops extends BaseTest {
"s2-INT->:s3=>1\n" +
"s2-ID->s4\n" +
"s4-';'->s5\n" +
"s5-EOF->:s6=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-ID->:s1=>3\n", found);
"s5-EOF->:s6=>2\n", found);
input =
"if ( 1 ) { x=3; { return 4; } } return 99; abc=def;";
found = execParser("T.g", grammar, "TParser", "TLexer", "s",
@ -376,19 +366,7 @@ public class TestNonGreedyLoops extends BaseTest {
"s3-'='->s4\n" +
"s4-ID->s5\n" +
"s5-';'->s6\n" +
"s6-EOF->:s7=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-'{'->:s2=>4\n" +
"s0-'if'->:s1=>1\n" +
"s0-'return'->:s4=>2\n" +
"s0-ID->:s3=>3\n" +
"\n" +
"Decision 2:\n" +
"s0-'{'->:s2=>1\n" +
"s0-'return'->:s3=>1\n" +
"s0-'}'->:s4=>2\n" +
"s0-ID->:s1=>1\n", found);
"s6-EOF->:s7=>2\n", found);
input =
"x=1; a=3;"; // FAILS to match since it can't match last element
execParser("T.g", grammar, "TParser", "TLexer", "s",
@ -436,10 +414,7 @@ public class TestNonGreedyLoops extends BaseTest {
"s1-'='->s2\n" +
"s2-INT->:s3=>1\n" +
"s2-ID->s4\n" +
"s4-';'->:s5=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-ID->:s1=>3\n", found); // ignores x=1 that follows first a=b assignment
"s4-';'->:s5=>2\n", found); // ignores x=1 that follows first a=b assignment
input =
"if ( 1 ) { x=3; { return 4; } } return 99; abc=def;";
found = execParser("T.g", grammar, "TParser", "TLexer", "s",
@ -451,19 +426,7 @@ public class TestNonGreedyLoops extends BaseTest {
"s0-ID->s3\n" +
"s3-'='->s4\n" +
"s4-ID->s5\n" +
"s5-';'->:s6=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-'{'->:s2=>4\n" +
"s0-'if'->:s1=>1\n" +
"s0-'return'->:s4=>2\n" +
"s0-ID->:s3=>3\n" +
"\n" +
"Decision 2:\n" +
"s0-'{'->:s2=>1\n" +
"s0-'return'->:s3=>1\n" +
"s0-'}'->:s4=>2\n" +
"s0-ID->:s1=>1\n", found);
"s5-';'->:s6=>2\n", found);
input =
"x=1; a=3;"; // FAILS to match since it can't match either stat
execParser("T.g", grammar, "TParser", "TLexer", "s",
@ -481,10 +444,7 @@ public class TestNonGreedyLoops extends BaseTest {
"s1-'='->s2\n" +
"s2-INT->:s3=>1\n" +
"s2-ID->s4\n" +
"s4-';'->:s5=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-ID->:s1=>3\n", found); // should not finish all input
"s4-';'->:s5=>2\n", found); // should not finish all input
}
@Test public void testHTMLTags() throws Exception {
@ -504,11 +464,6 @@ public class TestNonGreedyLoops extends BaseTest {
found = execParser("T.g", grammar, "TParser", "TLexer", "s",
"<a>foo</a>", true);
assertEquals("<a>foo</a>\n" +
"Decision 0:\n" +
"s0-EOF->:s3=>2\n" +
"s0-'<'->:s1=>1\n" +
"s0-ID->:s2=>1\n" +
"\n" +
"Decision 1:\n" +
"s0-'<'->s1\n" +
"s0-ID->:s5=>2\n" +
@ -527,20 +482,16 @@ public class TestNonGreedyLoops extends BaseTest {
"Decision 3:\n" +
"s0-'>'->:s2=>2\n" +
"s0-ID->:s1=>1\n", found);
assertEquals("line 1:6 reportAttemptingFullContext d=1: [(20,1,[14 6]), (16,2,[6])], input='<a>foo<'\n" +
"line 1:6 reportAmbiguity d=1: ambigAlts={1..2}:[(26,1,[32 32 32 32 14 6]), (33,1,[14 6]), (22,1,[14 6 10 10]), (26,1,[14 6 10 10]), (33,1,[14 6 10 10]), (20,1,[14 6 10 10 10]), (16,1,[6 10 10 10]), (1,1,[]), (22,2,[14 6 10 10 10 10]), (26,2,[14 6 10 10 10 10]), (33,2,[14 6 10 10 10 10]), (20,2,[14 6 10 10 10 10 10]), (16,2,[6 10 10 10 10 10]), (1,2,[])],conflictingAlts={1..2}, input='<a>foo<'\n" +
"line 1:10 reportAttemptingFullContext d=1: [(20,1,[14 6]), (16,2,[6])], input='</a>'\n" +
"line 1:10 reportAmbiguity d=1: ambigAlts={1..2}:[(35,1,[]), (35,2,[])],conflictingAlts={1..2}, input='</a>'\n" +
"line 1:7 reportAmbiguity d=2: ambigAlts={1..2}:[(26,1,[]), (33,1,[]), (26,2,[]), (33,2,[])],conflictingAlts={1..2}, input='/'\n",
assertEquals("line 1:6 reportAttemptingFullContext d=1, input='<a>foo<'\n" +
"line 1:6 reportAmbiguity d=1: ambigAlts={1..2}, input='<a>foo<'\n" +
"line 1:10 reportAttemptingFullContext d=1, input='</a>'\n" +
"line 1:10 reportAmbiguity d=1: ambigAlts={1..2}, input='</a>'\n" +
"line 1:7 reportAmbiguity d=2: ambigAlts={1..2}, input='/'\n",
this.stderrDuringParse);
found = execParser("T.g", grammar, "TParser", "TLexer", "s",
"<a></a>", true);
assertEquals("<a></a>\n" +
"Decision 0:\n" +
"s0-EOF->:s2=>2\n" +
"s0-'<'->:s1=>1\n" +
"\n" +
"Decision 1:\n" +
"s0-'<'->s1\n" +
"s1-'/'->s2\n" +
@ -560,10 +511,6 @@ public class TestNonGreedyLoops extends BaseTest {
found = execParser("T.g", grammar, "TParser", "TLexer", "s",
"</b><a src=\"abc\", width=32>", true);
assertEquals("</b><asrc=\"abc\",width=32>\n" +
"Decision 0:\n" +
"s0-EOF->:s2=>2\n" +
"s0-'<'->:s1=>1\n" +
"\n" +
"Decision 1:\n" +
"s0-'<'->s1\n" +
"s1-'/'->s2\n" +
@ -626,15 +573,6 @@ public class TestNonGreedyLoops extends BaseTest {
"s3-INT->s3\n" +
"s4-'='->s3\n" +
"\n" +
"Decision 1:\n" + // (tag|header)
"s0-'<'->:s1=>1\n" +
"\n" +
"Decision 2:\n" + // (...)*
"s0-EOF->:s3=>2\n" +
"s0-'<'->:s2=>1\n" +
"s0-','->:s1=>1\n" +
"s0-INT->:s1=>1\n" +
"\n" +
"Decision 3:\n" + // .+
"s0-'x'->:s1=>1\n" +
"s0-'>'->:s2=>2\n" +
@ -655,13 +593,6 @@ public class TestNonGreedyLoops extends BaseTest {
"s4-ID->s5\n" +
"s5-'>'->:s6=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-'<'->:s1=>1\n" +
"\n" +
"Decision 2:\n" +
"s0-EOF->:s2=>2\n" +
"s0-'x'->:s1=>1\n" +
"\n" +
"Decision 3:\n" +
"s0-'>'->:s2=>2\n" +
"s0-ID->:s1=>1\n", found);
@ -670,7 +601,7 @@ public class TestNonGreedyLoops extends BaseTest {
// Seeing '.' in a lookahead prediction can be misleading!!
found = execParser("T.g", grammar, "TParser", "TLexer", "s",
"x <><a>", true);
assertEquals("null\n" +
assertEquals("<\n" +
"<a>\n" +
"Decision 0:\n" +
"s0-'x'->s1\n" +
@ -682,14 +613,6 @@ public class TestNonGreedyLoops extends BaseTest {
"s4-'>'->:s7=>2\n" +
"s4-'<'->:s5=>2\n" +
"\n" +
"Decision 1:\n" +
"s0-'<'->:s1=>1\n" +
"\n" +
"Decision 2:\n" +
"s0-EOF->:s3=>2\n" +
"s0-'x'->:s1=>1\n" +
"s0-'>'->:s2=>1\n" +
"\n" +
"Decision 3:\n" +
"s0-'>'->:s1=>2\n" +
"s0-ID->:s2=>1\n", // doesn't match tag; null

View File

@ -28,6 +28,7 @@
package org.antlr.v4.test;
import org.antlr.v4.automata.ATNSerializer;
import org.junit.Test;
/** test runtime parse errors */
@ -253,4 +254,27 @@ public class TestParseErrors extends BaseTest {
assertEquals(expecting, result);
}
/**
* This is a regression test for #45 "NullPointerException in ATNConfig.hashCode".
* https://github.com/antlr/antlr4/issues/45
*
* The original cause of this issue was an error in the tool's ATN state optimization,
* which is now detected early in {@link ATNSerializer} by ensuring that all
* serialized transitions point to states which were not removed.
*/
@Test
public void testInvalidATNStateRemoval() throws Exception {
String grammar =
"grammar T;\n" +
"start : ID ':' expr;\n" +
"expr : primary expr? {} | expr '->' ID;\n" +
"primary : ID;\n" +
"ID : [a-z]+;\n" +
"\n";
String result = execParser("T.g", grammar, "TParser", "TLexer", "start", "x:x", true);
String expecting = "";
assertEquals(expecting, result);
assertNull(this.stderrDuringParse);
}
}
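
The regression test above relies on the tool catching the bad optimization at serialization time: every serialized transition must point at a state that survived ATN state removal. The following is a minimal illustrative sketch of that invariant, not the actual ATNSerializer code; the class and method names are hypothetical.

import java.util.List;
import java.util.Set;

// Hypothetical helper, for illustration only: fail fast if any transition
// targets a state that the ATN optimizer removed.
class RetainedStateCheck {
    static void check(Set<Integer> retainedStates, List<int[]> transitions) {
        for (int[] t : transitions) {          // t = {sourceState, targetState}
            int target = t[1];
            if (!retainedStates.contains(target)) {
                throw new IllegalStateException(
                    "serialized transition points to removed state " + target);
            }
        }
    }
}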

View File

@ -28,22 +28,48 @@
package org.antlr.v4.test;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.*;
import org.antlr.v4.runtime.ANTLRFileStream;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.DefaultErrorStrategy;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.atn.ATNConfig;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.atn.ParserATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.dfa.DFAState;
import org.antlr.v4.runtime.misc.Nullable;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeListener;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import java.io.*;
import java.lang.reflect.*;
import java.net.*;
import java.util.*;
import java.util.logging.*;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
public class TestPerformance extends BaseTest {
/** Parse all Java files under this package within the JDK_SOURCE_ROOT. */
@ -513,7 +539,7 @@ public class TestPerformance extends BaseTest {
void parseFile(CharStream input);
}
private static class DescriptiveErrorListener implements ANTLRErrorListener<Token> {
private static class DescriptiveErrorListener extends BaseErrorListener<Token> {
public static DescriptiveErrorListener INSTANCE = new DescriptiveErrorListener();
@Override

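
Switching DescriptiveErrorListener from implementing ANTLRErrorListener<Token> to extending BaseErrorListener<Token> means it inherits no-op defaults for the reportAmbiguity, reportAttemptingFullContext, and reportContextSensitivity callbacks and only has to override syntaxError. A minimal sketch of that pattern, written against the released (non-generic) ANTLR 4 listener API rather than this snapshot's generic variant:

import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;

// Sketch only: overrides just the callback it cares about; the ambiguity and
// full-context report methods keep their empty BaseErrorListener defaults.
class LineColumnErrorListener extends BaseErrorListener {
    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                            int line, int charPositionInLine, String msg,
                            RecognitionException e) {
        System.err.println("line " + line + ":" + charPositionInLine + " " + msg);
    }
}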
View File

@ -153,7 +153,10 @@ public class TestSemPredEvalParser extends BaseTest {
"alt 1\n" +
"alt 1\n";
assertEquals(expecting, found);
assertEquals("line 1:0 reportAmbiguity d=0: ambigAlts={1..2}:[(6,1,[],up=1), (1,1,[],up=1), (6,2,[],up=1), (1,2,[],up=1), (6,3,[],{1:0}?,up=1), (1,3,[],{1:0}?,up=1)],hasSemanticContext=true,conflictingAlts={1..3},dipsIntoOuterContext, input='x'\n",
assertEquals("line 1:0 reportAttemptingFullContext d=0, input='x'\n" +
"line 1:0 reportAmbiguity d=0: ambigAlts={1..2}, input='x'\n" +
"line 1:3 reportAttemptingFullContext d=0, input='y'\n" +
"line 1:3 reportAmbiguity d=0: ambigAlts={1..2}, input='y'\n",
this.stderrDuringParse);
}
@ -184,7 +187,10 @@ public class TestSemPredEvalParser extends BaseTest {
"alt 2\n" +
"alt 2\n";
assertEquals(expecting, found);
assertEquals("line 1:4 reportAmbiguity d=0: ambigAlts={2..3}:[(6,2,[],up=1), (10,2,[],up=1), (1,2,[],up=1), (6,3,[],up=1), (10,3,[],up=1), (1,3,[],up=1), (6,4,[],{1:0}?,up=1), (10,4,[],{1:0}?,up=1), (1,4,[],{1:0}?,up=1)],hasSemanticContext=true,conflictingAlts={2..4},dipsIntoOuterContext, input='x'\n",
assertEquals("line 1:4 reportAttemptingFullContext d=0, input='x'\n" +
"line 1:4 reportAmbiguity d=0: ambigAlts={2..3}, input='x'\n" +
"line 1:7 reportAttemptingFullContext d=0, input='y'\n" +
"line 1:7 reportAmbiguity d=0: ambigAlts={2..3}, input='y'\n",
this.stderrDuringParse);
}

View File

@ -20,8 +20,9 @@ public class TestSymbolIssues extends BaseTest {
"\n" +
"ID : 'a'..'z'+ ID ;",
// YIELDS
"warning(51): A.g:2:10: illegal option opt\n" +
"error(60): A.g:7:1: redefinition of header action\n" +
"warning(48): A.g:2:10: illegal option opt\n" +
"warning(48): A.g:2:21: illegal option k\n" +
"error(59): A.g:7:1: redefinition of header action\n" +
"warning(51): A.g:2:10: illegal option opt\n" +
"error(19): A.g:11:0: rule a redefinition\n" +
"error(60): A.g:5:1: redefinition of members action\n" +
@ -41,11 +42,11 @@ public class TestSymbolIssues extends BaseTest {
"\n" +
"s : FOO ;",
// YIELDS
"error(26): B.g:2:9: can't assign string value to token name X in non-combined grammar\n" +
"error(36): B.g:4:4: label s conflicts with rule with same name\n" +
"error(36): B.g:4:9: label b conflicts with rule with same name\n" +
"error(37): B.g:4:15: label X conflicts with token with same name\n" +
"error(42): B.g:6:9: label x type mismatch with previous definition: TOKEN_LIST_LABEL!=TOKEN_LABEL\n"
"error(25): B.g:2:9: can't assign string value to token name X in non-combined grammar\n" +
"error(35): B.g:4:4: label s conflicts with rule with same name\n" +
"error(35): B.g:4:9: label b conflicts with rule with same name\n" +
"error(36): B.g:4:15: label X conflicts with token with same name\n" +
"error(40): B.g:6:9: label x type mismatch with previous definition: TOKEN_LIST_LABEL!=TOKEN_LABEL\n"
};
static String[] D = {
@ -60,8 +61,8 @@ public class TestSymbolIssues extends BaseTest {
" : ID ;",
// YIELDS
"error(39): D.g:3:21: label j conflicts with rule a's return value or parameter with same name\n" +
"error(43): D.g:5:0: rule b's argument i conflicts a return value with same name\n"
"error(37): D.g:3:21: label j conflicts with rule a's return value or parameter with same name\n" +
"error(41): D.g:5:0: rule b's argument i conflicts a return value with same name\n"
};
static String[] E = {
@ -77,10 +78,10 @@ public class TestSymbolIssues extends BaseTest {
"a : A ;\n",
// YIELDS
"error(74): E.g:4:8: cannot redefine B; token name already defined\n" +
"error(74): E.g:5:4: cannot redefine C; token name already defined\n" +
"error(74): E.g:6:8: cannot redefine D; token name already defined\n" +
"error(73): E.g:7:8: cannot alias X='e'; string already assigned to E\n"
"error(73): E.g:4:8: cannot redefine B; token name already defined\n" +
"error(73): E.g:5:4: cannot redefine C; token name already defined\n" +
"error(73): E.g:6:8: cannot redefine D; token name already defined\n" +
"error(72): E.g:7:8: cannot alias X='e'; string already assigned to E\n"
};
@Test public void testA() { super.testErrors(A, false); }

View File

@ -8,37 +8,37 @@ public class TestToolSyntaxErrors extends BaseTest {
"grammar A;\n" +
"",
// YIELDS
"error(63): A.g::: grammar A has no rules\n",
"error(64): A.g::: grammar A has no rules\n",
"A;",
"error(17): <string>:1:0: 'A' came as a complete surprise to me\n",
"error(16): <string>:1:0: 'A' came as a complete surprise to me\n",
"grammar ;",
"error(17): <string>:1:8: ';' came as a complete surprise to me while looking for an identifier\n",
"error(16): <string>:1:8: ';' came as a complete surprise to me while looking for an identifier\n",
"grammar A\n" +
"a : ID ;\n",
"error(17): <string>:2:0: missing SEMI at 'a'\n",
"error(16): <string>:2:0: missing SEMI at 'a'\n",
"grammar A;\n" +
"a : ID ;;\n"+
"b : B ;",
"error(17): A.g:2:8: ';' came as a complete surprise to me\n",
"error(16): A.g:2:8: ';' came as a complete surprise to me\n",
"grammar A;;\n" +
"a : ID ;\n",
"error(17): A;.g:1:10: ';' came as a complete surprise to me\n",
"error(16): A;.g:1:10: ';' came as a complete surprise to me\n",
"grammar A;\n" +
"a @init : ID ;\n",
"error(17): A.g:2:8: mismatched input ':' expecting ACTION while matching rule preamble\n",
"error(16): A.g:2:8: mismatched input ':' expecting ACTION while matching rule preamble\n",
"grammar A;\n" +
"a ( A | B ) D ;\n" +
"b : B ;",
": A.g:2:3: '(' came as a complete surprise to me while matching rule preamble\n" +
": A.g:2:11: mismatched input ')' expecting SEMI while matching a rule\n" +
": A.g:2:15: mismatched input ';' expecting COLON while matching a lexer rule\n",
"error(16): A.g:2:3: '(' came as a complete surprise to me while matching rule preamble\n" +
"error(16): A.g:2:11: mismatched input ')' expecting SEMI while matching a rule\n" +
"error(16): A.g:2:15: mismatched input ';' expecting COLON while matching a lexer rule\n",
};
@Test public void testA() { super.testErrors(A, true); }
@ -48,7 +48,7 @@ public class TestToolSyntaxErrors extends BaseTest {
"grammar A;\n" +
"a : : A ;\n" +
"b : B ;",
"error(17): A.g:2:4: ':' came as a complete surprise to me while matching alternative\n",
"error(16): A.g:2:4: ':' came as a complete surprise to me while matching alternative\n",
};
super.testErrors(pair, true);
}
@ -58,7 +58,7 @@ public class TestToolSyntaxErrors extends BaseTest {
"grammar A;\n" +
"a : A \n" +
"b : B ;",
"error(17): A.g:3:0: unterminated rule (missing ';') detected at 'b :' while looking for rule element\n",
"error(16): A.g:3:0: unterminated rule (missing ';') detected at 'b :' while looking for rule element\n",
};
super.testErrors(pair, true);
}
@ -68,7 +68,7 @@ public class TestToolSyntaxErrors extends BaseTest {
"lexer grammar A;\n" +
"A : 'a' \n" +
"B : 'b' ;",
"error(17): A.g:3:0: unterminated rule (missing ';') detected at 'B :' while looking for lexer rule element\n",
"error(16): A.g:3:0: unterminated rule (missing ';') detected at 'B :' while looking for lexer rule element\n",
};
super.testErrors(pair, true);
}
@ -78,7 +78,7 @@ public class TestToolSyntaxErrors extends BaseTest {
"grammar A;\n" +
"a : A \n" +
"b[int i] returns [int y] : B ;",
"error(17): A.g:3:9: unterminated rule (missing ';') detected at 'returns int y' while looking for rule element\n"
"error(16): A.g:3:9: unterminated rule (missing ';') detected at 'returns int y' while looking for rule element\n"
};
super.testErrors(pair, true);
}
@ -90,7 +90,7 @@ public class TestToolSyntaxErrors extends BaseTest {
" catch [Exception e] {...}\n" +
"b : B ;\n",
"error(17): A.g:2:4: unterminated rule (missing ';') detected at 'b catch' while looking for rule element\n"
"error(16): A.g:2:4: unterminated rule (missing ';') detected at 'b catch' while looking for rule element\n"
};
super.testErrors(pair, true);
}
@ -101,7 +101,7 @@ public class TestToolSyntaxErrors extends BaseTest {
"a : A \n" +
" catch [Exception e] {...}\n",
"error(17): A.g:2:4: unterminated rule (missing ';') detected at 'A catch' while looking for rule element\n"
"error(16): A.g:2:4: unterminated rule (missing ';') detected at 'A catch' while looking for rule element\n"
};
super.testErrors(pair, true);
}
@ -112,7 +112,7 @@ public class TestToolSyntaxErrors extends BaseTest {
"a @ options {k=1;} : A ;\n" +
"b : B ;",
"error(17): A.g:2:4: 'options {' came as a complete surprise to me while looking for an identifier\n"
"error(16): A.g:2:4: 'options {' came as a complete surprise to me while looking for an identifier\n"
};
super.testErrors(pair, true);
}
@ -123,7 +123,7 @@ public class TestToolSyntaxErrors extends BaseTest {
"a } : A ;\n" +
"b : B ;",
"error(17): A.g:2:2: '}' came as a complete surprise to me while matching rule preamble\n"
"error(16): A.g:2:2: '}' came as a complete surprise to me while matching rule preamble\n"
};
super.testErrors(pair, true);
}
@ -135,8 +135,8 @@ public class TestToolSyntaxErrors extends BaseTest {
"mode foo;\n" +
"b : B ;",
": A.g:4:0: 'b' came as a complete surprise to me\n" +
": A.g:4:6: mismatched input ';' expecting COLON while matching a lexer rule\n"
"error(16): A.g:4:0: 'b' came as a complete surprise to me\n" +
"error(16): A.g:4:6: mismatched input ';' expecting COLON while matching a lexer rule\n"
};
super.testErrors(pair, true);
}