got token insertion working

[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 9096]
parrt 2011-10-02 16:15:02 -08:00
parent c33b06fdde
commit 1556d076bd
4 changed files with 71 additions and 545 deletions


@@ -36,7 +36,7 @@ public class DefaultANTLRErrorStrategy implements ANTLRErrorStrategy {
        throws RecognitionException
    {
        if ( e instanceof NoViableAltException ) {
-           reportNoViableAlternative(recognizer, (NoViableAltException)e);
+           reportNoViableAlternative(recognizer, (NoViableAltException) e);
        }
        else if ( e instanceof InputMismatchException ) {
            reportInputMismatch(recognizer, (InputMismatchException)e);
@@ -172,25 +172,35 @@ public class DefaultANTLRErrorStrategy implements ANTLRErrorStrategy {
        throws RecognitionException
    {
        IntervalSet expecting = getExpectedTokens(recognizer);
-       IntervalSet follow = null;
-       RecognitionException e = null;
+       Object currentSymbol = recognizer.getCurrentInputSymbol();
        // if next token is what we are looking for then "delete" this token
        int nextTokenType = recognizer.getInputStream().LA(2);
        if ( expecting.contains(nextTokenType) ) {
            reportUnwantedToken(recognizer);
+           /*
            System.err.println("recoverFromMismatchedToken deleting "+
                               ((TokenStream)recognizer.getInputStream()).LT(1)+
                               " since "+((TokenStream)recognizer.getInputStream()).LT(2)+
                               " is what we want");
+           */
            recognizer.getInputStream().consume(); // simply delete extra token
            // we want to return the token we're actually matching
            Object matchedSymbol = recognizer.getCurrentInputSymbol();
            recognizer.getInputStream().consume(); // move past ttype token as if all were ok
            return matchedSymbol;
        }
        // can't recover with single token deletion, try insertion
-       if ( mismatchIsMissingToken() ) {
+       // if current token is consistent with what could come after current
+       // ATN state, then we know we're missing a token; error recovery
+       // is free to conjure up and insert the missing token
+       ATNState currentState = recognizer._interp.atn.states.get(recognizer._ctx.s);
+       ATNState next = currentState.transition(0).target;
+       IntervalSet expectingAtLL2 = recognizer._interp.atn.nextTokens(next, recognizer._ctx);
+       System.out.println("LT(2) set="+expectingAtLL2.toString(recognizer.getTokenNames()));
+       if ( expectingAtLL2.contains(((Token)currentSymbol).getType()) ) {
            reportMissingToken(recognizer);
            return getMissingSymbol(recognizer);
        }
@@ -198,44 +208,6 @@ public class DefaultANTLRErrorStrategy implements ANTLRErrorStrategy {
        throw new InputMismatchException(recognizer);
    }

-   public IntervalSet getExpectedTokens(BaseRecognizer recognizer) {
-       return recognizer.getExpectedTokens();
-   }
-
-   public boolean mismatchIsMissingToken() {
-       return false;
-       /*
-       if ( follow==null ) {
-           // we have no information about the follow; we can only consume
-           // a single token and hope for the best
-           return false;
-       }
-       // compute what can follow this grammar element reference
-       if ( follow.member(Token.EOR_TOKEN_TYPE) ) {
-           IntervalSet viableTokensFollowingThisRule = computeNextViableTokenSet();
-           follow = follow.or(viableTokensFollowingThisRule);
-           if ( ctx.sp>=0 ) { // remove EOR if we're not the start symbol
-               follow.remove(Token.EOR_TOKEN_TYPE);
-           }
-       }
-       // if current token is consistent with what could come after set
-       // then we know we're missing a token; error recovery is free to
-       // "insert" the missing token
-       //System.out.println("viable tokens="+follow.toString(getTokenNames()));
-       //System.out.println("LT(1)="+((TokenStream)input).LT(1));
-       // IntervalSet cannot handle negative numbers like -1 (EOF) so I leave EOR
-       // in follow set to indicate that the fall of the start symbol is
-       // in the set (EOF can follow).
-       if ( follow.member(input.LA(1)) || follow.member(Token.EOR_TOKEN_TYPE) ) {
-           //System.out.println("LT(1)=="+((TokenStream)input).LT(1)+" is consistent with what follows; inserting...");
-           return true;
-       }
-       return false;
-       */
-   }

    /** Conjure up a missing token during error recovery.
     *
     * The recognizer attempts to recover from single missing
@@ -273,6 +245,10 @@ public class DefaultANTLRErrorStrategy implements ANTLRErrorStrategy {
        return t;
    }

+   public IntervalSet getExpectedTokens(BaseRecognizer recognizer) {
+       return recognizer.getExpectedTokens();
+   }

    /** How should a token be displayed in an error message? The default
     * is to display just the text, but during development you might
     * want to have a lot of information spit out. Override in that case
@@ -298,29 +274,6 @@ public class DefaultANTLRErrorStrategy implements ANTLRErrorStrategy {
        return "'"+s+"'";
    }

-   /** Report a recognition problem.
-    *
-    * This method sets errorRecovery to indicate the parser is recovering
-    * not parsing. Once in recovery mode, no errors are generated.
-    * To get out of recovery mode, the parser must successfully match
-    * a token (after a resync). So it will go:
-    *
-    *   1. error occurs
-    *   2. enter recovery mode, report error
-    *   3. consume until token found in resynch set
-    *   4. try to resume parsing
-    *   5. next match() will reset errorRecovery mode
-    */
-// public void _reportError(BaseRecognizer recognizer,
-//                          RecognitionException e) {
-//     // if we've already reported an error and have not matched a token
-//     // yet successfully, don't report any errors.
-//     if ( recognizer.errorRecovery ) return;
-//     trackError(recognizer);
-//
-//     recognizer.notifyListeners(e.offendingToken, "dsfdkjasdf");
-// }

    /* Compute the error recovery set for the current rule. During
     * rule invocation, the parser pushes the set of tokens that can
     * follow that rule reference on the stack; this amounts to
@@ -429,15 +382,6 @@ public class DefaultANTLRErrorStrategy implements ANTLRErrorStrategy {
        return recoverSet;
    }

-// public void consumeUntil(BaseRecognizer recognizer, int tokenType) {
-//     //System.out.println("consumeUntil "+tokenType);
-//     int ttype = recognizer.getInputStream().LA(1);
-//     while (ttype != Token.EOF && ttype != tokenType) {
-//         recognizer.getInputStream().consume();
-//         ttype = recognizer.getInputStream().LA(1);
-//     }
-// }

    /** Consume tokens until one matches the given token set */
    public void consumeUntil(BaseRecognizer recognizer, IntervalSet set) {
        //System.out.println("consumeUntil("+set.toString(getTokenNames())+")");
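Note on the change above: when single-token deletion does not apply, the new recoverInline path asks the ATN which tokens could follow the token the parser is missing (nextTokens on the state reached after the current transition) and, if the offending token is in that set, conjures the missing token instead of throwing. The sketch below restates that decision order in isolation. It is illustrative only, not the ANTLR runtime API: TokenView, StreamView, ExpectedSets, and expectedAfterMissing are invented stand-ins for this sketch.

// Illustrative sketch only (not ANTLR runtime API): the two-step inline
// recovery decision the diff above implements, over tiny invented types.
import java.util.Set;

interface TokenView { int type(); }          // invented: a lookahead token
interface StreamView {                       // invented: minimal token-stream view
    TokenView LT(int i);                     // lookahead token at depth i (1-based)
    void consume();                          // advance past one token
}
interface ExpectedSets {
    Set<Integer> expectedHere();             // token types the parser can match right now
    Set<Integer> expectedAfterMissing();     // token types viable if one token were inserted
}

final class InlineRecoverySketch {
    /** Returns the symbol to pretend we matched, or null if neither trick applies. */
    static TokenView recoverInline(StreamView input, ExpectedSets sets) {
        // Case 1: single-token deletion. If the *next* token is what the parser
        // wanted, the current token is spurious: report it as extraneous, drop
        // it, and match the one behind it.
        if ( sets.expectedHere().contains(input.LT(2).type()) ) {
            input.consume();                 // delete the extra token
            TokenView matched = input.LT(1); // the token we actually wanted
            input.consume();                 // match it as if nothing happened
            return matched;
        }
        // Case 2: single-token insertion. If the current token could legally
        // appear right after the token we are missing (in the commit this set
        // comes from the ATN via nextTokens on the follow state), conjure the
        // missing token and keep going without consuming anything.
        if ( sets.expectedAfterMissing().contains(input.LT(1).type()) ) {
            return conjureMissingToken();
        }
        return null;                         // give up; caller reports a mismatch
    }

    static TokenView conjureMissingToken() {
        return new TokenView() {             // placeholder standing in for the missing token
            public int type() { return -1; }
        };
    }
}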


@@ -403,7 +403,9 @@ CommonSetStuff(m, expr, capture, invert) ::= <<
setState(<m.stateNumber>);
<if(m.labels)><m.labels:{l | <labelref(l)> = }><endif>_input.LT(1);
<capture>
-if ( <if(!invert)>!<endif>(<expr>) ) throw new InputMismatchException(this);
+if ( <if(!invert)>!<endif>(<expr>) ) {
+    <if(m.labels)><m.labels:{l | <labelref(l)> = (Token)}><endif>_errHandler.recoverInline(this);
+}
_input.consume();
>>
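With this template change, generated set-match code no longer throws InputMismatchException outright; it hands the mismatch to the error strategy, which may delete an extra token or conjure a missing one. The following is a rough, self-contained approximation of that control flow, not actual generator output; Tok, RecoveryHook, and matchSet are invented for the sketch, and the consume bookkeeping is deliberately simplified.

// Self-contained approximation of the new set-match control flow.
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Set;

final class SetMatchSketch {
    static final class Tok {
        final int type; final String text;
        Tok(int type, String text) { this.type = type; this.text = text; }
        public String toString() { return text; }
    }

    interface RecoveryHook { Tok recoverInline(); }    // stands in for _errHandler.recoverInline(this)

    /** Match one token against a set; route mismatches through the recovery hook. */
    static Tok matchSet(Deque<Tok> input, Set<Integer> expected, RecoveryHook hook) {
        Tok la = input.peek();                         // roughly _input.LT(1)
        if ( la == null || !expected.contains(la.type) ) {
            // old template: throw new InputMismatchException(this);
            // new template: <label> = (Token)_errHandler.recoverInline(this);
            return hook.recoverInline();               // may conjure the missing token
        }
        input.poll();                                  // roughly _input.consume()
        return la;
    }

    public static void main(String[] args) {
        Deque<Tok> input = new ArrayDeque<Tok>();
        input.add(new Tok(4, "'d'"));                  // next real token is 'd'
        // expecting 'b' (type 2) or 'c' (type 3); the hook conjures a missing 'b'
        Tok matched = matchSet(input, Set.of(2, 3), new RecoveryHook() {
            public Tok recoverInline() { return new Tok(2, "<missing 'b'>"); }
        });
        System.out.println(matched);                   // prints <missing 'b'>
    }
}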


@@ -1,470 +0,0 @@
// DUP OF JAVA JUST TO CHK OTHER TARGET ABILITY
javaTypeInitMap ::= [
"int":"0",
"long":"0",
"float":"0.0f",
"double":"0.0",
"boolean":"false",
"byte":"0",
"short":"0",
"char":"0",
default:"null" // anything other than an atomic type
]
// args must be <object-model-object>, <fields-resulting-in-STs>
ParserFile(file, parser, namedActions) ::= <<
// $ANTLR ANTLRVersion> <file.fileName> generatedTimestamp>
<namedActions.header>
import org.antlr.v4.runtime.NoViableAltException;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.EarlyExitException;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.FailedPredicateException;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
<parser>
>>
Parser(parser, scopes, funcs, atn, actions, sempreds) ::= <<
public class <parser.name> extends Parser {
public static final int
<parser.tokens:{k | <k>=<parser.tokens.(k)>}; separator=", ", wrap, anchor>;
public static final String[] tokenNames = {
"\<INVALID>", "\<INVALID>", "\<INVALID>",
<parser.tokenNames:{k | "<k>"}; separator=", ", wrap, anchor>
};
public static final String[] ruleNames = {
"\<INVALID>",
<parser.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor>
};
<scopes>
<namedActions.members>
<parser:ctor()>
<funcs; separator="\n">
@Override
public String[] getTokenNames() { return tokenNames; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public ATN getATN() { return _ATN; }
<dumpActions(actions,sempreds)>
<atn>
}
>>
dumpActions(actions,sempreds) ::= <<
<if(sempreds)>
public boolean sempred(int ruleIndex, int predIndex) {
switch ( predIndex ) {
<sempreds:{index|
case <index> : return <sempreds.(index)>;}; separator="\n">
}
return true;
}
<endif>
<if(actions)>
public void action(int ruleIndex, int actionIndex) {
switch ( actionIndex ) {
<actions:{index|
case <index> : <actions.(index)> break;}; separator="\n">
}
}
<endif>
>>
ctor(p) ::= <<
public <p.name>(TokenStream input) {
super(input);
_interp = new ParserInterpreter(this,_ATN);
}
>>
/*
// S.g:5:1: b returns [String q, float x] : A ;
public final S.b_return b() throws RecognitionException {
b_stack.push(new b_scope());
S.b_return retval = new S.b_return();
*/
RuleFunction(currentRule,code,decls,context,scope,namedActions,finallyAction) ::= <<
<context>
<scope>
<if(currentRule.modifiers)><currentRule.modifiers:{f | <f> }><else>public final <endif><currentRule.ctxType> <currentRule.name>() throws RecognitionException {
_ctx = new ParserRuleContext(_ctx, <currentRule.startState>);
ParserRuleContext _thisctx = _ctx;
<if(currentRule.scope)>
<currentRule.scope.name>_stack.push(new <currentRule.scope.name>());
<endif>
//System.out.println("enter "+ruleNames[<currentRule.index>]);
<currentRule.globalScopesUsed:{s | <s>_stack.push(new <s>());}; separator="\n">
<namedActions.init>
<decls; separator="\n">
try {
<code>
}
catch (RecognitionException re) {
reportError(re);
recover();
}
finally {
<namedActions.after>
<currentRule.globalScopesUsed:{s | <s>_stack.pop();}; separator="\n">
<if(currentRule.scope)><currentRule.scope.name>_stack.pop();<endif>
<finallyAction>
_ctx = (ParserRuleContext)_ctx.parent;
//System.out.println("exit "+ruleNames[<currentRule.index>]);
}
return _thisctx;
}
>>
CodeBlock(c, ops) ::= <<
<ops; separator="\n">
>>
LL1AltBlock(choice, alts, error) ::= <<
switch ( input.LA(1) ) {
<choice.altLook,alts:{look,alt| <cases(ttypes=look)>
<alt>
break;}; separator="\n">
default :
<error>
}
>>
LL1OptionalBlock(choice, alts, error) ::= <<
switch ( input.LA(1) ) {
<choice.altLook,alts:{look,alt| <cases(ttypes=look)>
<alt>
break;}; separator="\n">
default :
<error>
}
>>
LL1OptionalBlockSingleAlt(choice, expr, alts, preamble, error, followExpr) ::= <<
<preamble; separator="\n">
if ( <expr> ) {
<alts; separator="\n">
}
<!else if ( !(<followExpr>) ) <error>!>
>>
LL1StarBlock(choice, alts, sync) ::= <<
<choice.loopLabel>:
while (true) {
switch ( input.LA(1) ) {
<choice.altLook,alts:{look,alt| <cases(look)>
<alt>
break;}; separator="\n">
<cases(choice.exitLook)>
break <choice.loopLabel>;
}
//<sync>
}
>>
LL1StarBlockSingleAlt(choice, loopExpr, alts, preamble, iteration, sync) ::= <<
<preamble; separator="\n">
while ( <loopExpr> ) {
<alts; separator="\n">
<iteration>
//<sync>
}
>>
LL1PlusBlock(choice, alts, iteration, loopExpr, sync, error, iterationSync) ::= <<
//<sync>
do {
switch ( input.LA(1) ) {
<choice.altLook,alts:{look,alt| <cases(look)>
<alt>
break;}; separator="\n">
default :
<error>
}
<iteration>
//<iterationSync>
} while ( <loopExpr> );
>>
LL1PlusBlockSingleAlt(choice, loopExpr, alts, preamble, iteration,
sync, iterationSync) ::=
<<
//<sync>
<preamble; separator="\n">
do {
<alts; separator="\n">
<iteration>
// <iterationSync>
} while ( <loopExpr> );
>>
// LL(*) stuff
AltBlock(choice, alts, error) ::= <<
switch ( _interp.adaptivePredict(input,<choice.decision>,_ctx) ) {
<alts:{alt |
case <i>:
<alt>
break;}; separator="\n">
default :
<error>
}
>>
OptionalBlock(choice, alts, error) ::= <<
switch ( _interp.adaptivePredict(input,<choice.decision>,_ctx) ) {
<alts:{alt |
case <i>:
<alt>
break;}; separator="\n">
}
>>
StarBlock(choice, alts, sync) ::= <<
int _alt<choice.uniqueID> = _interp.adaptivePredict(input,<choice.decision>,_ctx);
while ( _alt<choice.uniqueID>!=<choice.exitAlt> ) {
switch ( _alt<choice.uniqueID> ) {
<alts:{alt|
case <i>:
<alt>
break;}; separator="\n">
}
_alt<choice.uniqueID> = _interp.adaptivePredict(input,<choice.decision>,_ctx);
}
>>
PlusBlock(choice, alts, error) ::= <<
int _alt<choice.uniqueID> = _interp.adaptivePredict(input,<choice.decision>,_ctx);
do {
switch ( _alt<choice.uniqueID> ) {
<alts:{alt|
case <i>:
<alt>
break;}; separator="\n">
default :
<error>
}
_alt<choice.uniqueID> = _interp.adaptivePredict(input,<choice.decision>,_ctx);
} while ( _alt<choice.uniqueID>!=<choice.exitAlt> );
>>
Sync(s) ::= "sync(<s.expecting.name>);"
ThrowNoViableAlt(t) ::= "throw new NoViableAltException(this,_ctx);"
TestSetInline(s) ::= <<
<s.ttypes:{ttype | <s.varName>==<ttype>}; separator=" || ">
>>
cases(ttypes) ::= <<
<ttypes:{t | case <t>:}; separator="\n">
>>
InvokeRule(r) ::= <<
_ctx.s = <r.stateNumber>;
<if(r.labels)><r.labels:{l | <l> = }><endif><r.name>(<r.argExprs:{e| ,<e>}>);
>>
MatchToken(m) ::= <<
_ctx.s = <m.stateNumber>;
<if(m.labels)><m.labels:{l | <l> = }>(Token)<endif>match(<m.name>);
>>
// ACTION STUFF
Action(a, chunks) ::= "<chunks>"
ForcedAction(a, chunks) ::= "<chunks>"
SemPred(p, chunks) ::= <<
if (!(<chunks>)) throw new FailedPredicateException(this, input, "<currentRule.name>", ""<!"<chunks>"!>);
>>
ActionText(t) ::= "<t.text>"
ArgRef(a) ::= "_ctx.<a.name>"
RetValueRef(a) ::= "_ctx.<a.name>"
QRetValueRef(a) ::= "<a.dict>.<a.name>"
/** How to translate $tokenLabel */
TokenRef(t) ::= "<t.name>"
SetAttr(s,rhsChunks) ::= "_ctx.<s.name> = <rhsChunks>;"
//SetQAttr(s,rhsChunks) ::= "<s.dict>.<s.name> = <rhsChunks>;"
TokenPropertyRef_text(t) ::= "(<t.label>!=null?<t.label>.getText():null)"
TokenPropertyRef_type(t) ::= "(<t.label>!=null?<t.label>.getType():0)"
TokenPropertyRef_line(t) ::= "(<t.label>!=null?<t.label>.getLine():0)"
TokenPropertyRef_pos(t) ::= "(<t.label>!=null?<t.label>.getCharPositionInLine():0)"
TokenPropertyRef_channel(t) ::= "(<t.label>!=null?<t.label>.getChannel():0)"
TokenPropertyRef_index(t) ::= "(<t.label>!=null?<t.label>.getTokenIndex():0)"
TokenPropertyRef_tree(t) ::= "<t.label>_tree"
TokenPropertyRef_int(t) ::= "(<t.label>!=null?Integer.valueOf(<t.label>.getText()):0)"
RulePropertyRef_start(r) ::= "(<r.label>!=null?((<file.TokenLabelType>)<r.label>.start):null)"
RulePropertyRef_stop(r) ::= "(<r.label>!=null?((<file.TokenLabelType>)<r.label>.stop):null)"
RulePropertyRef_tree(r) ::= "(<r.label>!=null?((<file.ASTLabelType>)<r.label>.tree):null)"
RulePropertyRef_text(r) ::= "(<r.label>!=null?((TokenStream)input).toString(<r.label>.start,<r.label>.stop):null)"
RulePropertyRef_st(r) ::= "(<r.label>!=null?<r.label>.st:null)"
DynScopeRef(s) ::= "<s.scope>_stack"
DynScopeAttrRef(s) ::= "<s.scope>_stack.peek().<s.attr>"
DynScopeAttrRef_negIndex(s, indexChunks) ::=
"<s.scope>_stack.get(<s.scope>_stack.size()-<indexChunks>-1).<s.attr>"
DynScopeAttrRef_index(s, indexChunks) ::=
"<s.scope>_stack.get(<indexChunks>).<s.attr>"
SetDynScopeAttr(s, rhsChunks) ::=
"<s.scope>_stack.peek().<s.attr> =<rhsChunks>;"
SetDynScopeAttr_negIndex(s, indexChunks, rhsChunks) ::=
"<s.scope>_stack.get(<s.scope>_stack.size()-<indexChunks>-1).<s.attr> =<rhsChunks>;"
SetDynScopeAttr_index(s, indexChunks, rhsChunks) ::=
"<s.scope>_stack.get(<indexChunks>).<s.attr> =<rhsChunks>;"
AddToList(a) ::= "<a.listName>.add(<first(a.opWithResultToAdd.labels)>);"
TokenDecl(t) ::= "Token <t.name>;"
TokenTypeDecl(t) ::= "int <t.name>;"
TokenListDecl(t) ::= "List\<Token> <t.name> = new ArrayList\<Token>();"
RuleContextDecl(r) ::= "<r.ctxName> <r.name>;"
CaptureNextToken(d) ::= "<d.varName> = input.LT(1);"
CaptureNextTokenType(d) ::= "<d.varName> = input.LA(1);"
StructDecl(s,attrs) ::= <<
public static class <s.name> extends ParserRuleContext {
<attrs:{a | <a>;}; separator="\n">
<if(s.ctorAttrs)>
public <s.name>(<s.ctorAttrs:{a | <a>,}> LABitSet follow) {
super(follow);
<s.ctorAttrs:{a | this.<a.name> = <a.name>;}; separator="\n">
}
<endif>
};
>>
DynamicScopeStruct(d,attrs) ::= <<
public static class <d.name> {
<attrs:{a | <a>;}; separator="\n">
};
public QStack\<<d.name>\> <d.name>_stack = new QStack\<<d.name>\>();
>>
AttributeDecl(d) ::= "<d.decl>"
DFADecl(dfa) ::= <<
// define <dfa.name>
>>
BitSetDecl(b) ::= <<
public static final LABitSet <b.name>=new LABitSet(new long[]{<b.hexWords:{it|<it>L};separator=",">}<if(b.fset.EOF)>, true<endif>);
>>
LexerFile(lexerFile, lexer, namedActions) ::= <<
// $ANTLR ANTLRVersion> <lexerFile.fileName> generatedTimestamp>
<namedActions.header>
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
<lexer>
>>
Lexer(lexer, atn, actions, sempreds) ::= <<
public class <lexer.name> extends Lexer {
public static final int
<lexer.tokens:{k | <k>=<lexer.tokens.(k)>}; separator=", ", wrap, anchor>;
<lexer.modes:{m| public static final int <m> = <i0>;}; separator="\n">
public static final String[] tokenNames = {
"\<INVALID>", "\<INVALID>", "\<INVALID>",
<lexer.tokenNames:{k | "<k>"}; separator=", ", wrap, anchor>
};
public static final String[] ruleNames = {
"\<INVALID>",
<lexer.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor>
};
<namedActions.members>
public <lexer.name>(CharStream input) {
super(input);
_interp = new LexerInterpreter(this,_ATN);
}
public String getGrammarFileName() { return "<lexerFile.fileName>"; }
@Override
public String[] getTokenNames() { return tokenNames; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public ATN getATN() { return _ATN; }
<lexer.namedActions.members>
<dumpActions(actions,sempreds)>
<atn>
}
>>
SerializedATN(model) ::= <<
public static final String _serializedATN =
"<model.serialized; wrap={"+<\n>"}, anchor>";
public static final ATN _ATN =
ATNInterpreter.deserialize(_serializedATN.toCharArray());
static {
org.antlr.v4.tool.DOTGenerator dot = new org.antlr.v4.tool.DOTGenerator(null);
//System.out.println(dot.getDOT(_ATN.decisionToATNState.get(0)));
}
>>
actionMethod(name, ruleIndex, actions) ::= <<
public void <name>_actions(int action) {
System.out.println("exec action "+action);
switch ( action ) {
<actions:{a |
case <i0> :
<a>
break;
}>
}
}<\n>
>>
sempredMethod(name, ruleIndex, preds) ::= <<
public boolean <name>_sempreds(int pred) {
switch ( pred ) {
<preds:{p |
case <i0> :
return <p>;
}>
default : return false;
}
}<\n>
>>
/** Using a type to init value map, try to init a type; if not in table
* must be an object, default value is "null".
*/
initValue(typeName) ::= <<
<javaTypeInitMap.(typeName)>
>>
codeFileExtension() ::= ".java"
true() ::= "true"
false() ::= "false"


@@ -32,6 +32,56 @@ import org.junit.Test;
/** test runtime parse errors */
public class TestParseErrors extends BaseTest {
+   @Test public void testTokenMismatch() throws Exception {
+       String grammar =
+           "grammar T;\n" +
+           "a : 'a' 'b' ;";
+       String found = execParser("T.g", grammar, "TParser", "TLexer", "a", "aa", false);
+       String expecting = "line 1:1 mismatched input 'a' expecting 'b'\n";
+       String result = stderrDuringParse;
+       assertEquals(expecting, result);
+   }
+
+   @Test public void testSingleTokenDeletion() throws Exception {
+       String grammar =
+           "grammar T;\n" +
+           "a : 'a' 'b' ;";
+       String found = execParser("T.g", grammar, "TParser", "TLexer", "a", "aab", false);
+       String expecting = "line 1:1 extraneous input 'a' expecting 'b'\n";
+       String result = stderrDuringParse;
+       assertEquals(expecting, result);
+   }
+
+   @Test public void testSingleTokenDeletionExpectingSet() throws Exception {
+       String grammar =
+           "grammar T;\n" +
+           "a : 'a' ('b'|'c') ;";
+       String found = execParser("T.g", grammar, "TParser", "TLexer", "a", "aab", false);
+       String expecting = "line 1:1 extraneous input 'a' expecting {'b', 'c'}\n";
+       String result = stderrDuringParse;
+       assertEquals(expecting, result);
+   }
+
+   @Test public void testSingleTokenInsertion() throws Exception {
+       String grammar =
+           "grammar T;\n" +
+           "a : 'a' 'b' 'c' ;";
+       String found = execParser("T.g", grammar, "TParser", "TLexer", "a", "ac", false);
+       String expecting = "line 1:1 missing 'b' at 'c'\n";
+       String result = stderrDuringParse;
+       assertEquals(expecting, result);
+   }
+
+   @Test public void testSingleSetInsertion() throws Exception {
+       String grammar =
+           "grammar T;\n" +
+           "a : 'a' ('b'|'c') 'd' ;";
+       String found = execParser("T.g", grammar, "TParser", "TLexer", "a", "ad", false);
+       String expecting = "line 1:1 missing {'b', 'c'} at 'd'\n";
+       String result = stderrDuringParse;
+       assertEquals(expecting, result);
+   }
+
    @Test public void testLL2() throws Exception {
        String grammar =
            "grammar T;\n" +