sorted DFA output, added approx tests, got unit tests working again; notSet messed up. impl set transitions.

[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 6786]
This commit is contained in:
parrt 2010-04-02 13:32:47 -08:00
parent d67e0d6a56
commit c2f24b5f8a
30 changed files with 2765 additions and 3040 deletions

View File

@ -86,8 +86,8 @@ public class LinearApproximator {
for (int i=0; i<s.getNumberOfTransitions(); i++) {
Transition t = s.transition(i);
LOOK(t.target, i+1, MAX_LINEAR_APPROXIMATE_DEPTH);
altLook.add(look);
altConfigs.add(configs);
altLook.add(look.clone());
altConfigs.add(configs.clone());
// for (int k=1; k<=MAX_LINEAR_APPROXIMATE_DEPTH; k++) {
// System.out.println(s.rule.name+"["+(i+1)+"]["+k+"]="+look[k].toString(g));
// System.out.println("configs["+(i+1)+"]["+k+"]="+ configs[k].toString());

View File

@ -1,5 +1,6 @@
package org.antlr.v4.automata;
import org.antlr.v4.misc.Utils;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.Rule;
@ -41,7 +42,8 @@ public class DFASerializer {
buf.append("-"+t.toString(g)+"->"+ getStateString(t.target)+'\n');
}
}
return buf.toString();
String output = buf.toString();
return Utils.sortLinesInString(output);
}
String getStateString(DFAState s) {

View File

@ -1,6 +1,6 @@
package org.antlr.v4.automata;
import org.antlr.v4.misc.IntSet;
import org.antlr.v4.misc.IntervalSet;
import org.antlr.v4.tool.GrammarAST;
import org.antlr.v4.tool.TerminalAST;
@ -38,13 +38,13 @@ public interface NFAFactory {
/** From set build single edge graph o->o-set->o. To conform to
* what an alt block looks like, must have extra state on left.
*/
Handle set(IntSet set, GrammarAST associatedAST);
Handle set(IntervalSet set, GrammarAST associatedAST);
Handle tree(List<Handle> els);
Handle range(GrammarAST a, GrammarAST b);
Handle not(Handle A);
Handle not(GrammarAST a, Handle A);
/** For a non-lexer, just build a simple token reference atom.
* For a lexer, a string is a sequence of char to match. That is,

View File

@ -57,7 +57,7 @@ public class NFASerializer {
buf.append("-"+a.toString(g)+"->"+ getStateString(t.target)+'\n');
}
else {
buf.append("-"+t.toString()+"->"+ getStateString(t.target)+'\n');
buf.append("-"+t.toString(g)+"->"+ getStateString(t.target)+'\n');
}
}
}

View File

@ -3,7 +3,8 @@ package org.antlr.v4.automata;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTreeNodeStream;
import org.antlr.v4.misc.IntSet;
import org.antlr.v4.codegen.Target;
import org.antlr.v4.misc.IntervalSet;
import org.antlr.v4.parse.ANTLRParser;
import org.antlr.v4.parse.GrammarASTAdaptor;
import org.antlr.v4.parse.NFABuilder;
@ -105,12 +106,13 @@ public class ParserNFAFactory implements NFAFactory {
/** From set build single edge graph o->o-set->o. To conform to
* what an alt block looks like, must have extra state on left.
*/
public Handle set(IntSet set, GrammarAST associatedAST) {
//TODO impl
throw new UnsupportedOperationException();
//right.incidentTransition = left.transition;
public Handle set(IntervalSet set, GrammarAST associatedAST) {
BasicState left = newState(associatedAST);
BasicState right = newState(associatedAST);
left.transition = new SetTransition(set, right);
right.incidentTransition = left.transition;
//return null;
return new Handle(left, right);
}
public Handle tree(List<Handle> els) {
@ -120,8 +122,24 @@ public class ParserNFAFactory implements NFAFactory {
/** Not valid for non-lexers */
public Handle range(GrammarAST a, GrammarAST b) { throw new UnsupportedOperationException(); }
public Handle not(Handle A) {
return null;
public Handle not(GrammarAST n, Handle A) {
GrammarAST ast = A.left.ast;
int ttype = 0;
if ( g.getType()==ANTLRParser.LEXER ) {
ttype = Target.getCharValueFromGrammarCharLiteral(ast.getText());
}
else {
ttype = g.getTokenType(ast.getText());
}
IntervalSet notAtom =
(IntervalSet)IntervalSet.of(ttype).complement(g.getTokenTypes());
if ( notAtom.isNil() ) {
ErrorManager.grammarError(ErrorType.EMPTY_COMPLEMENT,
g.fileName,
ast.token,
ast.getText());
}
return set(notAtom, n);
}
/** For a non-lexer, just build a simple token reference atom. */

View File

@ -41,7 +41,7 @@ public class PredicateTransition extends Transition {
}
public String toString() {
return "{"+semanticContext+"}?";
return semanticContext.toString();
}
public String toString(Grammar g) {

View File

@ -1,17 +1,15 @@
package org.antlr.v4.automata;
import org.antlr.v4.misc.IntervalSet;
import org.antlr.v4.tool.Grammar;
/** A label containing a set of values */
/** A transition containing a set of values */
public class SetTransition extends Transition {
/** A set of token types or character codes if label==SET */
public IntervalSet label;
public SetTransition(IntervalSet label) {
if ( label==null ) {
this.label = IntervalSet.of(Label.INVALID);
return;
}
public SetTransition(IntervalSet label, NFAState target) {
super(target);
if ( label==null ) label = IntervalSet.of(Label.INVALID);
this.label = label;
}
@ -38,4 +36,12 @@ public class SetTransition extends Transition {
}
return this.label.equals(((SetTransition)o).label);
}
public String toString(Grammar g) {
return label.toString(g);
}
public String toString() {
return label.toString();
}
}

View File

@ -1,6 +1,8 @@
package org.antlr.v4.misc;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
/** */
public class Utils {
@ -64,5 +66,18 @@ public class Utils {
}
result.append(src.substring(startIndex,src.length()));
return result.toString();
}
}
public static String sortLinesInString(String s) {
String lines[] = s.split("\n");
Arrays.sort(lines);
List<String> linesL = Arrays.asList(lines);
StringBuffer buf = new StringBuffer();
for (String l : linesL) {
buf.append(l);
buf.append('\n');
}
return buf.toString();
}
}

View File

@ -1,4 +1,4 @@
// $ANTLR ${project.version} ${buildNumber} ANTLRLexer.g 2010-02-25 18:15:27
// $ANTLR ${project.version} ${buildNumber} ANTLRLexer.g 2010-04-02 12:57:42
/*
[The "BSD licence"]
@ -263,7 +263,7 @@ public class ANTLRLexer extends Lexer {
if ( (( input.LA(2) != '/')) ) {
alt3=1;
}
else if ( (((( true )&&( !(input.LA(1) == '*' && input.LA(2) == '/') ))||( true ))) ) {
else if ( ((( true )||(( true )&&( !(input.LA(1) == '*' && input.LA(2) == '/') )))) ) {
alt3=2;
}
else {

View File

@ -742,9 +742,7 @@ ebnfSuffix
| PLUS -> POSITIVE_CLOSURE[op]
;
atom: range (ROOT^ | BANG^)? // Range x..y - only valid in lexers
| // Qualified reference delegate.rule. This must be
atom: // Qualified reference delegate.rule. This must be
// lexically contiguous (no spaces either side of the DOT)
// otherwise it is two references with a wildcard in between
// and not a qualified reference.
@ -753,19 +751,17 @@ atom: range (ROOT^ | BANG^)? // Range x..y - only valid in lexers
input.LT(2).getCharPositionInLine() &&
input.LT(2).getCharPositionInLine()+1==input.LT(3).getCharPositionInLine()
}?
id DOT ruleref
-> ^(DOT id ruleref)
| // Qualified reference delegate.token.
{
input.LT(1).getCharPositionInLine()+input.LT(1).getText().length()==
input.LT(2).getCharPositionInLine() &&
input.LT(2).getCharPositionInLine()+1==input.LT(3).getCharPositionInLine()
}?
id DOT terminal
-> ^(DOT id terminal)
| terminal
id DOT ruleref -> ^(DOT id ruleref)
| range (ROOT^ | BANG^)? // Range x..y - only valid in lexers
| terminal (ROOT^ | BANG^)?
| ruleref
| notSet (ROOT^|BANG^)?
| notSet (ROOT^|BANG^)?
| // Wildcard '.' means any character in a lexer, any
// token in parser and any token or node in a tree parser
// Because the terminal rule is allowed to be the node
// specification for the start of a tree rule, we must
// later check that wildcard was not used for that.
DOT elementOptions? -> ^(WILDCARD<TerminalAST>[$DOT] elementOptions?)
;
catch [RecognitionException re] { throw re; } // pass upwards to element
@ -776,19 +772,8 @@ atom: range (ROOT^ | BANG^)? // Range x..y - only valid in lexers
// that are then used to create the inverse set of them.
//
notSet
: NOT notTerminal -> ^(NOT notTerminal)
| NOT block -> ^(NOT block)
;
// -------------------
// Valid set terminals
//
// The terminal tokens that can be members of an inverse set (for
// matching anything BUT these)
//
notTerminal
: TOKEN_REF<TerminalAST>
| STRING_LITERAL<TerminalAST>
: NOT terminal -> ^(NOT terminal)
| NOT block -> ^(NOT block)
;
// -------------
@ -836,31 +821,15 @@ range
;
terminal
: ( // Args are only valid for lexer rules
TOKEN_REF ARG_ACTION? elementOptions? -> ^(TOKEN_REF<TerminalAST> ARG_ACTION? elementOptions?)
| STRING_LITERAL elementOptions? -> ^(STRING_LITERAL<TerminalAST> elementOptions?)
| // Wildcard '.' means any character in a lexer, any
// token in parser and any token or node in a tree parser
// Because the terminal rule is allowed to be the node
// specification for the start of a tree rule, we must
// later check that wildcard was not used for that.
DOT elementOptions? -> ^(WILDCARD<TerminalAST>[$DOT] elementOptions?)
)
( ROOT -> ^(ROOT $terminal)
| BANG -> ^(BANG $terminal)
)?
: // Args are only valid for lexer rules
TOKEN_REF ARG_ACTION? elementOptions? -> ^(TOKEN_REF<TerminalAST> ARG_ACTION? elementOptions?)
| STRING_LITERAL elementOptions? -> ^(STRING_LITERAL<TerminalAST> elementOptions?)
;
// ---------------
// Generic options
//
// Terminals may be adorned with certain options when
// reference in the grammar: TOK<,,,>
//
elementOptions
: // Options begin with < and end with >
//
LT elementOption (COMMA elementOption)* GT -> ^(ELEMENT_OPTIONS elementOption+)
// Terminals may be adorned with certain options when
// reference in the grammar: TOK<,,,>
elementOptions
: LT elementOption (COMMA elementOption)* GT -> ^(ELEMENT_OPTIONS elementOption+)
;
// WHen used with elements we can specify what the tree node type can

File diff suppressed because it is too large Load Diff

View File

@ -299,23 +299,23 @@ atom: ^(ROOT range)
| ^(BANG range)
| ^(ROOT notSet)
| ^(BANG notSet)
| notSet
| ^(ROOT terminal)
| ^(BANG terminal)
| range
| ^(DOT ID terminal)
| ^(DOT ID ruleref)
| ^(WILDCARD elementOptions)
| WILDCARD
| terminal
| ruleref
;
notSet
: ^(NOT notTerminal)
: ^(NOT terminal)
| ^(NOT block)
;
notTerminal
: TOKEN_REF
| STRING_LITERAL
;
block
: ^(BLOCK optionsSpec? ruleAction* ACTION? altList)
;
@ -337,10 +337,6 @@ terminal
| ^(TOKEN_REF ARG_ACTION)
| ^(TOKEN_REF elementOptions)
| TOKEN_REF
| ^(WILDCARD elementOptions)
| WILDCARD
| ^(ROOT terminal)
| ^(BANG terminal)
;
elementOptions

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
// $ANTLR ${project.version} ${buildNumber} ActionSplitter.g 2010-02-25 18:15:30
// $ANTLR ${project.version} ${buildNumber} ActionSplitter.g 2010-04-02 12:57:46
package org.antlr.v4.parse;
@ -2329,11 +2329,11 @@ public class ActionSplitter extends org.antlr.v4.runtime.Lexer {
state.failed=false;
return success;
}
public final boolean synpred8_ActionSplitter() {
public final boolean synpred2_ActionSplitter() {
state.backtracking++;
int start = input.mark();
try {
synpred8_ActionSplitter_fragment(); // can never throw exception
synpred2_ActionSplitter_fragment(); // can never throw exception
} catch (RecognitionException re) {
System.err.println("impossible: "+re);
}
@ -2343,11 +2343,11 @@ public class ActionSplitter extends org.antlr.v4.runtime.Lexer {
state.failed=false;
return success;
}
public final boolean synpred2_ActionSplitter() {
public final boolean synpred8_ActionSplitter() {
state.backtracking++;
int start = input.mark();
try {
synpred2_ActionSplitter_fragment(); // can never throw exception
synpred8_ActionSplitter_fragment(); // can never throw exception
} catch (RecognitionException re) {
System.err.println("impossible: "+re);
}
@ -2491,27 +2491,21 @@ public class ActionSplitter extends org.antlr.v4.runtime.Lexer {
}
}
static final String DFA29_eotS =
"\31\uffff";
"\32\uffff";
static final String DFA29_eofS =
"\31\uffff";
"\32\uffff";
static final String DFA29_minS =
"\1\0\1\uffff\1\0\1\uffff\1\0\2\uffff\1\0\6\uffff\1\0\12\uffff";
"\2\0\6\uffff\1\0\12\uffff\1\0\2\uffff\1\0\3\uffff";
static final String DFA29_maxS =
"\1\uffff\1\uffff\1\0\1\uffff\1\0\2\uffff\1\0\6\uffff\1\0\12\uffff";
"\1\uffff\1\0\6\uffff\1\0\12\uffff\1\0\2\uffff\1\0\3\uffff";
static final String DFA29_acceptS =
"\1\uffff\1\24\1\uffff\1\3\1\uffff\1\1\1\2\1\uffff\1\16\1\17\1\20"+
"\1\21\1\22\1\23\1\uffff\1\4\1\5\1\6\1\7\1\10\1\11\1\12\1\13\1\14"+
"\1\15";
"\2\uffff\1\16\1\17\1\20\1\21\1\22\1\23\1\uffff\1\4\1\5\1\6\1\7\1"+
"\10\1\11\1\12\1\13\1\14\1\15\1\uffff\1\3\1\24\1\uffff\1\1\1\2\1"+
"\24";
static final String DFA29_specialS =
"\1\0\1\uffff\1\1\1\uffff\1\2\2\uffff\1\3\6\uffff\1\4\12\uffff}>";
"\1\0\1\1\6\uffff\1\2\12\uffff\1\3\2\uffff\1\4\3\uffff}>";
static final String[] DFA29_transitionS = {
"\44\1\1\16\1\7\11\1\1\4\54\1\1\2\uffa3\1",
"",
"\1\uffff",
"",
"\1\uffff",
"",
"",
"\44\31\1\10\1\1\11\31\1\26\54\31\1\23\uffa3\31",
"\1\uffff",
"",
"",
@ -2529,6 +2523,13 @@ public class ActionSplitter extends org.antlr.v4.runtime.Lexer {
"",
"",
"",
"",
"\1\uffff",
"",
"",
"\1\uffff",
"",
"",
""
};
@ -2572,102 +2573,102 @@ public class ActionSplitter extends org.antlr.v4.runtime.Lexer {
int LA29_0 = input.LA(1);
s = -1;
if ( ((LA29_0>='\u0000' && LA29_0<='#')||(LA29_0>='&' && LA29_0<='.')||(LA29_0>='0' && LA29_0<='[')||(LA29_0>=']' && LA29_0<='\uFFFF')) ) {s = 1;}
if ( (LA29_0=='%') ) {s = 1;}
else if ( (LA29_0=='\\') ) {s = 2;}
else if ( (LA29_0=='$') ) {s = 8;}
else if ( (LA29_0=='/') ) {s = 4;}
else if ( (LA29_0=='\\') ) {s = 19;}
else if ( (LA29_0=='%') ) {s = 7;}
else if ( (LA29_0=='/') ) {s = 22;}
else if ( (LA29_0=='$') ) {s = 14;}
else if ( ((LA29_0>='\u0000' && LA29_0<='#')||(LA29_0>='&' && LA29_0<='.')||(LA29_0>='0' && LA29_0<='[')||(LA29_0>=']' && LA29_0<='\uFFFF')) ) {s = 25;}
if ( s>=0 ) return s;
break;
case 1 :
int LA29_2 = input.LA(1);
int LA29_1 = input.LA(1);
int index29_2 = input.index();
int index29_1 = input.index();
input.rewind();
s = -1;
if ( (synpred3_ActionSplitter()) ) {s = 3;}
if ( (synpred14_ActionSplitter()) ) {s = 2;}
else if ( (true) ) {s = 1;}
else if ( (synpred15_ActionSplitter()) ) {s = 3;}
else if ( (synpred16_ActionSplitter()) ) {s = 4;}
else if ( (synpred17_ActionSplitter()) ) {s = 5;}
else if ( (synpred18_ActionSplitter()) ) {s = 6;}
else if ( (synpred19_ActionSplitter()) ) {s = 7;}
input.seek(index29_2);
input.seek(index29_1);
if ( s>=0 ) return s;
break;
case 2 :
int LA29_4 = input.LA(1);
int LA29_8 = input.LA(1);
int index29_4 = input.index();
int index29_8 = input.index();
input.rewind();
s = -1;
if ( (synpred1_ActionSplitter()) ) {s = 5;}
if ( (synpred4_ActionSplitter()) ) {s = 9;}
else if ( (synpred2_ActionSplitter()) ) {s = 6;}
else if ( (synpred5_ActionSplitter()) ) {s = 10;}
else if ( (true) ) {s = 1;}
else if ( (synpred6_ActionSplitter()) ) {s = 11;}
else if ( (synpred7_ActionSplitter()) ) {s = 12;}
else if ( (synpred8_ActionSplitter()) ) {s = 13;}
else if ( (synpred9_ActionSplitter()) ) {s = 14;}
else if ( (synpred10_ActionSplitter()) ) {s = 15;}
else if ( (synpred11_ActionSplitter()) ) {s = 16;}
else if ( (synpred12_ActionSplitter()) ) {s = 17;}
else if ( (synpred13_ActionSplitter()) ) {s = 18;}
input.seek(index29_4);
input.seek(index29_8);
if ( s>=0 ) return s;
break;
case 3 :
int LA29_7 = input.LA(1);
int LA29_19 = input.LA(1);
int index29_7 = input.index();
int index29_19 = input.index();
input.rewind();
s = -1;
if ( (synpred14_ActionSplitter()) ) {s = 8;}
if ( (synpred3_ActionSplitter()) ) {s = 20;}
else if ( (synpred15_ActionSplitter()) ) {s = 9;}
else if ( (synpred16_ActionSplitter()) ) {s = 10;}
else if ( (synpred17_ActionSplitter()) ) {s = 11;}
else if ( (synpred18_ActionSplitter()) ) {s = 12;}
else if ( (synpred19_ActionSplitter()) ) {s = 13;}
else if ( (true) ) {s = 21;}
input.seek(index29_7);
input.seek(index29_19);
if ( s>=0 ) return s;
break;
case 4 :
int LA29_14 = input.LA(1);
int LA29_22 = input.LA(1);
int index29_14 = input.index();
int index29_22 = input.index();
input.rewind();
s = -1;
if ( (synpred4_ActionSplitter()) ) {s = 15;}
if ( (synpred1_ActionSplitter()) ) {s = 23;}
else if ( (synpred5_ActionSplitter()) ) {s = 16;}
else if ( (synpred2_ActionSplitter()) ) {s = 24;}
else if ( (synpred6_ActionSplitter()) ) {s = 17;}
else if ( (synpred7_ActionSplitter()) ) {s = 18;}
else if ( (synpred8_ActionSplitter()) ) {s = 19;}
else if ( (synpred9_ActionSplitter()) ) {s = 20;}
else if ( (synpred10_ActionSplitter()) ) {s = 21;}
else if ( (synpred11_ActionSplitter()) ) {s = 22;}
else if ( (synpred12_ActionSplitter()) ) {s = 23;}
else if ( (synpred13_ActionSplitter()) ) {s = 24;}
else if ( (true) ) {s = 21;}
input.seek(index29_14);
input.seek(index29_22);
if ( s>=0 ) return s;
break;
}

View File

@ -128,21 +128,19 @@ atom returns [NFAFactory.Handle p]
| ^(BANG range) {$p = $range.p;}
| ^(ROOT notSet) {$p = $notSet.p;}
| ^(BANG notSet) {$p = $notSet.p;}
| notSet {$p = $notSet.p;}
| range {$p = $range.p;}
| ^(DOT ID terminal) {$p = $terminal.p;}
| ^(DOT ID ruleref) {$p = $ruleref.p;}
| ^(WILDCARD .) {$p = factory.wildcard($start);}
| WILDCARD {$p = factory.wildcard($start);}
| terminal {$p = $terminal.p;}
| ruleref {$p = $ruleref.p;}
;
notSet returns [NFAFactory.Handle p]
: ^(NOT notTerminal) {$p = factory.not($notTerminal.p);}
| ^(NOT block) {$p = factory.not($block.p);}
;
notTerminal returns [NFAFactory.Handle p]
: TOKEN_REF {$p = factory.tokenRef((TerminalAST)$TOKEN_REF);}
| STRING_LITERAL {$p = factory.stringLiteral((TerminalAST)$start);}
: ^(NOT terminal) {$p = factory.not($NOT, $terminal.p);}
| ^(NOT block) {$p = factory.not($NOT, $block.p);}
;
ruleref returns [NFAFactory.Handle p]
@ -161,8 +159,6 @@ terminal returns [NFAFactory.Handle p]
| ^(TOKEN_REF ARG_ACTION .) {$p = factory.tokenRef((TerminalAST)$start);}
| ^(TOKEN_REF .) {$p = factory.tokenRef((TerminalAST)$start);}
| TOKEN_REF {$p = factory.tokenRef((TerminalAST)$start);}
| ^(WILDCARD .) {$p = factory.wildcard($start);}
| WILDCARD {$p = factory.wildcard($start);}
| ^(ROOT t=terminal) {$p = $t.p;}
| ^(BANG t=terminal) {$p = $t.p;}
;

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
// $ANTLR ${project.version} ${buildNumber} BasicSemanticTriggers.g 2010-02-22 16:10:22
// $ANTLR ${project.version} ${buildNumber} BasicSemanticTriggers.g 2010-04-02 12:57:47
/*
[The "BSD license"]

View File

@ -142,7 +142,12 @@ tokensSection
rule
@init {List<GrammarAST> modifiers = new ArrayList<GrammarAST>();}
: ^( RULE name=ID (^(RULEMODIFIERS (m=. {modifiers.add($m);})+))? .)
: ^( RULE
name=ID (options {greedy=false;}:.)*
(^(RULEMODIFIERS (m=. {modifiers.add($m);})+))?
^(BLOCK .+)
.*
)
{
int numAlts = $RULE.getFirstChildWithType(BLOCK).getChildCount();
Rule r = new Rule(g, $name.text, (GrammarASTWithOptions)$RULE, numAlts);

View File

@ -1,4 +1,4 @@
// $ANTLR ${project.version} ${buildNumber} CollectSymbols.g 2010-03-26 16:10:17
// $ANTLR ${project.version} ${buildNumber} CollectSymbols.g 2010-04-02 12:57:47
/*
[The "BSD license"]
@ -589,7 +589,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "rule"
// CollectSymbols.g:143:1: rule : ^( RULE name= ID ( ^( RULEMODIFIERS (m= . )+ ) )? . ) ;
// CollectSymbols.g:143:1: rule : ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ^( BLOCK ( . )+ ) ( . )* ) ;
public final void rule() throws RecognitionException {
GrammarAST name=null;
GrammarAST RULE7=null;
@ -597,38 +597,61 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
List<GrammarAST> modifiers = new ArrayList<GrammarAST>();
try {
// CollectSymbols.g:145:2: ( ^( RULE name= ID ( ^( RULEMODIFIERS (m= . )+ ) )? . ) )
// CollectSymbols.g:145:6: ^( RULE name= ID ( ^( RULEMODIFIERS (m= . )+ ) )? . )
// CollectSymbols.g:145:2: ( ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ^( BLOCK ( . )+ ) ( . )* ) )
// CollectSymbols.g:145:6: ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ^( BLOCK ( . )+ ) ( . )* )
{
RULE7=(GrammarAST)match(input,RULE,FOLLOW_RULE_in_rule357); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
name=(GrammarAST)match(input,ID,FOLLOW_ID_in_rule361); if (state.failed) return ;
// CollectSymbols.g:145:22: ( ^( RULEMODIFIERS (m= . )+ ) )?
int alt5=2;
alt5 = dfa5.predict(input);
switch (alt5) {
name=(GrammarAST)match(input,ID,FOLLOW_ID_in_rule369); if (state.failed) return ;
// CollectSymbols.g:146:17: ( options {greedy=false; } : . )*
loop4:
do {
int alt4=2;
alt4 = dfa4.predict(input);
switch (alt4) {
case 1 :
// CollectSymbols.g:146:42: .
{
matchAny(input); if (state.failed) return ;
}
break;
default :
break loop4;
}
} while (true);
// CollectSymbols.g:147:9: ( ^( RULEMODIFIERS (m= . )+ ) )?
int alt6=2;
int LA6_0 = input.LA(1);
if ( (LA6_0==RULEMODIFIERS) ) {
alt6=1;
}
switch (alt6) {
case 1 :
// CollectSymbols.g:145:23: ^( RULEMODIFIERS (m= . )+ )
// CollectSymbols.g:147:10: ^( RULEMODIFIERS (m= . )+ )
{
match(input,RULEMODIFIERS,FOLLOW_RULEMODIFIERS_in_rule365); if (state.failed) return ;
match(input,RULEMODIFIERS,FOLLOW_RULEMODIFIERS_in_rule393); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
// CollectSymbols.g:145:39: (m= . )+
int cnt4=0;
loop4:
// CollectSymbols.g:147:26: (m= . )+
int cnt5=0;
loop5:
do {
int alt4=2;
int LA4_0 = input.LA(1);
int alt5=2;
int LA5_0 = input.LA(1);
if ( ((LA4_0>=SEMPRED && LA4_0<=ALT_REWRITE)) ) {
alt4=1;
if ( ((LA5_0>=SEMPRED && LA5_0<=ALT_REWRITE)) ) {
alt5=1;
}
switch (alt4) {
switch (alt5) {
case 1 :
// CollectSymbols.g:145:40: m= .
// CollectSymbols.g:147:27: m= .
{
m=(GrammarAST)input.LT(1);
matchAny(input); if (state.failed) return ;
@ -640,13 +663,13 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
break;
default :
if ( cnt4 >= 1 ) break loop4;
if ( cnt5 >= 1 ) break loop5;
if (state.backtracking>0) {state.failed=true; return ;}
EarlyExitException eee =
new EarlyExitException(4, input);
new EarlyExitException(5, input);
throw eee;
}
cnt4++;
cnt5++;
} while (true);
@ -657,7 +680,73 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
}
matchAny(input); if (state.failed) return ;
match(input,BLOCK,FOLLOW_BLOCK_in_rule416); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
// CollectSymbols.g:148:17: ( . )+
int cnt7=0;
loop7:
do {
int alt7=2;
int LA7_0 = input.LA(1);
if ( ((LA7_0>=SEMPRED && LA7_0<=ALT_REWRITE)) ) {
alt7=1;
}
else if ( (LA7_0==UP) ) {
alt7=2;
}
switch (alt7) {
case 1 :
// CollectSymbols.g:148:17: .
{
matchAny(input); if (state.failed) return ;
}
break;
default :
if ( cnt7 >= 1 ) break loop7;
if (state.backtracking>0) {state.failed=true; return ;}
EarlyExitException eee =
new EarlyExitException(7, input);
throw eee;
}
cnt7++;
} while (true);
match(input, Token.UP, null); if (state.failed) return ;
// CollectSymbols.g:149:9: ( . )*
loop8:
do {
int alt8=2;
int LA8_0 = input.LA(1);
if ( ((LA8_0>=SEMPRED && LA8_0<=ALT_REWRITE)) ) {
alt8=1;
}
else if ( (LA8_0==UP) ) {
alt8=2;
}
switch (alt8) {
case 1 :
// CollectSymbols.g:149:9: .
{
matchAny(input); if (state.failed) return ;
}
break;
default :
break loop8;
}
} while (true);
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -688,14 +777,14 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
};
// $ANTLR start "setAlt"
// CollectSymbols.g:156:1: setAlt : {...}? ( ALT | ALT_REWRITE ) ;
// CollectSymbols.g:161:1: setAlt : {...}? ( ALT | ALT_REWRITE ) ;
public final CollectSymbols.setAlt_return setAlt() throws RecognitionException {
CollectSymbols.setAlt_return retval = new CollectSymbols.setAlt_return();
retval.start = input.LT(1);
try {
// CollectSymbols.g:157:2: ({...}? ( ALT | ALT_REWRITE ) )
// CollectSymbols.g:157:4: {...}? ( ALT | ALT_REWRITE )
// CollectSymbols.g:162:2: ({...}? ( ALT | ALT_REWRITE ) )
// CollectSymbols.g:162:4: {...}? ( ALT | ALT_REWRITE )
{
if ( !((inContext("RULE BLOCK"))) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
@ -730,13 +819,13 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "finishRule"
// CollectSymbols.g:161:1: finishRule : RULE ;
// CollectSymbols.g:166:1: finishRule : RULE ;
public final void finishRule() throws RecognitionException {
try {
// CollectSymbols.g:162:2: ( RULE )
// CollectSymbols.g:162:4: RULE
// CollectSymbols.g:167:2: ( RULE )
// CollectSymbols.g:167:4: RULE
{
match(input,RULE,FOLLOW_RULE_in_finishRule425); if (state.failed) return ;
match(input,RULE,FOLLOW_RULE_in_finishRule484); if (state.failed) return ;
if ( state.backtracking==1 ) {
currentRule = null;
}
@ -756,24 +845,24 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleNamedAction"
// CollectSymbols.g:165:1: ruleNamedAction : {...}? ^( AT ID ACTION ) ;
// CollectSymbols.g:170:1: ruleNamedAction : {...}? ^( AT ID ACTION ) ;
public final void ruleNamedAction() throws RecognitionException {
GrammarAST ID8=null;
GrammarAST ACTION9=null;
try {
// CollectSymbols.g:166:2: ({...}? ^( AT ID ACTION ) )
// CollectSymbols.g:166:4: {...}? ^( AT ID ACTION )
// CollectSymbols.g:171:2: ({...}? ^( AT ID ACTION ) )
// CollectSymbols.g:171:4: {...}? ^( AT ID ACTION )
{
if ( !((inContext("RULE"))) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "ruleNamedAction", "inContext(\"RULE\")");
}
match(input,AT,FOLLOW_AT_in_ruleNamedAction441); if (state.failed) return ;
match(input,AT,FOLLOW_AT_in_ruleNamedAction500); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
ID8=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleNamedAction443); if (state.failed) return ;
ACTION9=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleNamedAction445); if (state.failed) return ;
ID8=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleNamedAction502); if (state.failed) return ;
ACTION9=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleNamedAction504); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -798,20 +887,20 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleAction"
// CollectSymbols.g:173:1: ruleAction : {...}? ACTION ;
// CollectSymbols.g:178:1: ruleAction : {...}? ACTION ;
public final void ruleAction() throws RecognitionException {
GrammarAST ACTION10=null;
try {
// CollectSymbols.g:174:2: ({...}? ACTION )
// CollectSymbols.g:174:4: {...}? ACTION
// CollectSymbols.g:179:2: ({...}? ACTION )
// CollectSymbols.g:179:4: {...}? ACTION
{
if ( !((inContext("RULE ...")&&!inContext("SCOPE")&&
!inContext("CATCH")&&!inContext("FINALLY")&&!inContext("AT"))) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "ruleAction", "inContext(\"RULE ...\")&&!inContext(\"SCOPE\")&&\n\t\t !inContext(\"CATCH\")&&!inContext(\"FINALLY\")&&!inContext(\"AT\")");
}
ACTION10=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleAction465); if (state.failed) return ;
ACTION10=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleAction524); if (state.failed) return ;
if ( state.backtracking==1 ) {
currentRule.alt[currentAlt].actions.add((ActionAST)ACTION10);
@ -834,19 +923,19 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "exceptionHandler"
// CollectSymbols.g:183:1: exceptionHandler : ^( CATCH ARG_ACTION ACTION ) ;
// CollectSymbols.g:188:1: exceptionHandler : ^( CATCH ARG_ACTION ACTION ) ;
public final void exceptionHandler() throws RecognitionException {
GrammarAST ACTION11=null;
try {
// CollectSymbols.g:184:2: ( ^( CATCH ARG_ACTION ACTION ) )
// CollectSymbols.g:184:4: ^( CATCH ARG_ACTION ACTION )
// CollectSymbols.g:189:2: ( ^( CATCH ARG_ACTION ACTION ) )
// CollectSymbols.g:189:4: ^( CATCH ARG_ACTION ACTION )
{
match(input,CATCH,FOLLOW_CATCH_in_exceptionHandler481); if (state.failed) return ;
match(input,CATCH,FOLLOW_CATCH_in_exceptionHandler540); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_exceptionHandler483); if (state.failed) return ;
ACTION11=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_exceptionHandler485); if (state.failed) return ;
match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_exceptionHandler542); if (state.failed) return ;
ACTION11=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_exceptionHandler544); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -871,18 +960,18 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "finallyClause"
// CollectSymbols.g:191:1: finallyClause : ^( FINALLY ACTION ) ;
// CollectSymbols.g:196:1: finallyClause : ^( FINALLY ACTION ) ;
public final void finallyClause() throws RecognitionException {
GrammarAST ACTION12=null;
try {
// CollectSymbols.g:192:2: ( ^( FINALLY ACTION ) )
// CollectSymbols.g:192:4: ^( FINALLY ACTION )
// CollectSymbols.g:197:2: ( ^( FINALLY ACTION ) )
// CollectSymbols.g:197:4: ^( FINALLY ACTION )
{
match(input,FINALLY,FOLLOW_FINALLY_in_finallyClause502); if (state.failed) return ;
match(input,FINALLY,FOLLOW_FINALLY_in_finallyClause561); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
ACTION12=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_finallyClause504); if (state.failed) return ;
ACTION12=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_finallyClause563); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -907,19 +996,19 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleArg"
// CollectSymbols.g:199:1: ruleArg : {...}? ARG_ACTION ;
// CollectSymbols.g:204:1: ruleArg : {...}? ARG_ACTION ;
public final void ruleArg() throws RecognitionException {
GrammarAST ARG_ACTION13=null;
try {
// CollectSymbols.g:200:2: ({...}? ARG_ACTION )
// CollectSymbols.g:200:4: {...}? ARG_ACTION
// CollectSymbols.g:205:2: ({...}? ARG_ACTION )
// CollectSymbols.g:205:4: {...}? ARG_ACTION
{
if ( !((inContext("RULE"))) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "ruleArg", "inContext(\"RULE\")");
}
ARG_ACTION13=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleArg524); if (state.failed) return ;
ARG_ACTION13=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleArg583); if (state.failed) return ;
if ( state.backtracking==1 ) {
currentRule.args = ScopeParser.parseTypeList((ARG_ACTION13!=null?ARG_ACTION13.getText():null));
@ -942,18 +1031,18 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleReturns"
// CollectSymbols.g:207:1: ruleReturns : ^( RETURNS ARG_ACTION ) ;
// CollectSymbols.g:212:1: ruleReturns : ^( RETURNS ARG_ACTION ) ;
public final void ruleReturns() throws RecognitionException {
GrammarAST ARG_ACTION14=null;
try {
// CollectSymbols.g:208:2: ( ^( RETURNS ARG_ACTION ) )
// CollectSymbols.g:208:4: ^( RETURNS ARG_ACTION )
// CollectSymbols.g:213:2: ( ^( RETURNS ARG_ACTION ) )
// CollectSymbols.g:213:4: ^( RETURNS ARG_ACTION )
{
match(input,RETURNS,FOLLOW_RETURNS_in_ruleReturns541); if (state.failed) return ;
match(input,RETURNS,FOLLOW_RETURNS_in_ruleReturns600); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
ARG_ACTION14=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleReturns543); if (state.failed) return ;
ARG_ACTION14=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleReturns602); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -978,40 +1067,40 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleScopeSpec"
// CollectSymbols.g:215:1: ruleScopeSpec : {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) ;
// CollectSymbols.g:220:1: ruleScopeSpec : {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) ;
public final void ruleScopeSpec() throws RecognitionException {
GrammarAST ACTION15=null;
GrammarAST ids=null;
List list_ids=null;
try {
// CollectSymbols.g:216:2: ({...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) )
// CollectSymbols.g:216:4: {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) )
// CollectSymbols.g:221:2: ({...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) )
// CollectSymbols.g:221:4: {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) )
{
if ( !((inContext("RULE"))) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "ruleScopeSpec", "inContext(\"RULE\")");
}
// CollectSymbols.g:217:3: ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) )
int alt7=2;
int LA7_0 = input.LA(1);
// CollectSymbols.g:222:3: ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) )
int alt10=2;
int LA10_0 = input.LA(1);
if ( (LA7_0==SCOPE) ) {
int LA7_1 = input.LA(2);
if ( (LA10_0==SCOPE) ) {
int LA10_1 = input.LA(2);
if ( (LA7_1==DOWN) ) {
int LA7_2 = input.LA(3);
if ( (LA10_1==DOWN) ) {
int LA10_2 = input.LA(3);
if ( (LA7_2==ACTION) ) {
alt7=1;
if ( (LA10_2==ACTION) ) {
alt10=1;
}
else if ( (LA7_2==ID) ) {
alt7=2;
else if ( (LA10_2==ID) ) {
alt10=2;
}
else {
if (state.backtracking>0) {state.failed=true; return ;}
NoViableAltException nvae =
new NoViableAltException("", 7, 2, input);
new NoViableAltException("", 10, 2, input);
throw nvae;
}
@ -1019,7 +1108,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
else {
if (state.backtracking>0) {state.failed=true; return ;}
NoViableAltException nvae =
new NoViableAltException("", 7, 1, input);
new NoViableAltException("", 10, 1, input);
throw nvae;
}
@ -1027,18 +1116,18 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
else {
if (state.backtracking>0) {state.failed=true; return ;}
NoViableAltException nvae =
new NoViableAltException("", 7, 0, input);
new NoViableAltException("", 10, 0, input);
throw nvae;
}
switch (alt7) {
switch (alt10) {
case 1 :
// CollectSymbols.g:217:5: ^( SCOPE ACTION )
// CollectSymbols.g:222:5: ^( SCOPE ACTION )
{
match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec566); if (state.failed) return ;
match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec625); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
ACTION15=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleScopeSpec568); if (state.failed) return ;
ACTION15=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleScopeSpec627); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
if ( state.backtracking==1 ) {
@ -1052,28 +1141,28 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
}
break;
case 2 :
// CollectSymbols.g:223:5: ^( SCOPE (ids+= ID )+ )
// CollectSymbols.g:228:5: ^( SCOPE (ids+= ID )+ )
{
match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec581); if (state.failed) return ;
match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec640); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
// CollectSymbols.g:223:16: (ids+= ID )+
int cnt6=0;
loop6:
// CollectSymbols.g:228:16: (ids+= ID )+
int cnt9=0;
loop9:
do {
int alt6=2;
int LA6_0 = input.LA(1);
int alt9=2;
int LA9_0 = input.LA(1);
if ( (LA6_0==ID) ) {
alt6=1;
if ( (LA9_0==ID) ) {
alt9=1;
}
switch (alt6) {
switch (alt9) {
case 1 :
// CollectSymbols.g:223:16: ids+= ID
// CollectSymbols.g:228:16: ids+= ID
{
ids=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleScopeSpec585); if (state.failed) return ;
ids=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleScopeSpec644); if (state.failed) return ;
if (list_ids==null) list_ids=new ArrayList();
list_ids.add(ids);
@ -1082,13 +1171,13 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
break;
default :
if ( cnt6 >= 1 ) break loop6;
if ( cnt9 >= 1 ) break loop9;
if (state.backtracking>0) {state.failed=true; return ;}
EarlyExitException eee =
new EarlyExitException(6, input);
new EarlyExitException(9, input);
throw eee;
}
cnt6++;
cnt9++;
} while (true);
@ -1120,14 +1209,14 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
};
// $ANTLR start "rewriteElement"
// CollectSymbols.g:227:1: rewriteElement : {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) ;
// CollectSymbols.g:232:1: rewriteElement : {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) ;
public final CollectSymbols.rewriteElement_return rewriteElement() throws RecognitionException {
CollectSymbols.rewriteElement_return retval = new CollectSymbols.rewriteElement_return();
retval.start = input.LT(1);
try {
// CollectSymbols.g:229:2: ({...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) )
// CollectSymbols.g:230:6: {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL )
// CollectSymbols.g:234:2: ({...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) )
// CollectSymbols.g:235:6: {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL )
{
if ( !((inContext("RESULT ..."))) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
@ -1164,7 +1253,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
};
// $ANTLR start "labeledElement"
// CollectSymbols.g:234:1: labeledElement : {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) ;
// CollectSymbols.g:239:1: labeledElement : {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) ;
public final CollectSymbols.labeledElement_return labeledElement() throws RecognitionException {
CollectSymbols.labeledElement_return retval = new CollectSymbols.labeledElement_return();
retval.start = input.LT(1);
@ -1173,38 +1262,38 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
GrammarAST e=null;
try {
// CollectSymbols.g:240:2: ({...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) )
// CollectSymbols.g:240:4: {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) )
// CollectSymbols.g:245:2: ({...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) )
// CollectSymbols.g:245:4: {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) )
{
if ( !((inContext("RULE ..."))) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
throw new FailedPredicateException(input, "labeledElement", "inContext(\"RULE ...\")");
}
// CollectSymbols.g:241:3: ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) )
int alt8=2;
int LA8_0 = input.LA(1);
// CollectSymbols.g:246:3: ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) )
int alt11=2;
int LA11_0 = input.LA(1);
if ( (LA8_0==ASSIGN) ) {
alt8=1;
if ( (LA11_0==ASSIGN) ) {
alt11=1;
}
else if ( (LA8_0==PLUS_ASSIGN) ) {
alt8=2;
else if ( (LA11_0==PLUS_ASSIGN) ) {
alt11=2;
}
else {
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 8, 0, input);
new NoViableAltException("", 11, 0, input);
throw nvae;
}
switch (alt8) {
switch (alt11) {
case 1 :
// CollectSymbols.g:241:5: ^( ASSIGN id= ID e= . )
// CollectSymbols.g:246:5: ^( ASSIGN id= ID e= . )
{
match(input,ASSIGN,FOLLOW_ASSIGN_in_labeledElement649); if (state.failed) return retval;
match(input,ASSIGN,FOLLOW_ASSIGN_in_labeledElement708); if (state.failed) return retval;
match(input, Token.DOWN, null); if (state.failed) return retval;
id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement653); if (state.failed) return retval;
id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement712); if (state.failed) return retval;
e=(GrammarAST)input.LT(1);
matchAny(input); if (state.failed) return retval;
@ -1213,12 +1302,12 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
}
break;
case 2 :
// CollectSymbols.g:242:5: ^( PLUS_ASSIGN id= ID e= . )
// CollectSymbols.g:247:5: ^( PLUS_ASSIGN id= ID e= . )
{
match(input,PLUS_ASSIGN,FOLLOW_PLUS_ASSIGN_in_labeledElement665); if (state.failed) return retval;
match(input,PLUS_ASSIGN,FOLLOW_PLUS_ASSIGN_in_labeledElement724); if (state.failed) return retval;
match(input, Token.DOWN, null); if (state.failed) return retval;
id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement669); if (state.failed) return retval;
id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement728); if (state.failed) return retval;
e=(GrammarAST)input.LT(1);
matchAny(input); if (state.failed) return retval;
@ -1254,7 +1343,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
};
// $ANTLR start "terminal"
// CollectSymbols.g:246:1: terminal : ({...}? STRING_LITERAL | TOKEN_REF );
// CollectSymbols.g:251:1: terminal : ({...}? STRING_LITERAL | TOKEN_REF );
public final CollectSymbols.terminal_return terminal() throws RecognitionException {
CollectSymbols.terminal_return retval = new CollectSymbols.terminal_return();
retval.start = input.LT(1);
@ -1263,32 +1352,32 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
GrammarAST TOKEN_REF17=null;
try {
// CollectSymbols.g:247:5: ({...}? STRING_LITERAL | TOKEN_REF )
int alt9=2;
int LA9_0 = input.LA(1);
// CollectSymbols.g:252:5: ({...}? STRING_LITERAL | TOKEN_REF )
int alt12=2;
int LA12_0 = input.LA(1);
if ( (LA9_0==STRING_LITERAL) ) {
alt9=1;
if ( (LA12_0==STRING_LITERAL) ) {
alt12=1;
}
else if ( (LA9_0==TOKEN_REF) ) {
alt9=2;
else if ( (LA12_0==TOKEN_REF) ) {
alt12=2;
}
else {
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 9, 0, input);
new NoViableAltException("", 12, 0, input);
throw nvae;
}
switch (alt9) {
switch (alt12) {
case 1 :
// CollectSymbols.g:247:7: {...}? STRING_LITERAL
// CollectSymbols.g:252:7: {...}? STRING_LITERAL
{
if ( !((!inContext("TOKENS ASSIGN"))) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
throw new FailedPredicateException(input, "terminal", "!inContext(\"TOKENS ASSIGN\")");
}
STRING_LITERAL16=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_terminal695); if (state.failed) return retval;
STRING_LITERAL16=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_terminal754); if (state.failed) return retval;
if ( state.backtracking==1 ) {
terminals.add(((GrammarAST)retval.start));
@ -1302,9 +1391,9 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
}
break;
case 2 :
// CollectSymbols.g:255:7: TOKEN_REF
// CollectSymbols.g:260:7: TOKEN_REF
{
TOKEN_REF17=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_terminal710); if (state.failed) return retval;
TOKEN_REF17=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_terminal769); if (state.failed) return retval;
if ( state.backtracking==1 ) {
terminals.add(TOKEN_REF17);
@ -1332,31 +1421,31 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
// $ANTLR start "ruleref"
// CollectSymbols.g:265:1: ruleref : ({...}?r= RULE_REF | r= RULE_REF ) ;
// CollectSymbols.g:270:1: ruleref : ({...}?r= RULE_REF | r= RULE_REF ) ;
public final void ruleref() throws RecognitionException {
GrammarAST r=null;
try {
// CollectSymbols.g:267:5: ( ({...}?r= RULE_REF | r= RULE_REF ) )
// CollectSymbols.g:267:7: ({...}?r= RULE_REF | r= RULE_REF )
// CollectSymbols.g:272:5: ( ({...}?r= RULE_REF | r= RULE_REF ) )
// CollectSymbols.g:272:7: ({...}?r= RULE_REF | r= RULE_REF )
{
// CollectSymbols.g:267:7: ({...}?r= RULE_REF | r= RULE_REF )
int alt10=2;
int LA10_0 = input.LA(1);
// CollectSymbols.g:272:7: ({...}?r= RULE_REF | r= RULE_REF )
int alt13=2;
int LA13_0 = input.LA(1);
if ( (LA10_0==RULE_REF) ) {
int LA10_1 = input.LA(2);
if ( (LA13_0==RULE_REF) ) {
int LA13_1 = input.LA(2);
if ( ((inContext("DOT ..."))) ) {
alt10=1;
alt13=1;
}
else if ( (true) ) {
alt10=2;
alt13=2;
}
else {
if (state.backtracking>0) {state.failed=true; return ;}
NoViableAltException nvae =
new NoViableAltException("", 10, 1, input);
new NoViableAltException("", 13, 1, input);
throw nvae;
}
@ -1364,19 +1453,19 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
else {
if (state.backtracking>0) {state.failed=true; return ;}
NoViableAltException nvae =
new NoViableAltException("", 10, 0, input);
new NoViableAltException("", 13, 0, input);
throw nvae;
}
switch (alt10) {
switch (alt13) {
case 1 :
// CollectSymbols.g:267:9: {...}?r= RULE_REF
// CollectSymbols.g:272:9: {...}?r= RULE_REF
{
if ( !((inContext("DOT ..."))) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "ruleref", "inContext(\"DOT ...\")");
}
r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref747); if (state.failed) return ;
r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref806); if (state.failed) return ;
if ( state.backtracking==1 ) {
qualifiedRulerefs.add((GrammarAST)r.getParent());
}
@ -1384,9 +1473,9 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
}
break;
case 2 :
// CollectSymbols.g:269:8: r= RULE_REF
// CollectSymbols.g:274:8: r= RULE_REF
{
r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref760); if (state.failed) return ;
r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref819); if (state.failed) return ;
}
break;
@ -1419,7 +1508,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
protected DFA1 dfa1 = new DFA1(this);
protected DFA5 dfa5 = new DFA5(this);
protected DFA4 dfa4 = new DFA4(this);
static final String DFA1_eotS =
"\41\uffff";
static final String DFA1_eofS =
@ -1435,7 +1524,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
"\uffff\1\11\1\13\1\uffff\1\10\2\uffff\1\1\1\uffff\1\2\3\uffff\1"+
"\7";
static final String DFA1_specialS =
"\10\uffff\1\4\1\3\1\0\23\uffff\1\1\1\2\1\uffff}>";
"\10\uffff\1\4\1\2\1\0\23\uffff\1\1\1\3\1\uffff}>";
static final String[] DFA1_transitionS = {
"\1\6\1\uffff\1\16\4\uffff\1\1\11\uffff\1\7\1\uffff\1\20\1\17"+
"\12\uffff\1\3\4\uffff\1\14\10\uffff\1\2\2\uffff\1\12\1\10\3"+
@ -1542,21 +1631,6 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
if ( s>=0 ) return s;
break;
case 2 :
int LA1_31 = input.LA(1);
int index1_31 = input.index();
input.rewind();
s = -1;
if ( ((inContext("TOKENS"))) ) {s = 4;}
else if ( ((inContext("RULE ..."))) ) {s = 12;}
input.seek(index1_31);
if ( s>=0 ) return s;
break;
case 3 :
int LA1_9 = input.LA(1);
@ -1571,6 +1645,21 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
input.seek(index1_9);
if ( s>=0 ) return s;
break;
case 3 :
int LA1_31 = input.LA(1);
int index1_31 = input.index();
input.rewind();
s = -1;
if ( ((inContext("TOKENS"))) ) {s = 4;}
else if ( ((inContext("RULE ..."))) ) {s = 12;}
input.seek(index1_31);
if ( s>=0 ) return s;
break;
case 4 :
int LA1_8 = input.LA(1);
@ -1594,59 +1683,74 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
throw nvae;
}
}
static final String DFA5_eotS =
"\7\uffff";
static final String DFA5_eofS =
"\7\uffff";
static final String DFA5_minS =
"\1\4\1\2\1\uffff\1\4\1\2\1\3\1\uffff";
static final String DFA5_maxS =
"\1\145\1\3\1\uffff\3\145\1\uffff";
static final String DFA5_acceptS =
"\2\uffff\1\2\3\uffff\1\1";
static final String DFA5_specialS =
"\7\uffff}>";
static final String[] DFA5_transitionS = {
"\106\2\1\1\33\2",
"\1\3\1\2",
static final String DFA4_eotS =
"\26\uffff";
static final String DFA4_eofS =
"\26\uffff";
static final String DFA4_minS =
"\1\4\2\2\1\uffff\2\4\2\2\1\uffff\1\3\3\2\3\4\2\2\2\3\1\uffff\1\3";
static final String DFA4_maxS =
"\3\145\1\uffff\4\145\1\uffff\13\145\1\uffff\1\145";
static final String DFA4_acceptS =
"\3\uffff\1\1\4\uffff\1\2\13\uffff\1\2\1\uffff";
static final String DFA4_specialS =
"\26\uffff}>";
static final String[] DFA4_transitionS = {
"\106\3\1\1\1\3\1\2\31\3",
"\1\4\1\uffff\142\3",
"\1\5\1\uffff\142\3",
"",
"\142\4",
"\1\6\1\5\142\4",
"\1\2\142\6",
""
"\142\6",
"\142\7",
"\2\10\142\6",
"\1\10\1\11\142\7",
"",
"\1\10\106\14\1\12\1\14\1\13\31\14",
"\1\15\1\10\106\14\1\12\1\14\1\13\31\14",
"\1\16\1\10\106\14\1\12\1\14\1\13\31\14",
"\1\17\1\10\106\14\1\12\1\14\1\13\31\14",
"\142\20",
"\142\21",
"\142\22",
"\1\3\1\23\142\20",
"\1\3\1\24\142\21",
"\1\25\142\22",
"\1\24\106\14\1\12\1\14\1\13\31\14",
"",
"\1\24\106\14\1\12\1\14\1\13\31\14"
};
static final short[] DFA5_eot = DFA.unpackEncodedString(DFA5_eotS);
static final short[] DFA5_eof = DFA.unpackEncodedString(DFA5_eofS);
static final char[] DFA5_min = DFA.unpackEncodedStringToUnsignedChars(DFA5_minS);
static final char[] DFA5_max = DFA.unpackEncodedStringToUnsignedChars(DFA5_maxS);
static final short[] DFA5_accept = DFA.unpackEncodedString(DFA5_acceptS);
static final short[] DFA5_special = DFA.unpackEncodedString(DFA5_specialS);
static final short[][] DFA5_transition;
static final short[] DFA4_eot = DFA.unpackEncodedString(DFA4_eotS);
static final short[] DFA4_eof = DFA.unpackEncodedString(DFA4_eofS);
static final char[] DFA4_min = DFA.unpackEncodedStringToUnsignedChars(DFA4_minS);
static final char[] DFA4_max = DFA.unpackEncodedStringToUnsignedChars(DFA4_maxS);
static final short[] DFA4_accept = DFA.unpackEncodedString(DFA4_acceptS);
static final short[] DFA4_special = DFA.unpackEncodedString(DFA4_specialS);
static final short[][] DFA4_transition;
static {
int numStates = DFA5_transitionS.length;
DFA5_transition = new short[numStates][];
int numStates = DFA4_transitionS.length;
DFA4_transition = new short[numStates][];
for (int i=0; i<numStates; i++) {
DFA5_transition[i] = DFA.unpackEncodedString(DFA5_transitionS[i]);
DFA4_transition[i] = DFA.unpackEncodedString(DFA4_transitionS[i]);
}
}
class DFA5 extends DFA {
class DFA4 extends DFA {
public DFA5(BaseRecognizer recognizer) {
public DFA4(BaseRecognizer recognizer) {
this.recognizer = recognizer;
this.decisionNumber = 5;
this.eot = DFA5_eot;
this.eof = DFA5_eof;
this.min = DFA5_min;
this.max = DFA5_max;
this.accept = DFA5_accept;
this.special = DFA5_special;
this.transition = DFA5_transition;
this.decisionNumber = 4;
this.eot = DFA4_eot;
this.eof = DFA4_eof;
this.min = DFA4_min;
this.max = DFA4_max;
this.accept = DFA4_accept;
this.special = DFA4_special;
this.transition = DFA4_transition;
}
public String getDescription() {
return "145:22: ( ^( RULEMODIFIERS (m= . )+ ) )?";
return "()* loopback of 146:17: ( options {greedy=false; } : . )*";
}
}
@ -1680,34 +1784,35 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter {
public static final BitSet FOLLOW_STRING_LITERAL_in_tokensSection314 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_ID_in_tokensSection328 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_in_rule357 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_rule361 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_RULEMODIFIERS_in_rule365 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_set_in_setAlt401 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_in_finishRule425 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_AT_in_ruleNamedAction441 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_ruleNamedAction443 = new BitSet(new long[]{0x0000000000010000L});
public static final BitSet FOLLOW_ACTION_in_ruleNamedAction445 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_ACTION_in_ruleAction465 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_CATCH_in_exceptionHandler481 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ARG_ACTION_in_exceptionHandler483 = new BitSet(new long[]{0x0000000000010000L});
public static final BitSet FOLLOW_ACTION_in_exceptionHandler485 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_FINALLY_in_finallyClause502 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ACTION_in_finallyClause504 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_ARG_ACTION_in_ruleArg524 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RETURNS_in_ruleReturns541 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ARG_ACTION_in_ruleReturns543 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_SCOPE_in_ruleScopeSpec566 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ACTION_in_ruleScopeSpec568 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_SCOPE_in_ruleScopeSpec581 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_ruleScopeSpec585 = new BitSet(new long[]{0x0000000000000008L,0x0000000000400000L});
public static final BitSet FOLLOW_set_in_rewriteElement613 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ASSIGN_in_labeledElement649 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_labeledElement653 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_PLUS_ASSIGN_in_labeledElement665 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_labeledElement669 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_STRING_LITERAL_in_terminal695 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_TOKEN_REF_in_terminal710 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_REF_in_ruleref747 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_REF_in_ruleref760 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ID_in_rule369 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_RULEMODIFIERS_in_rule393 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_BLOCK_in_rule416 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_set_in_setAlt460 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_in_finishRule484 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_AT_in_ruleNamedAction500 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_ruleNamedAction502 = new BitSet(new long[]{0x0000000000010000L});
public static final BitSet FOLLOW_ACTION_in_ruleNamedAction504 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_ACTION_in_ruleAction524 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_CATCH_in_exceptionHandler540 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ARG_ACTION_in_exceptionHandler542 = new BitSet(new long[]{0x0000000000010000L});
public static final BitSet FOLLOW_ACTION_in_exceptionHandler544 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_FINALLY_in_finallyClause561 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ACTION_in_finallyClause563 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_ARG_ACTION_in_ruleArg583 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RETURNS_in_ruleReturns600 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ARG_ACTION_in_ruleReturns602 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_SCOPE_in_ruleScopeSpec625 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ACTION_in_ruleScopeSpec627 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_SCOPE_in_ruleScopeSpec640 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_ruleScopeSpec644 = new BitSet(new long[]{0x0000000000000008L,0x0000000000400000L});
public static final BitSet FOLLOW_set_in_rewriteElement672 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ASSIGN_in_labeledElement708 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_labeledElement712 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_PLUS_ASSIGN_in_labeledElement724 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_ID_in_labeledElement728 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000003FFFFFFFFFL});
public static final BitSet FOLLOW_STRING_LITERAL_in_terminal754 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_TOKEN_REF_in_terminal769 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_REF_in_ruleref806 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RULE_REF_in_ruleref819 = new BitSet(new long[]{0x0000000000000002L});
}

View File

@ -197,7 +197,8 @@ public class DOTGenerator {
work.remove(0);
}
return dot.render();
String output = dot.render();
return Utils.sortLinesInString(output);
}
/** Do a depth-first walk of the state machine graph and

View File

@ -6,8 +6,6 @@ import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.Token;
import org.antlr.v4.automata.DFA;
import org.antlr.v4.automata.DFAState;
import org.antlr.v4.automata.NFAState;
import org.antlr.v4.misc.IntSet;
import org.antlr.v4.parse.v4ParserException;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STErrorListener;
@ -354,20 +352,6 @@ public class ErrorManager {
state.get().listener.error(msg);
}
public static void recursionOverflow(String fileName,
DFAState d, NFAState s, int altNum, int depth) {
state.get().errors++;
Message msg = new RecursionOverflowMessage(fileName, d, s, altNum, depth);
state.get().listener.error(msg);
}
public static void multipleRecursiveAlts(String fileName,
DFAState d, IntSet recursiveAltSet) {
state.get().errors++;
Message msg = new MultipleRecursiveAltsMessage(fileName, d, recursiveAltSet);
state.get().listener.error(msg);
}
public static void analysisTimeout() {
state.get().errors++;
Message msg = new AnalysisTimeoutMessage();

View File

@ -134,11 +134,11 @@ public enum ErrorType {
AMBIGUITY(ErrorSeverity.ERROR, true, true),
UNREACHABLE_ALTS(ErrorSeverity.ERROR, true, true),
MULTIPLE_RECURSIVE_ALTS(ErrorSeverity.ERROR, true, true),
//MULTIPLE_RECURSIVE_ALTS(ErrorSeverity.ERROR, true, true),
INSUFFICIENT_PREDICATES(ErrorSeverity.ERROR, true, true),
// these next 3 can happen in recursion-limited LL(*)
RECURSION_OVERFLOW(ErrorSeverity.ERROR, true, true),
//RECURSION_OVERFLOW(ErrorSeverity.ERROR, true, true),
LEFT_RECURSION_CYCLES(ErrorSeverity.ERROR, true, true),
ANALYSIS_TIMEOUT(ErrorSeverity.ERROR, true, true),

View File

@ -7,11 +7,12 @@ import org.antlr.v4.automata.DFA;
import org.antlr.v4.automata.Label;
import org.antlr.v4.automata.NFA;
import org.antlr.v4.codegen.Target;
import org.antlr.v4.misc.IntSet;
import org.antlr.v4.misc.IntervalSet;
import org.antlr.v4.misc.Utils;
import org.antlr.v4.parse.ANTLRLexer;
import org.antlr.v4.parse.ANTLRParser;
import org.antlr.v4.parse.GrammarASTAdaptor;
import org.antlr.v4.semantics.SemanticPipeline;
import java.util.*;
@ -131,17 +132,6 @@ public class Grammar implements AttributeResolver {
this.name = ((GrammarAST)ast.getChild(0)).getText();
}
initTokenSymbolTables();
if ( this.ast==null || this.ast.hasErrors ) return;
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(this);
sem.process();
if ( getImportedGrammars()!=null ) { // process imported grammars (if any)
for (Grammar imp : getImportedGrammars()) {
antlr.process(imp);
}
}
}
protected void initTokenSymbolTables() {
@ -383,6 +373,39 @@ public class Grammar implements AttributeResolver {
return tokenName;
}
/** What is the max char value possible for this grammar's target? Use
* unicode max if no target defined.
*/
public int getMaxCharValue() {
return Label.MAX_CHAR_VALUE;
// if ( generator!=null ) {
// return generator.target.getMaxCharValue(generator);
// }
// else {
// return Label.MAX_CHAR_VALUE;
// }
}
/** Return a set of all possible token or char types for this grammar */
public IntSet getTokenTypes() {
if ( getType()==ANTLRParser.LEXER ) {
return getAllCharValues();
}
return IntervalSet.of(Token.MIN_TOKEN_TYPE, getMaxTokenType());
}
/** Return min to max char as defined by the target.
* If no target, use max unicode char value.
*/
public IntSet getAllCharValues() {
return IntervalSet.of(Label.MIN_CHAR_VALUE, getMaxCharValue());
}
/** How many token types have been allocated so far? */
public int getMaxTokenType() {
return maxTokenType;
}
/** Return a new unique integer in the token type space */
public int getNewTokenType() {
maxTokenType++;

View File

@ -18,11 +18,13 @@ public class GrammarAST extends CommonTree {
public GrammarAST(int type) { super(new CommonToken(type, ANTLRParser.tokenNames[type])); }
public GrammarAST(int type, Token t) {
this(new CommonToken(type, t.getText()));
token.setInputStream(t.getInputStream());
token.setLine(t.getLine());
token.setCharPositionInLine(t.getCharPositionInLine());
}
public GrammarAST(int type, Token t, String text) {
this(new CommonToken(type, text));
token.setInputStream(t.getInputStream());
token.setLine(t.getLine());
token.setCharPositionInLine(t.getCharPositionInLine());
}

View File

@ -1,31 +0,0 @@
package org.antlr.v4.tool;
import org.antlr.v4.automata.DFA;
import org.antlr.v4.automata.DFAState;
import org.antlr.v4.misc.IntSet;
import java.util.HashMap;
import java.util.Map;
public class MultipleRecursiveAltsMessage extends Message {
public DFAState d;
public DFA dfa;
public IntSet recursiveAltSet;
public MultipleRecursiveAltsMessage(String fileName, DFAState d, IntSet recursiveAltSet) {
super(ErrorType.MULTIPLE_RECURSIVE_ALTS);
this.d = d;
this.dfa = d.dfa;
this.recursiveAltSet = recursiveAltSet;
this.line = dfa.decisionNFAStartState.ast.getLine();
this.charPosition = dfa.decisionNFAStartState.ast.getCharPositionInLine();
this.fileName = fileName;
Map<String, Object> info = new HashMap<String, Object>();
info.put("dfa", dfa);
info.put("ruleName", dfa.decisionNFAStartState.rule.name);
info.put("alts", recursiveAltSet);
args = new Object[] {info}; // pass this whole object in to message
}
}

View File

@ -1,46 +0,0 @@
package org.antlr.v4.tool;
import org.antlr.v4.analysis.MachineProbe;
import org.antlr.v4.automata.DFA;
import org.antlr.v4.automata.DFAState;
import org.antlr.v4.automata.NFAState;
import org.antlr.v4.misc.IntSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class RecursionOverflowMessage extends Message {
DFA dfa;
DFAState d;
NFAState s;
int altNum;
int depth;
public RecursionOverflowMessage(String fileName, DFAState d, NFAState s, int altNum, int depth) {
super(ErrorType.RECURSION_OVERFLOW);
this.d = d;
this.dfa = d.dfa;
this.s = s;
this.altNum = altNum;
this.depth = depth;
this.line = dfa.decisionNFAStartState.ast.getLine();
this.charPosition = dfa.decisionNFAStartState.ast.getCharPositionInLine();
this.fileName = fileName;
MachineProbe probe = new MachineProbe(dfa);
List<IntSet> labels = probe.getEdgeLabels(d);
String input = probe.getInputSequenceDisplay(dfa.g, labels);
Map<String, Object> info = new HashMap<String, Object>();
info.put("dfa", dfa);
info.put("dfaState", d);
info.put("alt", altNum);
info.put("depth", depth);
info.put("input", input);
info.put("nfaState", s);
info.put("sourceRule", s.rule);
info.put("targetRule", s.transition(0).target.rule);
args = new Object[] {info}; // pass this whole object in to message
}
}

View File

@ -33,6 +33,11 @@ import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.Token;
import org.antlr.runtime.TokenSource;
import org.antlr.v4.Tool;
import org.antlr.v4.automata.LexerNFAFactory;
import org.antlr.v4.automata.NFA;
import org.antlr.v4.automata.ParserNFAFactory;
import org.antlr.v4.parse.ANTLRParser;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.tool.ANTLRErrorListener;
import org.antlr.v4.tool.ErrorManager;
import org.antlr.v4.tool.Grammar;
@ -99,6 +104,24 @@ public abstract class BaseTest {
return tool;
}
NFA createNFA(Grammar g) {
if ( g.ast!=null && !g.ast.hasErrors ) {
System.out.println(g.ast.toStringTree());
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
for (Grammar imp : g.getImportedGrammars()) {
antlr.process(imp);
}
}
}
ParserNFAFactory f = new ParserNFAFactory(g);
if ( g.getType()== ANTLRParser.LEXER ) f = new LexerNFAFactory(g);
return f.createNFA();
}
protected boolean compile(String fileName) {
String compiler = "javac";
String classpathOption = "-classpath";
@ -427,11 +450,24 @@ public abstract class BaseTest {
String[] lines = input.split("\n");
String fileName = getFilenameFromFirstLineOfGrammar(lines[0]);
Grammar g = new Grammar(fileName, input);
g.loadImportedGrammars();
if ( printTree ) {
if ( g.ast!=null ) System.out.println(g.ast.toStringTree());
else System.out.println("null tree");
}
if ( g.ast!=null && !g.ast.hasErrors ) {
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
for (Grammar imp : g.getImportedGrammars()) {
antlr.process(imp);
}
}
}
//g.loadImportedGrammars();
}
catch (RecognitionException re) {
re.printStackTrace(System.err);
@ -779,18 +815,6 @@ public abstract class BaseTest {
return lines[0].substring(prefix.length(),lines[0].length());
}
public String sortLinesInString(String s) {
String lines[] = s.split("\n");
Arrays.sort(lines);
List<String> linesL = Arrays.asList(lines);
StringBuffer buf = new StringBuffer();
for (String l : linesL) {
buf.append(l);
buf.append('\n');
}
return buf.toString();
}
/**
* When looking at a result set that consists of a Map/HashTable
* we cannot rely on the output order, as the hashing algorithm or other aspects

View File

@ -0,0 +1,20 @@
package org.antlr.v4.test;
import org.antlr.v4.tool.Grammar;
import org.junit.Test;
public class TestDFAConstruction extends BaseTest {
@Test
public void testA() throws Exception {
Grammar g = new Grammar(
"parser grammar P;\n"+
"a : A;");
String expecting =
"RuleStart_a_0->s2\n" +
"s2-A->s3\n" +
"s3->RuleStop_a_1\n" +
"RuleStop_a_1-EOF->s4\n";
//checkRule(g, "a", expecting);
}
}

View File

@ -0,0 +1,67 @@
package org.antlr.v4.test;
import org.antlr.v4.analysis.LinearApproximator;
import org.antlr.v4.automata.DFA;
import org.antlr.v4.automata.DecisionState;
import org.antlr.v4.automata.NFA;
import org.antlr.v4.automata.NFAState;
import org.antlr.v4.tool.Grammar;
import org.junit.Test;
public class TestLinearApproximateLookahead extends BaseTest {
@Test
public void testLL1Block() throws Exception {
String g =
"parser grammar P;\n"+
"a : A | B ;";
String expecting =
"s0-A->:s1=>1\n" +
"s0-B->:s2=>2\n";
checkRule(g, "a", expecting);
}
@Test
public void testLL2Block() throws Exception {
String g =
"parser grammar P;\n"+
"a : A B | A C ;";
String expecting =
"s0-A->s1\n" +
"s0-A->s3\n" +
"s1-B->:s2=>1\n" +
"s3-C->:s4=>2\n";
checkRule(g, "a", expecting);
}
@Test
public void testNonDetLL1Block() throws Exception {
String g =
"parser grammar P;\n"+
"a : A | B | A ;";
String expecting = null;
checkRule(g, "a", expecting);
}
@Test
public void testNonDetLL2Block() throws Exception {
String g =
"parser grammar P;\n"+
"a : A B | A B | C ;";
String expecting = null;
checkRule(g, "a", expecting);
}
void checkRule(String gtext, String ruleName, String expecting)
throws Exception
{
Grammar g = new Grammar(gtext);
NFA nfa = createNFA(g);
NFAState s = nfa.ruleToStartState.get(g.getRule(ruleName));
DecisionState blk = (DecisionState)s.transition(0).target;
LinearApproximator lin = new LinearApproximator(g, blk.decision);
DFA dfa = lin.createDFA(blk);
String result = null;
if ( dfa!=null ) result = dfa.toString();
assertEquals(expecting, result);
}
}

View File

@ -1,7 +1,9 @@
package org.antlr.v4.test;
import org.antlr.v4.Tool;
import org.antlr.v4.automata.*;
import org.antlr.v4.parse.ANTLRParser;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.Rule;
import org.junit.Test;
@ -264,13 +266,7 @@ public class TestNFAConstruction extends BaseTest {
"lexer grammar t;\n"+
"A : (options {greedy=false;}:'0'..'9')+ ;\n"); // TODO: check after doing greedy
String expecting =
"RuleStart_A_0->PlusBlockStart_4\n" +
"PlusBlockStart_4->s2\n" +
"s2-'0'..'9'->s3\n" +
"s3->LoopBack_5\n" +
"LoopBack_5->BlockEnd_6\n" +
"LoopBack_5->s2\n" +
"BlockEnd_6->RuleStop_A_1\n";
"\n";
checkRule(g, "A", expecting);
}
@ -366,7 +362,19 @@ public class TestNFAConstruction extends BaseTest {
"parser grammar P;\n"+
"a : {p1}? A | {p2}? B ;");
String expecting =
"\n";
"RuleStart_a_0->BlockStart_10\n" +
"BlockStart_10->s2\n" +
"BlockStart_10->s6\n" +
"s2-{p1}?->s3\n" +
"s6-{p2}?->s7\n" +
"s3->s4\n" +
"s7->s8\n" +
"s4-A->s5\n" +
"s8-B->s9\n" +
"s5->BlockEnd_11\n" +
"s9->BlockEnd_11\n" +
"BlockEnd_11->RuleStop_a_1\n" +
"RuleStop_a_1-EOF->s12\n";
checkRule(g, "a", expecting);
}
@ -414,10 +422,6 @@ public class TestNFAConstruction extends BaseTest {
"\n";
checkRule(g, "a", expecting);
String expectingGrammarStr =
"1:8: parser grammar P;\n" +
"a : ~ A ;";
assertEquals(expectingGrammarStr, g.toString());
}
@Test public void testNotSingletonBlockSet() throws Exception {
@ -428,11 +432,6 @@ public class TestNFAConstruction extends BaseTest {
String expecting =
"\n";
checkRule(g, "a", expecting);
String expectingGrammarStr =
"1:8: parser grammar P;\n" +
"a : ~ ( A ) ;";
assertEquals(expectingGrammarStr, g.toString());
}
@Test public void testNotCharSet() throws Exception {
@ -440,14 +439,10 @@ public class TestNFAConstruction extends BaseTest {
"lexer grammar P;\n"+
"A : ~'3' ;\n");
String expecting =
"\n";
"RuleStart_A_1->s5\n" +
"s5-{'\\u0000'..'2', '4'..'\\uFFFE'}->s6\n" +
"s6->RuleStop_A_2\n";
checkRule(g, "A", expecting);
String expectingGrammarStr =
"1:7: lexer grammar P;\n" +
"A : ~ '3' ;\n"+
"Tokens : A ;";
assertEquals(expectingGrammarStr, g.toString());
}
@Test public void testNotBlockSet() throws Exception {
@ -457,12 +452,6 @@ public class TestNFAConstruction extends BaseTest {
String expecting =
"\n";
checkRule(g, "A", expecting);
String expectingGrammarStr =
"1:7: lexer grammar P;\n" +
"A : ~ ( '3' | 'b' ) ;\n" +
"Tokens : A ;";
assertEquals(expectingGrammarStr, g.toString());
}
@Test public void testNotSetLoop() throws Exception {
@ -472,12 +461,6 @@ public class TestNFAConstruction extends BaseTest {
String expecting =
"\n";
checkRule(g, "A", expecting);
String expectingGrammarStr =
"1:7: lexer grammar P;\n" +
"A : (~ ( '3' ) )* ;\n" +
"Tokens : A ;";
assertEquals(expectingGrammarStr, g.toString());
}
@Test public void testNotBlockSetLoop() throws Exception {
@ -487,28 +470,8 @@ public class TestNFAConstruction extends BaseTest {
String expecting =
"\n";
checkRule(g, "A", expecting);
String expectingGrammarStr =
"1:7: lexer grammar P;\n" +
"A : (~ ( '3' | 'b' ) )* ;\n" +
"Tokens : A ;";
assertEquals(expectingGrammarStr, g.toString());
}
// @Test public void testSetsInCombinedGrammarSentToLexer() throws Exception {
// // not sure this belongs in this test suite, but whatever.
// Grammar g = new Grammar(
// "grammar t;\n"+
// "A : '{' ~('}')* '}';\n");
// String result = g.getLexerGrammar();
// String expecting =
// "lexer grammar t;\n" +
// "\n" +
// "// $ANTLR src \"<string>\" 2\n"+
// "A : '{' ~('}')* '}';\n";
// assertEquals(result, expecting);
// }
@Test public void testLabeledNotSet() throws Exception {
Grammar g = new Grammar(
"parser grammar P;\n"+
@ -521,11 +484,6 @@ public class TestNFAConstruction extends BaseTest {
".s3->:s4\n" +
":s4-EOF->.s5\n";
checkRule(g, "a", expecting);
String expectingGrammarStr =
"1:8: parser grammar P;\n" +
"a : t=~ A ;";
assertEquals(expectingGrammarStr, g.toString());
}
@Test public void testLabeledNotCharSet() throws Exception {
@ -539,12 +497,6 @@ public class TestNFAConstruction extends BaseTest {
".s3->:s4\n" +
":s4-<EOT>->.s5\n";
checkRule(g, "A", expecting);
String expectingGrammarStr =
"1:7: lexer grammar P;\n" +
"A : t=~ '3' ;\n"+
"Tokens : A ;";
assertEquals(expectingGrammarStr, g.toString());
}
@Test public void testLabeledNotBlockSet() throws Exception {
@ -558,12 +510,6 @@ public class TestNFAConstruction extends BaseTest {
".s3->:s4\n" +
":s4-<EOT>->.s5\n";
checkRule(g, "A", expecting);
String expectingGrammarStr =
"1:7: lexer grammar P;\n" +
"A : t=~ ( '3' | 'b' ) ;\n" +
"Tokens : A ;";
assertEquals(expectingGrammarStr, g.toString());
}
@Test public void testEscapedCharLiteral() throws Exception {
@ -911,6 +857,18 @@ public class TestNFAConstruction extends BaseTest {
}
void checkRule(Grammar g, String ruleName, String expecting) {
if ( g.ast!=null && !g.ast.hasErrors ) {
System.out.println(g.ast.toStringTree());
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
for (Grammar imp : g.getImportedGrammars()) {
antlr.process(imp);
}
}
}
ParserNFAFactory f = new ParserNFAFactory(g);
if ( g.getType()== ANTLRParser.LEXER ) f = new LexerNFAFactory(g);
NFA nfa = f.createNFA();