From 55899d032cd5bcbeee76c57826e98d8569a19ab4 Mon Sep 17 00:00:00 2001 From: parrt Date: Tue, 20 Apr 2010 15:29:43 -0800 Subject: [PATCH] got modes sort of working [git-p4: depot-paths = "//depot/code/antlr4/main/": change = 6818] --- tool/src/org/antlr/v4/Tool.java | 12 +- .../antlr/v4/analysis/AnalysisPipeline.java | 24 +- .../v4/analysis/LexerNFAToDFAConverter.java | 11 +- .../antlr/v4/automata/LexerNFAFactory.java | 34 +- tool/src/org/antlr/v4/automata/NFA.java | 2 + .../org/antlr/v4/automata/NFASerializer.java | 3 + .../antlr/v4/automata/ParserNFAFactory.java | 9 +- tool/src/org/antlr/v4/misc/IntervalSet.java | 3 +- .../v4/semantics/BasicSemanticChecks.java | 18 +- .../org/antlr/v4/semantics/CollectSymbols.g | 9 +- .../antlr/v4/semantics/CollectSymbols.java | 1026 +++++++++-------- tool/src/org/antlr/v4/tool/Grammar.java | 20 +- .../org/antlr/v4/tool/LabelElementPair.java | 4 +- tool/src/org/antlr/v4/tool/LexerGrammar.java | 35 + tool/src/org/antlr/v4/tool/Rule.java | 9 +- tool/test/org/antlr/v4/test/BaseTest.java | 20 +- .../v4/test/TestLexerDFAConstruction.java | 25 +- .../antlr/v4/test/TestNFAConstruction.java | 96 +- 18 files changed, 787 insertions(+), 573 deletions(-) create mode 100644 tool/src/org/antlr/v4/tool/LexerGrammar.java diff --git a/tool/src/org/antlr/v4/Tool.java b/tool/src/org/antlr/v4/Tool.java index a549cdc9f..42536dda4 100644 --- a/tool/src/org/antlr/v4/Tool.java +++ b/tool/src/org/antlr/v4/Tool.java @@ -355,13 +355,13 @@ public class Tool { if ( ((GrammarRootAST)t).hasErrors ) return; GrammarRootAST ast = (GrammarRootAST)t; - Grammar g = new Grammar(this, ast); + Grammar g = createGrammar(ast); g.fileName = grammarFileNames.get(0); process(g); if ( ast!=null && ast.grammarType==ANTLRParser.COMBINED && !ast.hasErrors ) { lexerAST = extractImplicitLexer(g); // alters ast if ( lexerAST!=null ) { - Grammar lexerg = new Grammar(this, lexerAST); + LexerGrammar lexerg = new LexerGrammar(this, lexerAST); lexerg.fileName = grammarFileNames.get(0); g.implicitLexer = lexerg; lexerg.implicitLexerOwner = g; @@ -370,6 +370,11 @@ public class Tool { } } + public Grammar createGrammar(GrammarRootAST ast) { + if ( ast.grammarType==ANTLRParser.LEXER ) return new LexerGrammar(this, ast); + else return new Grammar(this, ast); + } + public void process(Grammar g) { grammars.put(g.name, g); g.loadImportedGrammars(); @@ -390,12 +395,11 @@ public class Tool { // BUILD NFA FROM AST NFAFactory factory = new ParserNFAFactory(g); - if ( g.getType()==ANTLRParser.LEXER ) factory = new LexerNFAFactory(g); + if ( g.isLexer() ) factory = new LexerNFAFactory((LexerGrammar)g); g.nfa = factory.createNFA(); if ( generate_NFA_dot ) generateNFAs(g); - // PERFORM GRAMMAR ANALYSIS ON NFA: BUILD DECISION DFAs AnalysisPipeline anal = new AnalysisPipeline(g); anal.process(); diff --git a/tool/src/org/antlr/v4/analysis/AnalysisPipeline.java b/tool/src/org/antlr/v4/analysis/AnalysisPipeline.java index 77fd23030..4e1542b1d 100644 --- a/tool/src/org/antlr/v4/analysis/AnalysisPipeline.java +++ b/tool/src/org/antlr/v4/analysis/AnalysisPipeline.java @@ -2,8 +2,8 @@ package org.antlr.v4.analysis; import org.antlr.v4.automata.DFA; import org.antlr.v4.automata.DecisionState; -import org.antlr.v4.parse.ANTLRParser; import org.antlr.v4.tool.Grammar; +import org.antlr.v4.tool.LexerGrammar; public class AnalysisPipeline { public Grammar g; @@ -18,13 +18,21 @@ public class AnalysisPipeline { lr.check(); if ( lr.listOfRecursiveCycles.size()>0 ) return; // bail out - if ( g.getType() == ANTLRParser.LEXER ) { - 
LexerNFAToDFAConverter conv = new LexerNFAToDFAConverter(g); - DFA dfa = conv.createDFA(); - g.setLookaheadDFA(0, dfa); // only one decision + // BUILD DFA FOR EACH DECISION + if ( g.isLexer() ) processLexer(); + else processParserOrTreeParser(); + } + + void processLexer() { + LexerGrammar lg = (LexerGrammar)g; + int d = 0; + for (String modeName : lg.modes.keySet()) { + LexerNFAToDFAConverter conv = new LexerNFAToDFAConverter(lg); + DFA dfa = conv.createDFA(modeName); + g.setLookaheadDFA(d, dfa); + d++; if ( g.tool.minimizeDFA ) { - System.out.println("MINIMIZE"); int before = dfa.stateSet.size(); DFAMinimizer dmin = new DFAMinimizer(dfa); dfa.minimized = dmin.minimize(); @@ -33,10 +41,10 @@ public class AnalysisPipeline { System.out.println("DFA minimized from "+before+" to "+after+" states"); } } - return; } + } - // BUILD DFA FOR EACH DECISION IN NONLEXER + void processParserOrTreeParser() { for (DecisionState s : g.nfa.decisionToNFAState) { System.out.println("\nDECISION "+s.decision); diff --git a/tool/src/org/antlr/v4/analysis/LexerNFAToDFAConverter.java b/tool/src/org/antlr/v4/analysis/LexerNFAToDFAConverter.java index e6d57ce65..c49afc7f2 100644 --- a/tool/src/org/antlr/v4/analysis/LexerNFAToDFAConverter.java +++ b/tool/src/org/antlr/v4/analysis/LexerNFAToDFAConverter.java @@ -5,6 +5,7 @@ import org.antlr.v4.misc.IntervalSet; import org.antlr.v4.misc.OrderedHashSet; import org.antlr.v4.misc.Utils; import org.antlr.v4.tool.Grammar; +import org.antlr.v4.tool.LexerGrammar; import java.util.*; @@ -26,13 +27,15 @@ public class LexerNFAToDFAConverter { public static boolean debug = false; - public LexerNFAToDFAConverter(Grammar g) { + public LexerNFAToDFAConverter(LexerGrammar g) { this.g = g; - TokensStartState startState = (TokensStartState)g.nfa.states.get(0); - dfa = new DFA(g, startState); } - public DFA createDFA() { + public DFA createDFA() { return createDFA(LexerGrammar.DEFAULT_MODE_NAME); } + + public DFA createDFA(String modeName) { + TokensStartState startState = g.nfa.modeToStartState.get(modeName); + dfa = new DFA(g, startState); closureBusy = new HashSet(); LexerState start = computeStartState(); dfa.startState = start; diff --git a/tool/src/org/antlr/v4/automata/LexerNFAFactory.java b/tool/src/org/antlr/v4/automata/LexerNFAFactory.java index 28d3bab9c..d6ac74111 100644 --- a/tool/src/org/antlr/v4/automata/LexerNFAFactory.java +++ b/tool/src/org/antlr/v4/automata/LexerNFAFactory.java @@ -1,28 +1,38 @@ package org.antlr.v4.automata; import org.antlr.v4.codegen.Target; -import org.antlr.v4.tool.Grammar; import org.antlr.v4.tool.GrammarAST; +import org.antlr.v4.tool.LexerGrammar; import org.antlr.v4.tool.Rule; import org.antlr.v4.tool.TerminalAST; import org.stringtemplate.v4.misc.Misc; -/** */ +import java.util.List; + public class LexerNFAFactory extends ParserNFAFactory { - public LexerNFAFactory(Grammar g) { super(g); } + public LexerNFAFactory(LexerGrammar g) { super(g); } public NFA createNFA() { - // create s0, start state (must be first) - // implied Tokens rule node - NFAState startState = newState(TokensStartState.class, null); + // BUILD ALL START STATES (ONE PER MODE) + for (String modeName : ((LexerGrammar)g).modes.keySet()) { + // create s0, start state; implied Tokens rule node + TokensStartState startState = + (TokensStartState)newState(TokensStartState.class, null); + nfa.modeToStartState.put(modeName, startState); + } - _createNFA(); + // CREATE NFA FOR EACH RULE + _createNFA(g.rules.values()); - // LINK START STATE TO EACH TOKEN RULE - for (Rule r : 
g.rules.values()) { - if ( !r.isFragment() ) { - RuleStartState s = nfa.ruleToStartState.get(r); - epsilon(startState, s); + // LINK MODE START STATE TO EACH TOKEN RULE + for (String modeName : ((LexerGrammar)g).modes.keySet()) { + List rules = ((LexerGrammar)g).modes.get(modeName); + TokensStartState startState = nfa.modeToStartState.get(modeName); + for (Rule r : rules) { + if ( !r.isFragment() ) { + RuleStartState s = nfa.ruleToStartState.get(r); + epsilon(startState, s); + } } } diff --git a/tool/src/org/antlr/v4/automata/NFA.java b/tool/src/org/antlr/v4/automata/NFA.java index 0bb1e80ff..54b875bac 100644 --- a/tool/src/org/antlr/v4/automata/NFA.java +++ b/tool/src/org/antlr/v4/automata/NFA.java @@ -23,6 +23,8 @@ public class NFA { public Map ruleToStartState = new LinkedHashMap(); public Map ruleToStopState = new LinkedHashMap(); + public Map modeToStartState = + new LinkedHashMap(); int stateNumber = 0; diff --git a/tool/src/org/antlr/v4/automata/NFASerializer.java b/tool/src/org/antlr/v4/automata/NFASerializer.java index caf6f7f33..aec2ea00c 100644 --- a/tool/src/org/antlr/v4/automata/NFASerializer.java +++ b/tool/src/org/antlr/v4/automata/NFASerializer.java @@ -65,6 +65,9 @@ public class NFASerializer { } String getStateString(NFAState s) { + if ( s==null ) { + System.out.println("s==null"); + } int n = s.stateNumber; String stateStr = "s"+n; if ( s instanceof StarBlockStartState ) stateStr = "StarBlockStart_"+n; diff --git a/tool/src/org/antlr/v4/automata/ParserNFAFactory.java b/tool/src/org/antlr/v4/automata/ParserNFAFactory.java index cfba2180b..151fd26e0 100644 --- a/tool/src/org/antlr/v4/automata/ParserNFAFactory.java +++ b/tool/src/org/antlr/v4/automata/ParserNFAFactory.java @@ -11,6 +11,7 @@ import org.antlr.v4.parse.NFABuilder; import org.antlr.v4.tool.*; import java.lang.reflect.Constructor; +import java.util.Collection; import java.util.List; /** NFA construction routines triggered by NFABuilder.g. 
@@ -25,16 +26,16 @@ public class ParserNFAFactory implements NFAFactory { public ParserNFAFactory(Grammar g) { this.g = g; nfa = new NFA(g); } public NFA createNFA() { - _createNFA(); + _createNFA(g.rules.values()); addEOFTransitionToStartRules(); return nfa; } - public void _createNFA() { + public void _createNFA(Collection rules) { createRuleStartAndStopNFAStates(); GrammarASTAdaptor adaptor = new GrammarASTAdaptor(); - for (Rule r : g.rules.values()) { + for (Rule r : rules) { // find rule's block GrammarAST blk = (GrammarAST)r.ast.getFirstChildWithType(ANTLRParser.BLOCK); CommonTreeNodeStream nodes = new CommonTreeNodeStream(adaptor,blk); @@ -125,7 +126,7 @@ public class ParserNFAFactory implements NFAFactory { public Handle not(GrammarAST n, Handle A) { GrammarAST ast = A.left.ast; int ttype = 0; - if ( g.getType()==ANTLRParser.LEXER ) { + if ( g.isLexer() ) { ttype = Target.getCharValueFromGrammarCharLiteral(ast.getText()); } else { diff --git a/tool/src/org/antlr/v4/misc/IntervalSet.java b/tool/src/org/antlr/v4/misc/IntervalSet.java index 1a6499b52..26d48d25a 100644 --- a/tool/src/org/antlr/v4/misc/IntervalSet.java +++ b/tool/src/org/antlr/v4/misc/IntervalSet.java @@ -28,7 +28,6 @@ package org.antlr.v4.misc; import org.antlr.v4.automata.Label; -import org.antlr.v4.parse.ANTLRParser; import org.antlr.v4.tool.Grammar; import java.util.ArrayList; @@ -596,7 +595,7 @@ public class IntervalSet implements IntSet { } else { if ( g!=null ) { - if ( g.getType()!= ANTLRParser.LEXER ) { + if ( !g.isLexer() ) { for (int i=a; i<=b; i++) { if ( i>a ) buf.append(", "); buf.append(g.getTokenDisplayName(i)); diff --git a/tool/src/org/antlr/v4/semantics/BasicSemanticChecks.java b/tool/src/org/antlr/v4/semantics/BasicSemanticChecks.java index 505b3d91c..3a2e7abea 100644 --- a/tool/src/org/antlr/v4/semantics/BasicSemanticChecks.java +++ b/tool/src/org/antlr/v4/semantics/BasicSemanticChecks.java @@ -141,7 +141,7 @@ public class BasicSemanticChecks { } void checkMode(Token modeNameToken) { - if ( g.getType()!=ANTLRParser.LEXER ) { + if ( !g.isLexer() ) { g.tool.errMgr.grammarError(ErrorType.MODE_NOT_IN_LEXER, g.fileName, modeNameToken, modeNameToken.getText(), g); } @@ -170,11 +170,11 @@ public class BasicSemanticChecks { void checkInvalidRuleDef(Token ruleID) { String fileName = ruleID.getInputStream().getSourceName(); - if ( g.getType()==ANTLRParser.LEXER && Character.isLowerCase(ruleID.getText().charAt(0)) ) { + if ( g.isLexer() && Character.isLowerCase(ruleID.getText().charAt(0)) ) { g.tool.errMgr.grammarError(ErrorType.PARSER_RULES_NOT_ALLOWED, fileName, ruleID, ruleID.getText()); } - if ( (g.getType()==ANTLRParser.PARSER||g.getType()==ANTLRParser.TREE) && + if ( (g.isParser()||g.isTreeGrammar()) && Character.isUpperCase(ruleID.getText().charAt(0)) ) { g.tool.errMgr.grammarError(ErrorType.LEXER_RULES_NOT_ALLOWED, @@ -184,7 +184,7 @@ public class BasicSemanticChecks { void checkInvalidRuleRef(Token ruleID) { String fileName = ruleID.getInputStream().getSourceName(); - if ( g.getType()==ANTLRParser.LEXER && Character.isLowerCase(ruleID.getText().charAt(0)) ) { + if ( g.isLexer() && Character.isLowerCase(ruleID.getText().charAt(0)) ) { g.tool.errMgr.grammarError(ErrorType.PARSER_RULES_NOT_ALLOWED, fileName, ruleID, ruleID.getText()); } @@ -198,7 +198,7 @@ public class BasicSemanticChecks { tokenID, tokenID.getText()); } - if ( g.getType()!=ANTLRParser.COMBINED ) { + if ( !g.isCombined() ) { g.tool.errMgr.grammarError(ErrorType.CANNOT_ALIAS_TOKENS, fileName, tokenID, @@ -212,7 +212,7 @@ public class 
BasicSemanticChecks { */ void checkTokenArgs(Token tokenID) { String fileName = tokenID.getInputStream().getSourceName(); - if ( g.getType()!=ANTLRParser.LEXER ) { + if ( !g.isLexer() ) { g.tool.errMgr.grammarError(ErrorType.ARGS_ON_TOKEN_REF, fileName, tokenID, tokenID.getText()); } @@ -315,7 +315,7 @@ public class BasicSemanticChecks { Token altStart, int alt) { - if ( g.getType()==ANTLRParser.TREE && + if ( g.isTreeGrammar() && options!=null && options.get("output")!=null && options.get("output").equals("template") && options.get("rewrite")!=null && @@ -377,7 +377,7 @@ public class BasicSemanticChecks { if ( options==null ) return; String fileName = root.token.getInputStream().getSourceName(); String filter = options.get("filter"); - if ( g.getType()==ANTLRParser.TREE && filter!=null && filter.equals("true") ) { + if ( g.isTreeGrammar() && filter!=null && filter.equals("true") ) { // check for conflicting options // filter => backtrack=true (can't be false) // filter&&output!=AST => error @@ -424,7 +424,7 @@ public class BasicSemanticChecks { importID, g, delegate); } - if ( g.getType()==ANTLRParser.COMBINED && + if ( g.isCombined() && (delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.LEXER))|| delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.PARSER))) ) { diff --git a/tool/src/org/antlr/v4/semantics/CollectSymbols.g b/tool/src/org/antlr/v4/semantics/CollectSymbols.g index 9ac59bdd5..89c6715cd 100644 --- a/tool/src/org/antlr/v4/semantics/CollectSymbols.g +++ b/tool/src/org/antlr/v4/semantics/CollectSymbols.g @@ -72,7 +72,8 @@ import org.stringtemplate.v4.misc.MultiMap; } @members { -Rule currentRule = null; +Rule currentRule; +String currentMode = Grammar.DEFAULT_MODE_NAME; int currentAlt = 1; // 1..n public List rules = new ArrayList(); public List rulerefs = new ArrayList(); @@ -95,6 +96,7 @@ topdown : globalScope | globalNamedAction | tokensSection + | mode | rule | ruleArg | ruleReturns @@ -140,6 +142,8 @@ tokensSection ) ; +mode: ^(MODE ID .+) {currentMode = $ID.text;} ; + rule @init {List modifiers = new ArrayList();} : ^( RULE @@ -151,13 +155,14 @@ rule { int numAlts = $RULE.getFirstChildWithType(BLOCK).getChildCount(); Rule r = new Rule(g, $name.text, (GrammarASTWithOptions)$RULE, numAlts); + if ( g.isLexer() ) r.mode = currentMode; if ( modifiers.size()>0 ) r.modifiers = modifiers; rules.add(r); currentRule = r; currentAlt = 1; } ; - + setAlt : {inContext("RULE BLOCK")}? ( ALT | ALT_REWRITE ) {currentAlt = $start.getChildIndex()+1;} diff --git a/tool/src/org/antlr/v4/semantics/CollectSymbols.java b/tool/src/org/antlr/v4/semantics/CollectSymbols.java index ea7b17d00..b18a99c1d 100644 --- a/tool/src/org/antlr/v4/semantics/CollectSymbols.java +++ b/tool/src/org/antlr/v4/semantics/CollectSymbols.java @@ -1,4 +1,4 @@ -// $ANTLR ${project.version} ${buildNumber} CollectSymbols.g 2010-04-19 15:55:57 +// $ANTLR ${project.version} ${buildNumber} CollectSymbols.g 2010-04-19 17:33:29 /* [The "BSD license"] @@ -37,6 +37,7 @@ import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; + /** Collects rules, terminals, strings, actions, scopes etc... 
from AST * Side-effects: None */ @@ -162,7 +163,8 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { public String getGrammarFileName() { return "CollectSymbols.g"; } - Rule currentRule = null; + Rule currentRule; + String currentMode = LexerGrammar.DEFAULT_MODE_NAME; int currentAlt = 1; // 1..n public List rules = new ArrayList(); public List rulerefs = new ArrayList(); @@ -182,15 +184,15 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "topdown" - // CollectSymbols.g:93:1: topdown : ( globalScope | globalNamedAction | tokensSection | rule | ruleArg | ruleReturns | ruleNamedAction | ruleScopeSpec | ruleref | rewriteElement | terminal | labeledElement | setAlt | ruleAction | finallyClause | exceptionHandler ); + // CollectSymbols.g:94:1: topdown : ( globalScope | globalNamedAction | tokensSection | mode | rule | ruleArg | ruleReturns | ruleNamedAction | ruleScopeSpec | ruleref | rewriteElement | terminal | labeledElement | setAlt | ruleAction | finallyClause | exceptionHandler ); public final void topdown() throws RecognitionException { try { - // CollectSymbols.g:95:5: ( globalScope | globalNamedAction | tokensSection | rule | ruleArg | ruleReturns | ruleNamedAction | ruleScopeSpec | ruleref | rewriteElement | terminal | labeledElement | setAlt | ruleAction | finallyClause | exceptionHandler ) - int alt1=16; + // CollectSymbols.g:96:5: ( globalScope | globalNamedAction | tokensSection | mode | rule | ruleArg | ruleReturns | ruleNamedAction | ruleScopeSpec | ruleref | rewriteElement | terminal | labeledElement | setAlt | ruleAction | finallyClause | exceptionHandler ) + int alt1=17; alt1 = dfa1.predict(input); switch (alt1) { case 1 : - // CollectSymbols.g:95:7: globalScope + // CollectSymbols.g:96:7: globalScope { pushFollow(FOLLOW_globalScope_in_topdown97); globalScope(); @@ -201,7 +203,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 2 : - // CollectSymbols.g:96:7: globalNamedAction + // CollectSymbols.g:97:7: globalNamedAction { pushFollow(FOLLOW_globalNamedAction_in_topdown105); globalNamedAction(); @@ -212,7 +214,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 3 : - // CollectSymbols.g:97:7: tokensSection + // CollectSymbols.g:98:7: tokensSection { pushFollow(FOLLOW_tokensSection_in_topdown113); tokensSection(); @@ -223,10 +225,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 4 : - // CollectSymbols.g:98:7: rule + // CollectSymbols.g:99:7: mode { - pushFollow(FOLLOW_rule_in_topdown121); - rule(); + pushFollow(FOLLOW_mode_in_topdown121); + mode(); state._fsp--; if (state.failed) return ; @@ -234,10 +236,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 5 : - // CollectSymbols.g:99:7: ruleArg + // CollectSymbols.g:100:7: rule { - pushFollow(FOLLOW_ruleArg_in_topdown129); - ruleArg(); + pushFollow(FOLLOW_rule_in_topdown129); + rule(); state._fsp--; if (state.failed) return ; @@ -245,10 +247,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 6 : - // CollectSymbols.g:100:7: ruleReturns + // CollectSymbols.g:101:7: ruleArg { - pushFollow(FOLLOW_ruleReturns_in_topdown137); - ruleReturns(); + pushFollow(FOLLOW_ruleArg_in_topdown137); + ruleArg(); state._fsp--; if (state.failed) return ; @@ -256,10 +258,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 7 
: - // CollectSymbols.g:101:7: ruleNamedAction + // CollectSymbols.g:102:7: ruleReturns { - pushFollow(FOLLOW_ruleNamedAction_in_topdown145); - ruleNamedAction(); + pushFollow(FOLLOW_ruleReturns_in_topdown145); + ruleReturns(); state._fsp--; if (state.failed) return ; @@ -267,10 +269,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 8 : - // CollectSymbols.g:102:7: ruleScopeSpec + // CollectSymbols.g:103:7: ruleNamedAction { - pushFollow(FOLLOW_ruleScopeSpec_in_topdown153); - ruleScopeSpec(); + pushFollow(FOLLOW_ruleNamedAction_in_topdown153); + ruleNamedAction(); state._fsp--; if (state.failed) return ; @@ -278,10 +280,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 9 : - // CollectSymbols.g:103:7: ruleref + // CollectSymbols.g:104:7: ruleScopeSpec { - pushFollow(FOLLOW_ruleref_in_topdown161); - ruleref(); + pushFollow(FOLLOW_ruleScopeSpec_in_topdown161); + ruleScopeSpec(); state._fsp--; if (state.failed) return ; @@ -289,10 +291,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 10 : - // CollectSymbols.g:104:7: rewriteElement + // CollectSymbols.g:105:7: ruleref { - pushFollow(FOLLOW_rewriteElement_in_topdown169); - rewriteElement(); + pushFollow(FOLLOW_ruleref_in_topdown169); + ruleref(); state._fsp--; if (state.failed) return ; @@ -300,10 +302,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 11 : - // CollectSymbols.g:106:7: terminal + // CollectSymbols.g:106:7: rewriteElement { - pushFollow(FOLLOW_terminal_in_topdown190); - terminal(); + pushFollow(FOLLOW_rewriteElement_in_topdown177); + rewriteElement(); state._fsp--; if (state.failed) return ; @@ -311,10 +313,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 12 : - // CollectSymbols.g:107:7: labeledElement + // CollectSymbols.g:108:7: terminal { - pushFollow(FOLLOW_labeledElement_in_topdown198); - labeledElement(); + pushFollow(FOLLOW_terminal_in_topdown198); + terminal(); state._fsp--; if (state.failed) return ; @@ -322,10 +324,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 13 : - // CollectSymbols.g:108:7: setAlt + // CollectSymbols.g:109:7: labeledElement { - pushFollow(FOLLOW_setAlt_in_topdown206); - setAlt(); + pushFollow(FOLLOW_labeledElement_in_topdown206); + labeledElement(); state._fsp--; if (state.failed) return ; @@ -333,10 +335,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 14 : - // CollectSymbols.g:109:7: ruleAction + // CollectSymbols.g:110:7: setAlt { - pushFollow(FOLLOW_ruleAction_in_topdown214); - ruleAction(); + pushFollow(FOLLOW_setAlt_in_topdown214); + setAlt(); state._fsp--; if (state.failed) return ; @@ -344,10 +346,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 15 : - // CollectSymbols.g:110:7: finallyClause + // CollectSymbols.g:111:7: ruleAction { - pushFollow(FOLLOW_finallyClause_in_topdown222); - finallyClause(); + pushFollow(FOLLOW_ruleAction_in_topdown222); + ruleAction(); state._fsp--; if (state.failed) return ; @@ -355,9 +357,20 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 16 : - // CollectSymbols.g:111:7: exceptionHandler + // CollectSymbols.g:112:7: finallyClause { - pushFollow(FOLLOW_exceptionHandler_in_topdown230); + pushFollow(FOLLOW_finallyClause_in_topdown230); + finallyClause(); + + 
state._fsp--; + if (state.failed) return ; + + } + break; + case 17 : + // CollectSymbols.g:113:7: exceptionHandler + { + pushFollow(FOLLOW_exceptionHandler_in_topdown238); exceptionHandler(); state._fsp--; @@ -380,13 +393,13 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "bottomup" - // CollectSymbols.g:114:1: bottomup : finishRule ; + // CollectSymbols.g:116:1: bottomup : finishRule ; public final void bottomup() throws RecognitionException { try { - // CollectSymbols.g:115:2: ( finishRule ) - // CollectSymbols.g:115:4: finishRule + // CollectSymbols.g:117:2: ( finishRule ) + // CollectSymbols.g:117:4: finishRule { - pushFollow(FOLLOW_finishRule_in_bottomup241); + pushFollow(FOLLOW_finishRule_in_bottomup249); finishRule(); state._fsp--; @@ -407,24 +420,24 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "globalScope" - // CollectSymbols.g:118:1: globalScope : {...}? ^( SCOPE ID ACTION ) ; + // CollectSymbols.g:120:1: globalScope : {...}? ^( SCOPE ID ACTION ) ; public final void globalScope() throws RecognitionException { GrammarAST ACTION1=null; GrammarAST ID2=null; try { - // CollectSymbols.g:119:2: ({...}? ^( SCOPE ID ACTION ) ) - // CollectSymbols.g:119:4: {...}? ^( SCOPE ID ACTION ) + // CollectSymbols.g:121:2: ({...}? ^( SCOPE ID ACTION ) ) + // CollectSymbols.g:121:4: {...}? ^( SCOPE ID ACTION ) { if ( !((inContext("GRAMMAR"))) ) { if (state.backtracking>0) {state.failed=true; return ;} throw new FailedPredicateException(input, "globalScope", "inContext(\"GRAMMAR\")"); } - match(input,SCOPE,FOLLOW_SCOPE_in_globalScope255); if (state.failed) return ; + match(input,SCOPE,FOLLOW_SCOPE_in_globalScope263); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - ID2=(GrammarAST)match(input,ID,FOLLOW_ID_in_globalScope257); if (state.failed) return ; - ACTION1=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_globalScope259); if (state.failed) return ; + ID2=(GrammarAST)match(input,ID,FOLLOW_ID_in_globalScope265); if (state.failed) return ; + ACTION1=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_globalScope267); if (state.failed) return ; match(input, Token.UP, null); if (state.failed) return ; if ( state.backtracking==1 ) { @@ -451,23 +464,23 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "globalNamedAction" - // CollectSymbols.g:128:1: globalNamedAction : {...}? ^( AT ( ID )? ID ACTION ) ; + // CollectSymbols.g:130:1: globalNamedAction : {...}? ^( AT ( ID )? ID ACTION ) ; public final void globalNamedAction() throws RecognitionException { GrammarAST AT3=null; GrammarAST ACTION4=null; try { - // CollectSymbols.g:129:2: ({...}? ^( AT ( ID )? ID ACTION ) ) - // CollectSymbols.g:129:4: {...}? ^( AT ( ID )? ID ACTION ) + // CollectSymbols.g:131:2: ({...}? ^( AT ( ID )? ID ACTION ) ) + // CollectSymbols.g:131:4: {...}? ^( AT ( ID )? ID ACTION ) { if ( !((inContext("GRAMMAR"))) ) { if (state.backtracking>0) {state.failed=true; return ;} throw new FailedPredicateException(input, "globalNamedAction", "inContext(\"GRAMMAR\")"); } - AT3=(GrammarAST)match(input,AT,FOLLOW_AT_in_globalNamedAction278); if (state.failed) return ; + AT3=(GrammarAST)match(input,AT,FOLLOW_AT_in_globalNamedAction286); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - // CollectSymbols.g:129:33: ( ID )? + // CollectSymbols.g:131:33: ( ID )? 
int alt2=2; int LA2_0 = input.LA(1); @@ -480,17 +493,17 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } switch (alt2) { case 1 : - // CollectSymbols.g:129:33: ID + // CollectSymbols.g:131:33: ID { - match(input,ID,FOLLOW_ID_in_globalNamedAction280); if (state.failed) return ; + match(input,ID,FOLLOW_ID_in_globalNamedAction288); if (state.failed) return ; } break; } - match(input,ID,FOLLOW_ID_in_globalNamedAction283); if (state.failed) return ; - ACTION4=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_globalNamedAction285); if (state.failed) return ; + match(input,ID,FOLLOW_ID_in_globalNamedAction291); if (state.failed) return ; + ACTION4=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_globalNamedAction293); if (state.failed) return ; match(input, Token.UP, null); if (state.failed) return ; if ( state.backtracking==1 ) { @@ -512,21 +525,21 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "tokensSection" - // CollectSymbols.g:133:1: tokensSection : {...}? ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) ; + // CollectSymbols.g:135:1: tokensSection : {...}? ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) ; public final void tokensSection() throws RecognitionException { GrammarAST t=null; GrammarAST ASSIGN5=null; GrammarAST STRING_LITERAL6=null; try { - // CollectSymbols.g:134:2: ({...}? ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) ) - // CollectSymbols.g:134:4: {...}? ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) + // CollectSymbols.g:136:2: ({...}? ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) ) + // CollectSymbols.g:136:4: {...}? ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) { if ( !((inContext("TOKENS"))) ) { if (state.backtracking>0) {state.failed=true; return ;} throw new FailedPredicateException(input, "tokensSection", "inContext(\"TOKENS\")"); } - // CollectSymbols.g:135:3: ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) + // CollectSymbols.g:137:3: ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) int alt3=2; int LA3_0 = input.LA(1); @@ -545,13 +558,13 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } switch (alt3) { case 1 : - // CollectSymbols.g:135:5: ^( ASSIGN t= ID STRING_LITERAL ) + // CollectSymbols.g:137:5: ^( ASSIGN t= ID STRING_LITERAL ) { - ASSIGN5=(GrammarAST)match(input,ASSIGN,FOLLOW_ASSIGN_in_tokensSection308); if (state.failed) return ; + ASSIGN5=(GrammarAST)match(input,ASSIGN,FOLLOW_ASSIGN_in_tokensSection316); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - t=(GrammarAST)match(input,ID,FOLLOW_ID_in_tokensSection312); if (state.failed) return ; - STRING_LITERAL6=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_tokensSection314); if (state.failed) return ; + t=(GrammarAST)match(input,ID,FOLLOW_ID_in_tokensSection320); if (state.failed) return ; + STRING_LITERAL6=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_tokensSection322); if (state.failed) return ; match(input, Token.UP, null); if (state.failed) return ; if ( state.backtracking==1 ) { @@ -562,9 +575,9 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 2 : - // CollectSymbols.g:138:5: t= ID + // CollectSymbols.g:140:5: t= ID { - t=(GrammarAST)match(input,ID,FOLLOW_ID_in_tokensSection328); if (state.failed) return ; + t=(GrammarAST)match(input,ID,FOLLOW_ID_in_tokensSection336); if (state.failed) return ; if ( state.backtracking==1 ) { terminals.add(t); tokenIDRefs.add(t); tokensDefs.add(t); } @@ -589,30 +602,37 @@ 
public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR end "tokensSection" - // $ANTLR start "rule" - // CollectSymbols.g:143:1: rule : ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ^( BLOCK ( . )+ ) ( . )* ) ; - public final void rule() throws RecognitionException { - GrammarAST name=null; - GrammarAST RULE7=null; - GrammarAST m=null; + // $ANTLR start "mode" + // CollectSymbols.g:145:1: mode : ^( MODE ID ( . )+ ) ; + public final void mode() throws RecognitionException { + GrammarAST ID7=null; - List modifiers = new ArrayList(); try { - // CollectSymbols.g:145:2: ( ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ^( BLOCK ( . )+ ) ( . )* ) ) - // CollectSymbols.g:145:6: ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ^( BLOCK ( . )+ ) ( . )* ) + // CollectSymbols.g:145:5: ( ^( MODE ID ( . )+ ) ) + // CollectSymbols.g:145:7: ^( MODE ID ( . )+ ) { - RULE7=(GrammarAST)match(input,RULE,FOLLOW_RULE_in_rule357); if (state.failed) return ; + match(input,MODE,FOLLOW_MODE_in_mode355); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - name=(GrammarAST)match(input,ID,FOLLOW_ID_in_rule369); if (state.failed) return ; - // CollectSymbols.g:146:17: ( options {greedy=false; } : . )* + ID7=(GrammarAST)match(input,ID,FOLLOW_ID_in_mode357); if (state.failed) return ; + // CollectSymbols.g:145:17: ( . )+ + int cnt4=0; loop4: do { int alt4=2; - alt4 = dfa4.predict(input); + int LA4_0 = input.LA(1); + + if ( ((LA4_0>=SEMPRED && LA4_0<=ALT_REWRITE)) ) { + alt4=1; + } + else if ( (LA4_0==UP) ) { + alt4=2; + } + + switch (alt4) { case 1 : - // CollectSymbols.g:146:42: . + // CollectSymbols.g:145:17: . { matchAny(input); if (state.failed) return ; @@ -620,39 +640,99 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { break; default : - break loop4; + if ( cnt4 >= 1 ) break loop4; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(4, input); + throw eee; + } + cnt4++; + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + currentMode = (ID7!=null?ID7.getText():null); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "mode" + + + // $ANTLR start "rule" + // CollectSymbols.g:147:1: rule : ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ^( BLOCK ( . )+ ) ( . )* ) ; + public final void rule() throws RecognitionException { + GrammarAST name=null; + GrammarAST RULE8=null; + GrammarAST m=null; + + List modifiers = new ArrayList(); + try { + // CollectSymbols.g:149:2: ( ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ^( BLOCK ( . )+ ) ( . )* ) ) + // CollectSymbols.g:149:6: ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ^( BLOCK ( . )+ ) ( . )* ) + { + RULE8=(GrammarAST)match(input,RULE,FOLLOW_RULE_in_rule382); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + name=(GrammarAST)match(input,ID,FOLLOW_ID_in_rule394); if (state.failed) return ; + // CollectSymbols.g:150:17: ( options {greedy=false; } : . )* + loop5: + do { + int alt5=2; + alt5 = dfa5.predict(input); + switch (alt5) { + case 1 : + // CollectSymbols.g:150:42: . 
+ { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + break loop5; } } while (true); - // CollectSymbols.g:147:9: ( ^( RULEMODIFIERS (m= . )+ ) )? - int alt6=2; - int LA6_0 = input.LA(1); + // CollectSymbols.g:151:9: ( ^( RULEMODIFIERS (m= . )+ ) )? + int alt7=2; + int LA7_0 = input.LA(1); - if ( (LA6_0==RULEMODIFIERS) ) { - alt6=1; + if ( (LA7_0==RULEMODIFIERS) ) { + alt7=1; } - switch (alt6) { + switch (alt7) { case 1 : - // CollectSymbols.g:147:10: ^( RULEMODIFIERS (m= . )+ ) + // CollectSymbols.g:151:10: ^( RULEMODIFIERS (m= . )+ ) { - match(input,RULEMODIFIERS,FOLLOW_RULEMODIFIERS_in_rule393); if (state.failed) return ; + match(input,RULEMODIFIERS,FOLLOW_RULEMODIFIERS_in_rule418); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - // CollectSymbols.g:147:26: (m= . )+ - int cnt5=0; - loop5: + // CollectSymbols.g:151:26: (m= . )+ + int cnt6=0; + loop6: do { - int alt5=2; - int LA5_0 = input.LA(1); + int alt6=2; + int LA6_0 = input.LA(1); - if ( ((LA5_0>=SEMPRED && LA5_0<=ALT_REWRITE)) ) { - alt5=1; + if ( ((LA6_0>=SEMPRED && LA6_0<=ALT_REWRITE)) ) { + alt6=1; } - switch (alt5) { + switch (alt6) { case 1 : - // CollectSymbols.g:147:27: m= . + // CollectSymbols.g:151:27: m= . { m=(GrammarAST)input.LT(1); matchAny(input); if (state.failed) return ; @@ -664,13 +744,13 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { break; default : - if ( cnt5 >= 1 ) break loop5; + if ( cnt6 >= 1 ) break loop6; if (state.backtracking>0) {state.failed=true; return ;} EarlyExitException eee = - new EarlyExitException(5, input); + new EarlyExitException(6, input); throw eee; } - cnt5++; + cnt6++; } while (true); @@ -681,46 +761,11 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } - match(input,BLOCK,FOLLOW_BLOCK_in_rule416); if (state.failed) return ; + match(input,BLOCK,FOLLOW_BLOCK_in_rule441); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - // CollectSymbols.g:148:17: ( . )+ - int cnt7=0; - loop7: - do { - int alt7=2; - int LA7_0 = input.LA(1); - - if ( ((LA7_0>=SEMPRED && LA7_0<=ALT_REWRITE)) ) { - alt7=1; - } - else if ( (LA7_0==UP) ) { - alt7=2; - } - - - switch (alt7) { - case 1 : - // CollectSymbols.g:148:17: . - { - matchAny(input); if (state.failed) return ; - - } - break; - - default : - if ( cnt7 >= 1 ) break loop7; - if (state.backtracking>0) {state.failed=true; return ;} - EarlyExitException eee = - new EarlyExitException(7, input); - throw eee; - } - cnt7++; - } while (true); - - - match(input, Token.UP, null); if (state.failed) return ; - // CollectSymbols.g:149:9: ( . )* + // CollectSymbols.g:152:17: ( . )+ + int cnt8=0; loop8: do { int alt8=2; @@ -736,7 +781,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { switch (alt8) { case 1 : - // CollectSymbols.g:149:9: . + // CollectSymbols.g:152:17: . { matchAny(input); if (state.failed) return ; @@ -744,7 +789,42 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { break; default : - break loop8; + if ( cnt8 >= 1 ) break loop8; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(8, input); + throw eee; + } + cnt8++; + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + // CollectSymbols.g:153:9: ( . 
)* + loop9: + do { + int alt9=2; + int LA9_0 = input.LA(1); + + if ( ((LA9_0>=SEMPRED && LA9_0<=ALT_REWRITE)) ) { + alt9=1; + } + else if ( (LA9_0==UP) ) { + alt9=2; + } + + + switch (alt9) { + case 1 : + // CollectSymbols.g:153:9: . + { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + break loop9; } } while (true); @@ -752,8 +832,10 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { match(input, Token.UP, null); if (state.failed) return ; if ( state.backtracking==1 ) { - int numAlts = RULE7.getFirstChildWithType(BLOCK).getChildCount(); - Rule r = new Rule(g, (name!=null?name.getText():null), (GrammarASTWithOptions)RULE7, numAlts); + int numAlts = RULE8.getFirstChildWithType(BLOCK).getChildCount(); + Rule r = new Rule(g, (name!=null?name.getText():null), (GrammarASTWithOptions)RULE8, numAlts); + if ( g.isLexer() ) r.mode = currentMode; + else r.mode = LexerGrammar.DEFAULT_MODE_NAME; if ( modifiers.size()>0 ) r.modifiers = modifiers; rules.add(r); currentRule = r; @@ -778,14 +860,14 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { }; // $ANTLR start "setAlt" - // CollectSymbols.g:161:1: setAlt : {...}? ( ALT | ALT_REWRITE ) ; + // CollectSymbols.g:167:1: setAlt : {...}? ( ALT | ALT_REWRITE ) ; public final CollectSymbols.setAlt_return setAlt() throws RecognitionException { CollectSymbols.setAlt_return retval = new CollectSymbols.setAlt_return(); retval.start = input.LT(1); try { - // CollectSymbols.g:162:2: ({...}? ( ALT | ALT_REWRITE ) ) - // CollectSymbols.g:162:4: {...}? ( ALT | ALT_REWRITE ) + // CollectSymbols.g:168:2: ({...}? ( ALT | ALT_REWRITE ) ) + // CollectSymbols.g:168:4: {...}? ( ALT | ALT_REWRITE ) { if ( !((inContext("RULE BLOCK"))) ) { if (state.backtracking>0) {state.failed=true; return retval;} @@ -820,13 +902,13 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "finishRule" - // CollectSymbols.g:166:1: finishRule : RULE ; + // CollectSymbols.g:172:1: finishRule : RULE ; public final void finishRule() throws RecognitionException { try { - // CollectSymbols.g:167:2: ( RULE ) - // CollectSymbols.g:167:4: RULE + // CollectSymbols.g:173:2: ( RULE ) + // CollectSymbols.g:173:4: RULE { - match(input,RULE,FOLLOW_RULE_in_finishRule484); if (state.failed) return ; + match(input,RULE,FOLLOW_RULE_in_finishRule508); if (state.failed) return ; if ( state.backtracking==1 ) { currentRule = null; } @@ -846,30 +928,30 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "ruleNamedAction" - // CollectSymbols.g:170:1: ruleNamedAction : {...}? ^( AT ID ACTION ) ; + // CollectSymbols.g:176:1: ruleNamedAction : {...}? ^( AT ID ACTION ) ; public final void ruleNamedAction() throws RecognitionException { - GrammarAST ID8=null; - GrammarAST ACTION9=null; + GrammarAST ID9=null; + GrammarAST ACTION10=null; try { - // CollectSymbols.g:171:2: ({...}? ^( AT ID ACTION ) ) - // CollectSymbols.g:171:4: {...}? ^( AT ID ACTION ) + // CollectSymbols.g:177:2: ({...}? ^( AT ID ACTION ) ) + // CollectSymbols.g:177:4: {...}? 
^( AT ID ACTION ) { if ( !((inContext("RULE"))) ) { if (state.backtracking>0) {state.failed=true; return ;} throw new FailedPredicateException(input, "ruleNamedAction", "inContext(\"RULE\")"); } - match(input,AT,FOLLOW_AT_in_ruleNamedAction500); if (state.failed) return ; + match(input,AT,FOLLOW_AT_in_ruleNamedAction524); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - ID8=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleNamedAction502); if (state.failed) return ; - ACTION9=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleNamedAction504); if (state.failed) return ; + ID9=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleNamedAction526); if (state.failed) return ; + ACTION10=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleNamedAction528); if (state.failed) return ; match(input, Token.UP, null); if (state.failed) return ; if ( state.backtracking==1 ) { - currentRule.namedActions.put((ID8!=null?ID8.getText():null),(ActionAST)ACTION9); - ((ActionAST)ACTION9).resolver = currentRule; + currentRule.namedActions.put((ID9!=null?ID9.getText():null),(ActionAST)ACTION10); + ((ActionAST)ACTION10).resolver = currentRule; } @@ -888,24 +970,24 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "ruleAction" - // CollectSymbols.g:178:1: ruleAction : {...}? ACTION ; + // CollectSymbols.g:184:1: ruleAction : {...}? ACTION ; public final void ruleAction() throws RecognitionException { - GrammarAST ACTION10=null; + GrammarAST ACTION11=null; try { - // CollectSymbols.g:179:2: ({...}? ACTION ) - // CollectSymbols.g:179:4: {...}? ACTION + // CollectSymbols.g:185:2: ({...}? ACTION ) + // CollectSymbols.g:185:4: {...}? ACTION { if ( !((inContext("RULE ...")&&!inContext("SCOPE")&& !inContext("CATCH")&&!inContext("FINALLY")&&!inContext("AT"))) ) { if (state.backtracking>0) {state.failed=true; return ;} throw new FailedPredicateException(input, "ruleAction", "inContext(\"RULE ...\")&&!inContext(\"SCOPE\")&&\n\t\t !inContext(\"CATCH\")&&!inContext(\"FINALLY\")&&!inContext(\"AT\")"); } - ACTION10=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleAction524); if (state.failed) return ; + ACTION11=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleAction548); if (state.failed) return ; if ( state.backtracking==1 ) { - currentRule.alt[currentAlt].actions.add((ActionAST)ACTION10); - ((ActionAST)ACTION10).resolver = currentRule.alt[currentAlt]; + currentRule.alt[currentAlt].actions.add((ActionAST)ACTION11); + ((ActionAST)ACTION11).resolver = currentRule.alt[currentAlt]; } @@ -924,55 +1006,19 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "exceptionHandler" - // CollectSymbols.g:188:1: exceptionHandler : ^( CATCH ARG_ACTION ACTION ) ; + // CollectSymbols.g:194:1: exceptionHandler : ^( CATCH ARG_ACTION ACTION ) ; public final void exceptionHandler() throws RecognitionException { - GrammarAST ACTION11=null; - - try { - // CollectSymbols.g:189:2: ( ^( CATCH ARG_ACTION ACTION ) ) - // CollectSymbols.g:189:4: ^( CATCH ARG_ACTION ACTION ) - { - match(input,CATCH,FOLLOW_CATCH_in_exceptionHandler540); if (state.failed) return ; - - match(input, Token.DOWN, null); if (state.failed) return ; - match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_exceptionHandler542); if (state.failed) return ; - ACTION11=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_exceptionHandler544); if (state.failed) return ; - - match(input, Token.UP, null); if (state.failed) return ; - if ( state.backtracking==1 ) { - - 
currentRule.exceptionActions.add((ActionAST)ACTION11); - ((ActionAST)ACTION11).resolver = currentRule; - - } - - } - - } - catch (RecognitionException re) { - reportError(re); - recover(input,re); - } - finally { - } - return ; - } - // $ANTLR end "exceptionHandler" - - - // $ANTLR start "finallyClause" - // CollectSymbols.g:196:1: finallyClause : ^( FINALLY ACTION ) ; - public final void finallyClause() throws RecognitionException { GrammarAST ACTION12=null; try { - // CollectSymbols.g:197:2: ( ^( FINALLY ACTION ) ) - // CollectSymbols.g:197:4: ^( FINALLY ACTION ) + // CollectSymbols.g:195:2: ( ^( CATCH ARG_ACTION ACTION ) ) + // CollectSymbols.g:195:4: ^( CATCH ARG_ACTION ACTION ) { - match(input,FINALLY,FOLLOW_FINALLY_in_finallyClause561); if (state.failed) return ; + match(input,CATCH,FOLLOW_CATCH_in_exceptionHandler564); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - ACTION12=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_finallyClause563); if (state.failed) return ; + match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_exceptionHandler566); if (state.failed) return ; + ACTION12=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_exceptionHandler568); if (state.failed) return ; match(input, Token.UP, null); if (state.failed) return ; if ( state.backtracking==1 ) { @@ -993,27 +1039,63 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } return ; } + // $ANTLR end "exceptionHandler" + + + // $ANTLR start "finallyClause" + // CollectSymbols.g:202:1: finallyClause : ^( FINALLY ACTION ) ; + public final void finallyClause() throws RecognitionException { + GrammarAST ACTION13=null; + + try { + // CollectSymbols.g:203:2: ( ^( FINALLY ACTION ) ) + // CollectSymbols.g:203:4: ^( FINALLY ACTION ) + { + match(input,FINALLY,FOLLOW_FINALLY_in_finallyClause585); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + ACTION13=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_finallyClause587); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + + currentRule.exceptionActions.add((ActionAST)ACTION13); + ((ActionAST)ACTION13).resolver = currentRule; + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } // $ANTLR end "finallyClause" // $ANTLR start "ruleArg" - // CollectSymbols.g:204:1: ruleArg : {...}? ARG_ACTION ; + // CollectSymbols.g:210:1: ruleArg : {...}? ARG_ACTION ; public final void ruleArg() throws RecognitionException { - GrammarAST ARG_ACTION13=null; + GrammarAST ARG_ACTION14=null; try { - // CollectSymbols.g:205:2: ({...}? ARG_ACTION ) - // CollectSymbols.g:205:4: {...}? ARG_ACTION + // CollectSymbols.g:211:2: ({...}? ARG_ACTION ) + // CollectSymbols.g:211:4: {...}? 
ARG_ACTION { if ( !((inContext("RULE"))) ) { if (state.backtracking>0) {state.failed=true; return ;} throw new FailedPredicateException(input, "ruleArg", "inContext(\"RULE\")"); } - ARG_ACTION13=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleArg583); if (state.failed) return ; + ARG_ACTION14=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleArg607); if (state.failed) return ; if ( state.backtracking==1 ) { - currentRule.args = ScopeParser.parseTypeList((ARG_ACTION13!=null?ARG_ACTION13.getText():null)); - currentRule.args.ast = ARG_ACTION13; + currentRule.args = ScopeParser.parseTypeList((ARG_ACTION14!=null?ARG_ACTION14.getText():null)); + currentRule.args.ast = ARG_ACTION14; } @@ -1032,24 +1114,24 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "ruleReturns" - // CollectSymbols.g:212:1: ruleReturns : ^( RETURNS ARG_ACTION ) ; + // CollectSymbols.g:218:1: ruleReturns : ^( RETURNS ARG_ACTION ) ; public final void ruleReturns() throws RecognitionException { - GrammarAST ARG_ACTION14=null; + GrammarAST ARG_ACTION15=null; try { - // CollectSymbols.g:213:2: ( ^( RETURNS ARG_ACTION ) ) - // CollectSymbols.g:213:4: ^( RETURNS ARG_ACTION ) + // CollectSymbols.g:219:2: ( ^( RETURNS ARG_ACTION ) ) + // CollectSymbols.g:219:4: ^( RETURNS ARG_ACTION ) { - match(input,RETURNS,FOLLOW_RETURNS_in_ruleReturns600); if (state.failed) return ; + match(input,RETURNS,FOLLOW_RETURNS_in_ruleReturns624); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - ARG_ACTION14=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleReturns602); if (state.failed) return ; + ARG_ACTION15=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleReturns626); if (state.failed) return ; match(input, Token.UP, null); if (state.failed) return ; if ( state.backtracking==1 ) { - currentRule.retvals = ScopeParser.parseTypeList((ARG_ACTION14!=null?ARG_ACTION14.getText():null)); - currentRule.retvals.ast = ARG_ACTION14; + currentRule.retvals = ScopeParser.parseTypeList((ARG_ACTION15!=null?ARG_ACTION15.getText():null)); + currentRule.retvals.ast = ARG_ACTION15; } @@ -1068,40 +1150,40 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "ruleScopeSpec" - // CollectSymbols.g:220:1: ruleScopeSpec : {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) ; + // CollectSymbols.g:226:1: ruleScopeSpec : {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) ; public final void ruleScopeSpec() throws RecognitionException { - GrammarAST ACTION15=null; + GrammarAST ACTION16=null; GrammarAST ids=null; List list_ids=null; try { - // CollectSymbols.g:221:2: ({...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) ) - // CollectSymbols.g:221:4: {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) + // CollectSymbols.g:227:2: ({...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) ) + // CollectSymbols.g:227:4: {...}? 
( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) { if ( !((inContext("RULE"))) ) { if (state.backtracking>0) {state.failed=true; return ;} throw new FailedPredicateException(input, "ruleScopeSpec", "inContext(\"RULE\")"); } - // CollectSymbols.g:222:3: ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) - int alt10=2; - int LA10_0 = input.LA(1); + // CollectSymbols.g:228:3: ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) + int alt11=2; + int LA11_0 = input.LA(1); - if ( (LA10_0==SCOPE) ) { - int LA10_1 = input.LA(2); + if ( (LA11_0==SCOPE) ) { + int LA11_1 = input.LA(2); - if ( (LA10_1==DOWN) ) { - int LA10_2 = input.LA(3); + if ( (LA11_1==DOWN) ) { + int LA11_2 = input.LA(3); - if ( (LA10_2==ACTION) ) { - alt10=1; + if ( (LA11_2==ACTION) ) { + alt11=1; } - else if ( (LA10_2==ID) ) { - alt10=2; + else if ( (LA11_2==ID) ) { + alt11=2; } else { if (state.backtracking>0) {state.failed=true; return ;} NoViableAltException nvae = - new NoViableAltException("", 10, 2, input); + new NoViableAltException("", 11, 2, input); throw nvae; } @@ -1109,7 +1191,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { else { if (state.backtracking>0) {state.failed=true; return ;} NoViableAltException nvae = - new NoViableAltException("", 10, 1, input); + new NoViableAltException("", 11, 1, input); throw nvae; } @@ -1117,53 +1199,53 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { else { if (state.backtracking>0) {state.failed=true; return ;} NoViableAltException nvae = - new NoViableAltException("", 10, 0, input); + new NoViableAltException("", 11, 0, input); throw nvae; } - switch (alt10) { + switch (alt11) { case 1 : - // CollectSymbols.g:222:5: ^( SCOPE ACTION ) + // CollectSymbols.g:228:5: ^( SCOPE ACTION ) { - match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec625); if (state.failed) return ; + match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec649); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - ACTION15=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleScopeSpec627); if (state.failed) return ; + ACTION16=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleScopeSpec651); if (state.failed) return ; match(input, Token.UP, null); if (state.failed) return ; if ( state.backtracking==1 ) { - currentRule.scope = ScopeParser.parseDynamicScope((ACTION15!=null?ACTION15.getText():null)); + currentRule.scope = ScopeParser.parseDynamicScope((ACTION16!=null?ACTION16.getText():null)); currentRule.scope.name = currentRule.name; - currentRule.scope.ast = ACTION15; + currentRule.scope.ast = ACTION16; } } break; case 2 : - // CollectSymbols.g:228:5: ^( SCOPE (ids+= ID )+ ) + // CollectSymbols.g:234:5: ^( SCOPE (ids+= ID )+ ) { - match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec640); if (state.failed) return ; + match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec664); if (state.failed) return ; match(input, Token.DOWN, null); if (state.failed) return ; - // CollectSymbols.g:228:16: (ids+= ID )+ - int cnt9=0; - loop9: + // CollectSymbols.g:234:16: (ids+= ID )+ + int cnt10=0; + loop10: do { - int alt9=2; - int LA9_0 = input.LA(1); + int alt10=2; + int LA10_0 = input.LA(1); - if ( (LA9_0==ID) ) { - alt9=1; + if ( (LA10_0==ID) ) { + alt10=1; } - switch (alt9) { + switch (alt10) { case 1 : - // CollectSymbols.g:228:16: ids+= ID + // CollectSymbols.g:234:16: ids+= ID { - ids=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleScopeSpec644); if (state.failed) return ; + ids=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleScopeSpec668); if (state.failed) return ; 
if (list_ids==null) list_ids=new ArrayList(); list_ids.add(ids); @@ -1172,13 +1254,13 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { break; default : - if ( cnt9 >= 1 ) break loop9; + if ( cnt10 >= 1 ) break loop10; if (state.backtracking>0) {state.failed=true; return ;} EarlyExitException eee = - new EarlyExitException(9, input); + new EarlyExitException(10, input); throw eee; } - cnt9++; + cnt10++; } while (true); @@ -1210,14 +1292,14 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { }; // $ANTLR start "rewriteElement" - // CollectSymbols.g:232:1: rewriteElement : {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) ; + // CollectSymbols.g:238:1: rewriteElement : {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) ; public final CollectSymbols.rewriteElement_return rewriteElement() throws RecognitionException { CollectSymbols.rewriteElement_return retval = new CollectSymbols.rewriteElement_return(); retval.start = input.LT(1); try { - // CollectSymbols.g:234:2: ({...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) ) - // CollectSymbols.g:235:6: {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) + // CollectSymbols.g:240:2: ({...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) ) + // CollectSymbols.g:241:6: {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) { if ( !((inContext("RESULT ..."))) ) { if (state.backtracking>0) {state.failed=true; return retval;} @@ -1254,7 +1336,7 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { }; // $ANTLR start "labeledElement" - // CollectSymbols.g:239:1: labeledElement : {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) ; + // CollectSymbols.g:245:1: labeledElement : {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) ; public final CollectSymbols.labeledElement_return labeledElement() throws RecognitionException { CollectSymbols.labeledElement_return retval = new CollectSymbols.labeledElement_return(); retval.start = input.LT(1); @@ -1263,38 +1345,38 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { GrammarAST e=null; try { - // CollectSymbols.g:245:2: ({...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) ) - // CollectSymbols.g:245:4: {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) + // CollectSymbols.g:251:2: ({...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) ) + // CollectSymbols.g:251:4: {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) { if ( !((inContext("RULE ..."))) ) { if (state.backtracking>0) {state.failed=true; return retval;} throw new FailedPredicateException(input, "labeledElement", "inContext(\"RULE ...\")"); } - // CollectSymbols.g:246:3: ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) - int alt11=2; - int LA11_0 = input.LA(1); + // CollectSymbols.g:252:3: ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) + int alt12=2; + int LA12_0 = input.LA(1); - if ( (LA11_0==ASSIGN) ) { - alt11=1; + if ( (LA12_0==ASSIGN) ) { + alt12=1; } - else if ( (LA11_0==PLUS_ASSIGN) ) { - alt11=2; + else if ( (LA12_0==PLUS_ASSIGN) ) { + alt12=2; } else { if (state.backtracking>0) {state.failed=true; return retval;} NoViableAltException nvae = - new NoViableAltException("", 11, 0, input); + new NoViableAltException("", 12, 0, input); throw nvae; } - switch (alt11) { + switch (alt12) { case 1 : - // CollectSymbols.g:246:5: ^( ASSIGN id= ID e= . ) + // CollectSymbols.g:252:5: ^( ASSIGN id= ID e= . 
) { - match(input,ASSIGN,FOLLOW_ASSIGN_in_labeledElement708); if (state.failed) return retval; + match(input,ASSIGN,FOLLOW_ASSIGN_in_labeledElement732); if (state.failed) return retval; match(input, Token.DOWN, null); if (state.failed) return retval; - id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement712); if (state.failed) return retval; + id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement736); if (state.failed) return retval; e=(GrammarAST)input.LT(1); matchAny(input); if (state.failed) return retval; @@ -1303,12 +1385,12 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 2 : - // CollectSymbols.g:247:5: ^( PLUS_ASSIGN id= ID e= . ) + // CollectSymbols.g:253:5: ^( PLUS_ASSIGN id= ID e= . ) { - match(input,PLUS_ASSIGN,FOLLOW_PLUS_ASSIGN_in_labeledElement724); if (state.failed) return retval; + match(input,PLUS_ASSIGN,FOLLOW_PLUS_ASSIGN_in_labeledElement748); if (state.failed) return retval; match(input, Token.DOWN, null); if (state.failed) return retval; - id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement728); if (state.failed) return retval; + id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement752); if (state.failed) return retval; e=(GrammarAST)input.LT(1); matchAny(input); if (state.failed) return retval; @@ -1344,47 +1426,47 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { }; // $ANTLR start "terminal" - // CollectSymbols.g:251:1: terminal : ({...}? STRING_LITERAL | TOKEN_REF ); + // CollectSymbols.g:257:1: terminal : ({...}? STRING_LITERAL | TOKEN_REF ); public final CollectSymbols.terminal_return terminal() throws RecognitionException { CollectSymbols.terminal_return retval = new CollectSymbols.terminal_return(); retval.start = input.LT(1); - GrammarAST STRING_LITERAL16=null; - GrammarAST TOKEN_REF17=null; + GrammarAST STRING_LITERAL17=null; + GrammarAST TOKEN_REF18=null; try { - // CollectSymbols.g:252:5: ({...}? STRING_LITERAL | TOKEN_REF ) - int alt12=2; - int LA12_0 = input.LA(1); + // CollectSymbols.g:258:5: ({...}? STRING_LITERAL | TOKEN_REF ) + int alt13=2; + int LA13_0 = input.LA(1); - if ( (LA12_0==STRING_LITERAL) ) { - alt12=1; + if ( (LA13_0==STRING_LITERAL) ) { + alt13=1; } - else if ( (LA12_0==TOKEN_REF) ) { - alt12=2; + else if ( (LA13_0==TOKEN_REF) ) { + alt13=2; } else { if (state.backtracking>0) {state.failed=true; return retval;} NoViableAltException nvae = - new NoViableAltException("", 12, 0, input); + new NoViableAltException("", 13, 0, input); throw nvae; } - switch (alt12) { + switch (alt13) { case 1 : - // CollectSymbols.g:252:7: {...}? STRING_LITERAL + // CollectSymbols.g:258:7: {...}? 
STRING_LITERAL { if ( !((!inContext("TOKENS ASSIGN"))) ) { if (state.backtracking>0) {state.failed=true; return retval;} throw new FailedPredicateException(input, "terminal", "!inContext(\"TOKENS ASSIGN\")"); } - STRING_LITERAL16=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_terminal754); if (state.failed) return retval; + STRING_LITERAL17=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_terminal778); if (state.failed) return retval; if ( state.backtracking==1 ) { terminals.add(((GrammarAST)retval.start)); - strings.add((STRING_LITERAL16!=null?STRING_LITERAL16.getText():null)); + strings.add((STRING_LITERAL17!=null?STRING_LITERAL17.getText():null)); if ( currentRule!=null ) { - currentRule.alt[currentAlt].tokenRefs.map((STRING_LITERAL16!=null?STRING_LITERAL16.getText():null), STRING_LITERAL16); + currentRule.alt[currentAlt].tokenRefs.map((STRING_LITERAL17!=null?STRING_LITERAL17.getText():null), STRING_LITERAL17); } } @@ -1392,15 +1474,15 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 2 : - // CollectSymbols.g:260:7: TOKEN_REF + // CollectSymbols.g:266:7: TOKEN_REF { - TOKEN_REF17=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_terminal769); if (state.failed) return retval; + TOKEN_REF18=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_terminal793); if (state.failed) return retval; if ( state.backtracking==1 ) { - terminals.add(TOKEN_REF17); - tokenIDRefs.add(TOKEN_REF17); + terminals.add(TOKEN_REF18); + tokenIDRefs.add(TOKEN_REF18); if ( currentRule!=null ) { - currentRule.alt[currentAlt].tokenRefs.map((TOKEN_REF17!=null?TOKEN_REF17.getText():null), TOKEN_REF17); + currentRule.alt[currentAlt].tokenRefs.map((TOKEN_REF18!=null?TOKEN_REF18.getText():null), TOKEN_REF18); } } @@ -1422,31 +1504,31 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { // $ANTLR start "ruleref" - // CollectSymbols.g:270:1: ruleref : ({...}?r= RULE_REF | r= RULE_REF ) ; + // CollectSymbols.g:276:1: ruleref : ({...}?r= RULE_REF | r= RULE_REF ) ; public final void ruleref() throws RecognitionException { GrammarAST r=null; try { - // CollectSymbols.g:272:5: ( ({...}?r= RULE_REF | r= RULE_REF ) ) - // CollectSymbols.g:272:7: ({...}?r= RULE_REF | r= RULE_REF ) + // CollectSymbols.g:278:5: ( ({...}?r= RULE_REF | r= RULE_REF ) ) + // CollectSymbols.g:278:7: ({...}?r= RULE_REF | r= RULE_REF ) { - // CollectSymbols.g:272:7: ({...}?r= RULE_REF | r= RULE_REF ) - int alt13=2; - int LA13_0 = input.LA(1); + // CollectSymbols.g:278:7: ({...}?r= RULE_REF | r= RULE_REF ) + int alt14=2; + int LA14_0 = input.LA(1); - if ( (LA13_0==RULE_REF) ) { - int LA13_1 = input.LA(2); + if ( (LA14_0==RULE_REF) ) { + int LA14_1 = input.LA(2); if ( ((inContext("DOT ..."))) ) { - alt13=1; + alt14=1; } else if ( (true) ) { - alt13=2; + alt14=2; } else { if (state.backtracking>0) {state.failed=true; return ;} NoViableAltException nvae = - new NoViableAltException("", 13, 1, input); + new NoViableAltException("", 14, 1, input); throw nvae; } @@ -1454,19 +1536,19 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { else { if (state.backtracking>0) {state.failed=true; return ;} NoViableAltException nvae = - new NoViableAltException("", 13, 0, input); + new NoViableAltException("", 14, 0, input); throw nvae; } - switch (alt13) { + switch (alt14) { case 1 : - // CollectSymbols.g:272:9: {...}?r= RULE_REF + // CollectSymbols.g:278:9: {...}?r= RULE_REF { if ( !((inContext("DOT ..."))) ) { if (state.backtracking>0) 
{state.failed=true; return ;} throw new FailedPredicateException(input, "ruleref", "inContext(\"DOT ...\")"); } - r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref806); if (state.failed) return ; + r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref830); if (state.failed) return ; if ( state.backtracking==1 ) { qualifiedRulerefs.add((GrammarAST)r.getParent()); } @@ -1474,9 +1556,9 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { } break; case 2 : - // CollectSymbols.g:274:8: r= RULE_REF + // CollectSymbols.g:280:8: r= RULE_REF { - r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref819); if (state.failed) return ; + r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref843); if (state.failed) return ; } break; @@ -1509,31 +1591,32 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { protected DFA1 dfa1 = new DFA1(this); - protected DFA4 dfa4 = new DFA4(this); + protected DFA5 dfa5 = new DFA5(this); static final String DFA1_eotS = - "\41\uffff"; + "\42\uffff"; static final String DFA1_eofS = - "\41\uffff"; + "\42\uffff"; static final String DFA1_minS = - "\1\16\3\2\4\uffff\3\0\6\uffff\1\20\2\127\2\uffff\1\3\1\uffff\1\20"+ + "\1\16\3\2\5\uffff\3\0\6\uffff\1\20\2\127\2\uffff\1\3\1\uffff\1\20"+ "\1\4\1\uffff\1\3\1\uffff\1\2\2\0\1\uffff"; static final String DFA1_maxS = - "\1\146\3\2\4\uffff\3\0\6\uffff\3\127\2\uffff\1\127\1\uffff\1\127"+ + "\1\146\3\2\5\uffff\3\0\6\uffff\3\127\2\uffff\1\127\1\uffff\1\127"+ "\1\146\1\uffff\1\3\1\uffff\1\3\2\0\1\uffff"; static final String DFA1_acceptS = - "\4\uffff\1\3\1\4\1\5\1\6\3\uffff\1\12\1\14\1\15\1\16\1\17\1\20\3"+ - "\uffff\1\11\1\13\1\uffff\1\10\2\uffff\1\1\1\uffff\1\2\3\uffff\1"+ - "\7"; + "\4\uffff\1\3\1\4\1\5\1\6\1\7\3\uffff\1\13\1\15\1\16\1\17\1\20\1"+ + "\21\3\uffff\1\12\1\14\1\uffff\1\11\2\uffff\1\1\1\uffff\1\2\3\uffff"+ + "\1\10"; static final String DFA1_specialS = - "\10\uffff\1\4\1\2\1\0\23\uffff\1\1\1\3\1\uffff}>"; + "\11\uffff\1\3\1\4\1\1\23\uffff\1\2\1\0\1\uffff}>"; static final String[] DFA1_transitionS = { - "\1\6\1\uffff\1\16\4\uffff\1\1\11\uffff\1\7\1\uffff\1\20\1\17"+ - "\13\uffff\1\3\4\uffff\1\14\10\uffff\1\2\2\uffff\1\12\1\10\3"+ - "\uffff\1\11\4\uffff\1\5\13\uffff\1\15\1\uffff\1\4\5\uffff\1"+ - "\13\10\uffff\1\15", - "\1\21", + "\1\7\1\uffff\1\17\4\uffff\1\1\11\uffff\1\10\1\uffff\1\21\1\20"+ + "\1\uffff\1\5\11\uffff\1\3\4\uffff\1\15\10\uffff\1\2\2\uffff"+ + "\1\13\1\11\3\uffff\1\12\4\uffff\1\6\13\uffff\1\16\1\uffff\1"+ + "\4\5\uffff\1\14\10\uffff\1\16", "\1\22", "\1\23", + "\1\24", + "", "", "", "", @@ -1547,19 +1630,19 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { "", "", "", - "\1\27\106\uffff\1\26", - "\1\30", + "\1\30\106\uffff\1\27", "\1\31", + "\1\32", "", "", - "\1\27\14\uffff\1\32\106\uffff\1\27", + "\1\30\14\uffff\1\33\106\uffff\1\30", "", - "\1\33\106\uffff\1\34", - "\100\14\1\35\42\14", + "\1\34\106\uffff\1\35", + "\100\15\1\36\42\15", "", - "\1\36", + "\1\37", "", - "\1\14\1\37", + "\1\15\1\40", "\1\uffff", "\1\uffff", "" @@ -1595,85 +1678,85 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { this.transition = DFA1_transition; } public String getDescription() { - return "93:1: topdown : ( globalScope | globalNamedAction | tokensSection | rule | ruleArg | ruleReturns | ruleNamedAction | ruleScopeSpec | ruleref | rewriteElement | terminal | labeledElement | setAlt | ruleAction | finallyClause | exceptionHandler );"; + return "94:1: topdown : ( globalScope | 
globalNamedAction | tokensSection | mode | rule | ruleArg | ruleReturns | ruleNamedAction | ruleScopeSpec | ruleref | rewriteElement | terminal | labeledElement | setAlt | ruleAction | finallyClause | exceptionHandler );"; } public int specialStateTransition(int s, IntStream _input) throws NoViableAltException { TreeNodeStream input = (TreeNodeStream)_input; int _s = s; switch ( s ) { case 0 : - int LA1_10 = input.LA(1); + int LA1_32 = input.LA(1); - int index1_10 = input.index(); + int index1_32 = input.index(); input.rewind(); s = -1; - if ( ((inContext("RESULT ..."))) ) {s = 11;} + if ( ((inContext("TOKENS"))) ) {s = 4;} - else if ( (true) ) {s = 21;} + else if ( ((inContext("RULE ..."))) ) {s = 13;} - input.seek(index1_10); + input.seek(index1_32); if ( s>=0 ) return s; break; case 1 : - int LA1_30 = input.LA(1); + int LA1_11 = input.LA(1); - int index1_30 = input.index(); + int index1_11 = input.index(); input.rewind(); s = -1; - if ( ((inContext("GRAMMAR"))) ) {s = 28;} + if ( ((inContext("RESULT ..."))) ) {s = 12;} - else if ( ((inContext("RULE"))) ) {s = 32;} + else if ( (true) ) {s = 22;} - input.seek(index1_30); + input.seek(index1_11); if ( s>=0 ) return s; break; case 2 : - int LA1_9 = input.LA(1); - - - int index1_9 = input.index(); - input.rewind(); - s = -1; - if ( ((inContext("RESULT ..."))) ) {s = 11;} - - else if ( ((!inContext("TOKENS ASSIGN"))) ) {s = 21;} - - - input.seek(index1_9); - if ( s>=0 ) return s; - break; - case 3 : int LA1_31 = input.LA(1); int index1_31 = input.index(); input.rewind(); s = -1; - if ( ((inContext("TOKENS"))) ) {s = 4;} + if ( ((inContext("GRAMMAR"))) ) {s = 29;} - else if ( ((inContext("RULE ..."))) ) {s = 12;} + else if ( ((inContext("RULE"))) ) {s = 33;} input.seek(index1_31); if ( s>=0 ) return s; break; - case 4 : - int LA1_8 = input.LA(1); + case 3 : + int LA1_9 = input.LA(1); - int index1_8 = input.index(); + int index1_9 = input.index(); input.rewind(); s = -1; - if ( (!(((inContext("RESULT ..."))))) ) {s = 20;} + if ( (!(((inContext("RESULT ..."))))) ) {s = 21;} - else if ( ((inContext("RESULT ..."))) ) {s = 11;} + else if ( ((inContext("RESULT ..."))) ) {s = 12;} - input.seek(index1_8); + input.seek(index1_9); + if ( s>=0 ) return s; + break; + case 4 : + int LA1_10 = input.LA(1); + + + int index1_10 = input.index(); + input.rewind(); + s = -1; + if ( ((inContext("RESULT ..."))) ) {s = 12;} + + else if ( ((!inContext("TOKENS ASSIGN"))) ) {s = 22;} + + + input.seek(index1_10); if ( s>=0 ) return s; break; } @@ -1684,19 +1767,19 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { throw nvae; } } - static final String DFA4_eotS = + static final String DFA5_eotS = "\26\uffff"; - static final String DFA4_eofS = + static final String DFA5_eofS = "\26\uffff"; - static final String DFA4_minS = + static final String DFA5_minS = "\1\4\2\2\1\uffff\2\4\2\2\1\uffff\1\3\3\2\3\4\2\2\2\3\1\uffff\1\3"; - static final String DFA4_maxS = + static final String DFA5_maxS = "\3\146\1\uffff\4\146\1\uffff\13\146\1\uffff\1\146"; - static final String DFA4_acceptS = + static final String DFA5_acceptS = "\3\uffff\1\1\4\uffff\1\2\13\uffff\1\2\1\uffff"; - static final String DFA4_specialS = + static final String DFA5_specialS = "\26\uffff}>"; - static final String[] DFA4_transitionS = { + static final String[] DFA5_transitionS = { "\107\3\1\1\1\3\1\2\31\3", "\1\4\1\uffff\143\3", "\1\5\1\uffff\143\3", @@ -1721,37 +1804,37 @@ public class CollectSymbols extends org.antlr.v4.runtime.tree.TreeFilter { "\1\24\107\14\1\12\1\14\1\13\31\14" }; 
- static final short[] DFA4_eot = DFA.unpackEncodedString(DFA4_eotS); - static final short[] DFA4_eof = DFA.unpackEncodedString(DFA4_eofS); - static final char[] DFA4_min = DFA.unpackEncodedStringToUnsignedChars(DFA4_minS); - static final char[] DFA4_max = DFA.unpackEncodedStringToUnsignedChars(DFA4_maxS); - static final short[] DFA4_accept = DFA.unpackEncodedString(DFA4_acceptS); - static final short[] DFA4_special = DFA.unpackEncodedString(DFA4_specialS); - static final short[][] DFA4_transition; + static final short[] DFA5_eot = DFA.unpackEncodedString(DFA5_eotS); + static final short[] DFA5_eof = DFA.unpackEncodedString(DFA5_eofS); + static final char[] DFA5_min = DFA.unpackEncodedStringToUnsignedChars(DFA5_minS); + static final char[] DFA5_max = DFA.unpackEncodedStringToUnsignedChars(DFA5_maxS); + static final short[] DFA5_accept = DFA.unpackEncodedString(DFA5_acceptS); + static final short[] DFA5_special = DFA.unpackEncodedString(DFA5_specialS); + static final short[][] DFA5_transition; static { - int numStates = DFA4_transitionS.length; - DFA4_transition = new short[numStates][]; + int numStates = DFA5_transitionS.length; + DFA5_transition = new short[numStates][]; for (int i=0; i tokenNameToTypeMap = new LinkedHashMap(); @@ -108,7 +108,7 @@ public class Grammar implements AttributeResolver { this.tool = tool; this.ast = ast; this.name = ((GrammarAST)ast.getChild(0)).getText(); - initTokenSymbolTables(); + initTokenSymbolTables(); } /** For testing */ @@ -192,7 +192,7 @@ public class Grammar implements AttributeResolver { GrammarAST root = tool.load(importedGrammarName+".g"); if ( root instanceof GrammarASTErrorNode ) return; // came back as error node GrammarRootAST ast = (GrammarRootAST)root; - Grammar g = new Grammar(tool, ast); + Grammar g = tool.createGrammar(ast); g.fileName = importedGrammarName+".g"; g.parent = this; importedGrammars.add(g); @@ -314,8 +314,7 @@ public class Grammar implements AttributeResolver { buf.append(name); qualifiedName = buf.toString(); } - if ( getType()==ANTLRParser.COMBINED || - (getType()==ANTLRParser.LEXER && implicitLexer!=null) ) + if ( isCombined() || (isLexer() && implicitLexer!=null) ) { suffix = Grammar.getGrammarTypeToFileNameSuffix(getType()); } @@ -356,7 +355,7 @@ public class Grammar implements AttributeResolver { String tokenName = null; int index=0; // inside any target's char range and is lexer grammar? 
- if ( getType()==ANTLRParser.LEXER && + if ( isLexer() && ttype >= Label.MIN_CHAR_VALUE && ttype <= Label.MAX_CHAR_VALUE ) { return Target.getANTLRCharLiteralForChar(ttype); @@ -401,7 +400,7 @@ public class Grammar implements AttributeResolver { /** Return a set of all possible token or char types for this grammar */ public IntSet getTokenTypes() { - if ( getType()==ANTLRParser.LEXER ) { + if ( isLexer() ) { return getAllCharValues(); } return IntervalSet.of(Token.MIN_TOKEN_TYPE, getMaxTokenType()); @@ -513,6 +512,11 @@ public class Grammar implements AttributeResolver { return 0; } + public boolean isLexer() { return getType()==ANTLRParser.LEXER; } + public boolean isParser() { return getType()==ANTLRParser.PARSER; } + public boolean isTreeGrammar() { return getType()==ANTLRParser.TREE; } + public boolean isCombined() { return getType()==ANTLRParser.COMBINED; } + public String getTypeString() { if ( ast==null ) return null; return ANTLRParser.tokenNames[getType()].toLowerCase(); diff --git a/tool/src/org/antlr/v4/tool/LabelElementPair.java b/tool/src/org/antlr/v4/tool/LabelElementPair.java index e0b177f75..e76171e35 100644 --- a/tool/src/org/antlr/v4/tool/LabelElementPair.java +++ b/tool/src/org/antlr/v4/tool/LabelElementPair.java @@ -29,12 +29,12 @@ public class LabelElementPair { } // now reset if lexer and string - if ( g.getType() == ANTLRParser.LEXER ) { + if ( g.isLexer() ) { if ( element.getFirstDescendantWithType(ANTLRParser.STRING_LITERAL)!=null ) { if ( labelOp==ANTLRParser.ASSIGN ) type = LabelType.LEXER_STRING_LABEL; } } - else if ( g.getType() == ANTLRParser.TREE ) { + else if ( g.isTreeGrammar() ) { if ( element.getFirstDescendantWithType(ANTLRParser.WILDCARD)!=null ) { if ( labelOp==ANTLRParser.ASSIGN ) type = LabelType.WILDCARD_TREE_LABEL; else type = LabelType.WILDCARD_TREE_LIST_LABEL; diff --git a/tool/src/org/antlr/v4/tool/LexerGrammar.java b/tool/src/org/antlr/v4/tool/LexerGrammar.java new file mode 100644 index 000000000..6bd8ccac4 --- /dev/null +++ b/tool/src/org/antlr/v4/tool/LexerGrammar.java @@ -0,0 +1,35 @@ +package org.antlr.v4.tool; + +import org.antlr.misc.MultiMap; +import org.antlr.runtime.RecognitionException; +import org.antlr.v4.Tool; + +/** */ +public class LexerGrammar extends Grammar { + public static final String DEFAULT_MODE_NAME = "DEFAULT_MODE"; + + public MultiMap modes = new MultiMap(); + //public Map modeToDecision = new HashMap(); + + public LexerGrammar(Tool tool, GrammarRootAST ast) { + super(tool, ast); + } + + public LexerGrammar(String grammarText) throws RecognitionException { + super(grammarText); + } + + public LexerGrammar(String grammarText, ANTLRToolListener listener) throws RecognitionException { + super(grammarText, listener); + } + + public LexerGrammar(String fileName, String grammarText, ANTLRToolListener listener) throws RecognitionException { + super(fileName, grammarText, listener); + } + + @Override + public void defineRule(Rule r) { + super.defineRule(r); + modes.map(r.mode, r); + } +} diff --git a/tool/src/org/antlr/v4/tool/Rule.java b/tool/src/org/antlr/v4/tool/Rule.java index a251fe626..33e819a8e 100644 --- a/tool/src/org/antlr/v4/tool/Rule.java +++ b/tool/src/org/antlr/v4/tool/Rule.java @@ -46,9 +46,14 @@ public class Rule implements AttributeResolver { public AttributeDict args; public AttributeDict retvals; public AttributeDict scope; // scope { int i; } - /** A list of scope names used by this rule */ + + /** A list of scope names used by this rule */ public List useScopes; - public Grammar g; + + public Grammar g; 
+ + /** If we're in a lexer grammar, we might be in a mode */ + public String mode; /** Map a name to an action for this rule like @init {...}. * The code generator will use this to fill holes in the rule template. diff --git a/tool/test/org/antlr/v4/test/BaseTest.java b/tool/test/org/antlr/v4/test/BaseTest.java index ae723b0c9..46c3de5d9 100644 --- a/tool/test/org/antlr/v4/test/BaseTest.java +++ b/tool/test/org/antlr/v4/test/BaseTest.java @@ -37,12 +37,8 @@ import org.antlr.v4.analysis.DFAMinimizer; import org.antlr.v4.analysis.LexerNFAToDFAConverter; import org.antlr.v4.analysis.PredictionDFAFactory; import org.antlr.v4.automata.*; -import org.antlr.v4.parse.ANTLRParser; import org.antlr.v4.semantics.SemanticPipeline; -import org.antlr.v4.tool.AmbiguityMessage; -import org.antlr.v4.tool.Grammar; -import org.antlr.v4.tool.Message; -import org.antlr.v4.tool.UnreachableAltsMessage; +import org.antlr.v4.tool.*; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -119,7 +115,7 @@ public abstract class BaseTest { } ParserNFAFactory f = new ParserNFAFactory(g); - if ( g.getType()== ANTLRParser.LEXER ) f = new LexerNFAFactory(g); + if ( g.isLexer() ) f = new LexerNFAFactory((LexerGrammar)g); return f.createNFA(); } @@ -179,13 +175,19 @@ public abstract class BaseTest { List checkLexerDFA(String gtext, String expecting) throws Exception + { + return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting); + } + + List checkLexerDFA(String gtext, String modeName, String expecting) + throws Exception { ErrorQueue equeue = new ErrorQueue(); - Grammar g = new Grammar(gtext, equeue); + LexerGrammar g = new LexerGrammar(gtext, equeue); g.nfa = createNFA(g); LexerNFAToDFAConverter conv = new LexerNFAToDFAConverter(g); - DFA dfa = conv.createDFA(); - g.setLookaheadDFA(0, dfa); // only one decision + DFA dfa = conv.createDFA(modeName); + g.setLookaheadDFA(0, dfa); // only one decision to worry about String result = null; if ( dfa!=null ) result = dfa.toString(); diff --git a/tool/test/org/antlr/v4/test/TestLexerDFAConstruction.java b/tool/test/org/antlr/v4/test/TestLexerDFAConstruction.java index 22e64c381..c3d92311f 100644 --- a/tool/test/org/antlr/v4/test/TestLexerDFAConstruction.java +++ b/tool/test/org/antlr/v4/test/TestLexerDFAConstruction.java @@ -68,25 +68,20 @@ public class TestLexerDFAConstruction extends BaseTest { checkLexerDFA(g, expecting); } - @Test public void testAplusNonGreedy() throws Exception { + @Test public void testMode() throws Exception { String g = - "lexer grammar t;\n"+ - "A : (options {greedy=false;}:'0'..'9')+ '000' ;\n"; + "lexer grammar L;\n"+ + "A : 'a' ;\n" + + "X : 'x' ;\n" + + "mode FOO;\n" + + "B : 'b' ;\n" + + "C : 'c' ;\n"; String expecting = - "\n"; - checkLexerDFA(g, expecting); + "s0-'b'->:s1=> B\n" + + "s0-'c'->:s2=> C\n"; + checkLexerDFA(g, "FOO", expecting); } - @Test public void testDotNonGreedy() throws Exception { - String g = - "lexer grammar t;\n"+ - "A : (options {greedy=false;}:.)+ '000' ;\n"; - String expecting = - "\n"; - checkLexerDFA(g, expecting); - } - - public void _template() throws Exception { String g = diff --git a/tool/test/org/antlr/v4/test/TestNFAConstruction.java b/tool/test/org/antlr/v4/test/TestNFAConstruction.java index fcdb27974..d206cb38a 100644 --- a/tool/test/org/antlr/v4/test/TestNFAConstruction.java +++ b/tool/test/org/antlr/v4/test/TestNFAConstruction.java @@ -2,9 +2,9 @@ package org.antlr.v4.test; import org.antlr.v4.Tool; import org.antlr.v4.automata.*; -import org.antlr.v4.parse.ANTLRParser; 
import org.antlr.v4.semantics.SemanticPipeline; import org.antlr.v4.tool.Grammar; +import org.antlr.v4.tool.LexerGrammar; import org.antlr.v4.tool.Rule; import org.junit.Test; @@ -56,7 +56,7 @@ public class TestNFAConstruction extends BaseTest { } @Test public void testRange() throws Exception { - Grammar g = new Grammar( + LexerGrammar g = new LexerGrammar( "lexer grammar P;\n"+ "A : 'a'..'c' ;" ); @@ -64,11 +64,11 @@ public class TestNFAConstruction extends BaseTest { "RuleStart_A_1->s3\n" + "s3-'a'..'c'->s4\n" + "s4->RuleStop_A_2\n"; - checkRule(g, "A", expecting); + checkTokensRule(g, "A", expecting); } @Test public void testRangeOrRange() throws Exception { - Grammar g = new Grammar( + LexerGrammar g = new LexerGrammar( "lexer grammar P;\n"+ "A : ('a'..'c' 'h' | 'q' 'j'..'l') ;" ); @@ -85,7 +85,7 @@ public class TestNFAConstruction extends BaseTest { "s6->BlockEnd_12\n" + "s10->BlockEnd_12\n" + "BlockEnd_12->RuleStop_A_2\n"; - checkRule(g, "A", expecting); + checkTokensRule(g, "A", expecting); } @Test public void testStringLiteralInParser() throws Exception { @@ -269,21 +269,6 @@ public class TestNFAConstruction extends BaseTest { checkRule(g, "a", expecting); } - @Test public void testAplusNonGreedy() throws Exception { - Grammar g = new Grammar( - "lexer grammar t;\n"+ - "A : (options {greedy=false;}:'0'..'9')+ ;\n"); - String expecting = - "RuleStart_A_1->PlusBlockStart_5\n" + - "PlusBlockStart_5->s3\n" + - "s3-'0'..'9'->s4\n" + - "s4->LoopBack_6\n" + - "LoopBack_6->BlockEnd_7\n" + - "LoopBack_6->s3\n" + - "BlockEnd_7->RuleStop_A_2\n"; - checkRule(g, "A", expecting); - } - @Test public void testAorBorEmptyPlus() throws Exception { Grammar g = new Grammar( "parser grammar P;\n"+ @@ -871,6 +856,75 @@ public class TestNFAConstruction extends BaseTest { checkRule(g, "a", expecting); } */ + + @Test public void testDefaultMode() throws Exception { + LexerGrammar g = new LexerGrammar( + "lexer grammar L;\n"+ + "A : 'a' ;\n" + + "X : 'x' ;\n" + + "mode FOO;\n" + + "B : 'b' ;\n" + + "C : 'c' ;\n"); + String expecting = + "BlockStart_0->RuleStart_A_2\n" + + "BlockStart_0->RuleStart_X_4\n" + + "RuleStart_A_2->s10\n" + + "RuleStart_X_4->s12\n" + + "s10-'a'->s11\n" + + "s12-'x'->s13\n" + + "s11->RuleStop_A_3\n" + + "s13->RuleStop_X_5\n"; + checkTokensRule(g, "DEFAULT_MODE", expecting); + } + + @Test public void testMode() throws Exception { + LexerGrammar g = new LexerGrammar( + "lexer grammar L;\n"+ + "A : 'a' ;\n" + + "X : 'x' ;\n" + + "mode FOO;\n" + + "B : 'b' ;\n" + + "C : 'c' ;\n"); + String expecting = + "BlockStart_1->RuleStart_B_6\n" + + "BlockStart_1->RuleStart_C_8\n" + + "RuleStart_B_6->s14\n" + + "RuleStart_C_8->s16\n" + + "s14-'b'->s15\n" + + "s16-'c'->s17\n" + + "s15->RuleStop_B_7\n" + + "s17->RuleStop_C_9\n"; + checkTokensRule(g, "FOO", expecting); + } + + void checkTokensRule(LexerGrammar g, String modeName, String expecting) { + if ( g.ast!=null && !g.ast.hasErrors ) { + System.out.println(g.ast.toStringTree()); + Tool antlr = new Tool(); + SemanticPipeline sem = new SemanticPipeline(g); + sem.process(); + if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any) + for (Grammar imp : g.getImportedGrammars()) { + antlr.process(imp); + } + } + } + + if ( g.modes.get(modeName)==null ) { + System.err.println("no such mode "+modeName); + return; + } + + ParserNFAFactory f = new LexerNFAFactory((LexerGrammar)g); + NFA nfa = f.createNFA(); + NFAState startState = nfa.modeToStartState.get(modeName); + NFASerializer serializer = new NFASerializer(g, startState); + 
String result = serializer.toString(); + + //System.out.print(result); + assertEquals(expecting, result); + } + void checkRule(Grammar g, String ruleName, String expecting) { if ( g.ast!=null && !g.ast.hasErrors ) { System.out.println(g.ast.toStringTree()); @@ -885,7 +939,6 @@ public class TestNFAConstruction extends BaseTest { } ParserNFAFactory f = new ParserNFAFactory(g); - if ( g.getType()== ANTLRParser.LEXER ) f = new LexerNFAFactory(g); NFA nfa = f.createNFA(); Rule r = g.getRule(ruleName); NFAState startState = nfa.ruleToStartState.get(r); @@ -893,7 +946,6 @@ public class TestNFAConstruction extends BaseTest { String result = serializer.toString(); //System.out.print(result); - System.out.println("test NFA checkRule: thread name: "+Thread.currentThread().getName()); assertEquals(expecting, result); } }
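
For readers who want to exercise the new mode support outside the test harness, below is a minimal, hypothetical driver sketch; it is not part of the patch. The class and method names (LexerGrammar, LexerNFAFactory, LexerNFAToDFAConverter.createDFA(modeName), nfa.modeToStartState, g.modes) all come from the diff above; the ModeDFADemo wrapper and the assumption that running SemanticPipeline alone is enough setup (as checkTokensRule() does) are illustrative guesses, not something this change set ships.

import org.antlr.v4.analysis.LexerNFAToDFAConverter;
import org.antlr.v4.automata.DFA;
import org.antlr.v4.automata.LexerNFAFactory;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.tool.LexerGrammar;

/** Hypothetical driver, not part of this change set; mirrors
 *  BaseTest.checkLexerDFA() and TestLexerDFAConstruction.testMode(). */
public class ModeDFADemo {
    public static void main(String[] args) throws Exception {
        // Same two-mode grammar used by testMode()/testDefaultMode() above.
        LexerGrammar g = new LexerGrammar(
            "lexer grammar L;\n" +
            "A : 'a' ;\n" +
            "X : 'x' ;\n" +
            "mode FOO;\n" +
            "B : 'b' ;\n" +
            "C : 'c' ;\n");

        // Assumed sufficient setup: defines rules, which in LexerGrammar
        // also maps each rule into g.modes via the overridden defineRule().
        new SemanticPipeline(g).process();

        // createNFA() records a TokensStartState per mode in nfa.modeToStartState.
        g.nfa = new LexerNFAFactory(g).createNFA();

        // One DFA per mode, the same loop AnalysisPipeline.processLexer() runs.
        for (String modeName : g.modes.keySet()) {   // DEFAULT_MODE, FOO
            DFA dfa = new LexerNFAToDFAConverter(g).createDFA(modeName);
            System.out.println("mode " + modeName + ":\n" + dfa);
        }
    }
}

If the sketch's assumptions hold, the FOO output should resemble the strings asserted in testMode(): "s0-'b'->:s1=> B" and "s0-'c'->:s2=> C".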