From 061fff09f9d3104ec5b58beee06447357511691f Mon Sep 17 00:00:00 2001
From: parrt
Date: Fri, 17 Jun 2011 16:05:00 -0800
Subject: [PATCH] adding new files

[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 8659]

---
 .../Java/src/org/antlr/v4/runtime/Parser.java      |    4 +-
 .../src/org/antlr/v4/runtime/atn/ATN.java          |   21 +-
 .../antlr/v4/runtime/atn/ATNInterpreter.java       |    5 +-
 .../v4/runtime/atn/ActionTransition.java           |    4 -
 .../antlr/v4/runtime/atn/AtomTransition.java       |   22 -
 .../v4/runtime/atn/EpsilonTransition.java          |    4 -
 .../v4/runtime/atn/NotSetTransition.java           |    3 +-
 .../v4/runtime/atn/PredicateTransition.java        |   28 +-
 .../antlr/v4/runtime/atn/RangeTransition.java      |    7 +-
 .../antlr/v4/runtime/atn/RuleTransition.java       |    4 -
 .../antlr/v4/runtime/atn/SetTransition.java        |   22 -
 .../org/antlr/v4/runtime/atn/ThreadState.java      |   41 -
 .../org/antlr/v4/runtime/atn/Transition.java       |    5 +-
 .../v4/runtime/atn/WildcardTransition.java         |    3 -
 tool/src/org/antlr/v4/Tool.java                    |    8 +
 .../antlr/v4/analysis/AnalysisPipeline.java        |   56 +
 .../org/antlr/v4/automata/ATNSerializer.java       |   17 +
 .../antlr/v4/automata/ParserATNFactory.java        |    2 +-
 .../antlr/v4/codegen/ActionTranslator.java         |  254 ++
 .../org/antlr/v4/codegen/CodeGenerator.java        |  180 ++
 .../antlr/v4/codegen/OutputModelFactory.java       |  167 ++
 .../org/antlr/v4/codegen/ParserFactory.java        |   58 +
 .../org/antlr/v4/codegen/RuleContextDecl.java      |   12 +
 .../antlr/v4/codegen/SourceGenTriggers.java        | 1869 +++++++++++++++
 .../org/antlr/v4/codegen/model/Action.java         |   29 +
 .../org/antlr/v4/codegen/model/AddToList.java      |   15 +
 .../org/antlr/v4/codegen/model/AltBlock.java       |   20 +
 .../v4/codegen/model/CaptureNextToken.java         |    6 +
 .../codegen/model/CaptureNextTokenType.java        |    7 +
 .../org/antlr/v4/codegen/model/Choice.java         |   58 +
 .../org/antlr/v4/codegen/model/CodeBlock.java      |   27 +
 tool/src/org/antlr/v4/codegen/model/Decl.java      |   29 +
 .../antlr/v4/codegen/model/ForcedAction.java       |   10 +
 .../antlr/v4/codegen/model/LL1AltBlock.java        |   25 +
 .../org/antlr/v4/codegen/model/LL1Loop.java        |   36 +
 .../model/LL1OptionalBlockSingleAlt.java           |   36 +
 .../antlr/v4/codegen/model/LL1StarBlock.java       |   37 +
 .../codegen/model/LL1StarBlockSingleAlt.java       |   27 +
 .../org/antlr/v4/codegen/model/LabeledOp.java      |    8 +
 .../src/org/antlr/v4/codegen/model/Lexer.java      |   46 +
 .../antlr/v4/codegen/model/MatchToken.java         |   46 +
 .../antlr/v4/codegen/model/OptionalBlock.java      |   16 +
 .../org/antlr/v4/codegen/model/Parser.java         |   63 +
 .../antlr/v4/codegen/model/ParserFile.java         |   35 +
 .../antlr/v4/codegen/model/RuleElement.java        |   14 +
 .../antlr/v4/codegen/model/RuleFunction.java       |  118 +
 .../org/antlr/v4/codegen/model/SemPred.java        |    9 +
 .../src/org/antlr/v4/codegen/model/SrcOp.java      |   17 +
 .../org/antlr/v4/codegen/model/StarBlock.java      |   22 +
 .../antlr/v4/codegen/model/StructDecl.java         |   27 +
 tool/src/org/antlr/v4/codegen/model/Sync.java      |   22 +
 .../org/antlr/v4/codegen/model/TestSet.java        |   15 +
 .../antlr/v4/codegen/model/TestSetInline.java      |   21 +
 .../model/ThrowEarlyExitException.java             |   12 +
 .../v4/codegen/model/ThrowNoViableAlt.java         |   14 +
 .../model/ThrowRecognitionException.java           |   24 +
 .../org/antlr/v4/codegen/model/TokenDecl.java      |   10 +
 .../antlr/v4/codegen/model/TokenListDecl.java      |   10 +
 .../v4/codegen/model/actions/ActionText.java       |   10 +
 .../v4/codegen/model/actions/ArgRef.java           |   10 +
 .../model/actions/DynScopeAttrRef.java             |   12 +
 .../v4/codegen/model/actions/DynScopeRef.java      |   10 +
 .../v4/codegen/model/actions/RetValueRef.java      |   11 +
 .../model/actions/RulePropertyRef.java             |   10 +
 .../model/actions/RulePropertyRef_st.java          |    8 +
 .../model/actions/RulePropertyRef_start.java       |    8 +
 .../model/actions/RulePropertyRef_stop.java        |    8 +
 .../model/actions/RulePropertyRef_text.java        |    8 +
 .../model/actions/RulePropertyRef_tree.java        |    8 +
 .../v4/codegen/model/actions/SetAttr.java          |   23 +
 .../model/actions/SetDynScopeAttr.java             |   25 +
 .../model/actions/SetDynScopeAttr_index.java       |   21 +
 .../model/actions/TokenPropertyRef.java            |   10 +
 .../model/actions/TokenPropertyRef_int.java        |    8 +
 .../model/actions/TokenPropertyRef_text.java       |    8 +
 .../model/actions/TokenPropertyRef_tree.java       |    8 +
 .../model/actions/TokenPropertyRef_type.java       |    8 +
 .../antlr/v4/codegen/model/ast/MakeRoot.java       |    8 +
 tool/src/org/antlr/v4/codegen/model/dbg.java       |    5 +
 .../org/antlr/v4/parse/TokenVocabParser.java       |    9 +-
 .../org/antlr/v4/semantics/ActionSniffer.java      |   52 +
 .../antlr/v4/semantics/AttributeChecks.java        |  219 ++
 .../v4/semantics/BasicSemanticChecks.java          |  442 ++++
 .../v4/semantics/BasicSemanticTriggers.g           |  240 ++
 .../v4/semantics/BasicSemanticTriggers.java        | 1741 ++++++++++++++
 .../v4/semantics/BasicSemanticTriggers.tokens      |   99 +
 .../BlankActionSplitterListener.java               |   57 +
 .../org/antlr/v4/semantics/CollectSymbols.g        |  310 +++
 .../antlr/v4/semantics/CollectSymbols.java         | 2037 +++++++++++++++++
 .../antlr/v4/semantics/CollectSymbols.tokens       |   99 +
 .../antlr/v4/semantics/SemanticPipeline.java       |  175 ++
 .../org/antlr/v4/semantics/SymbolChecks.java       |  316 +++
 .../antlr/v4/semantics/UseDefAnalyzer.java         |   69 +
 tool/src/org/antlr/v4/tool/LexerGrammar.java       |    3 +-
 94 files changed, 9629 insertions(+), 173 deletions(-)
 delete mode 100644 runtime/Java/src/org/antlr/v4/runtime/atn/ThreadState.java
 create mode 100644 tool/src/org/antlr/v4/analysis/AnalysisPipeline.java
 create mode 100644 tool/src/org/antlr/v4/codegen/ActionTranslator.java
 create mode 100644 tool/src/org/antlr/v4/codegen/CodeGenerator.java
 create mode 100644 tool/src/org/antlr/v4/codegen/OutputModelFactory.java
 create mode 100644 tool/src/org/antlr/v4/codegen/ParserFactory.java
 create mode 100644 tool/src/org/antlr/v4/codegen/RuleContextDecl.java
 create mode 100644 tool/src/org/antlr/v4/codegen/SourceGenTriggers.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/Action.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/AddToList.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/AltBlock.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/CaptureNextToken.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/CaptureNextTokenType.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/Choice.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/CodeBlock.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/Decl.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/ForcedAction.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/LL1AltBlock.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/LL1Loop.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/LL1OptionalBlockSingleAlt.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/LL1StarBlock.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/LL1StarBlockSingleAlt.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/LabeledOp.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/Lexer.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/MatchToken.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/OptionalBlock.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/Parser.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/ParserFile.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/RuleElement.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/RuleFunction.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/SemPred.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/SrcOp.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/StarBlock.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/StructDecl.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/Sync.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/TestSet.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/TestSetInline.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/ThrowEarlyExitException.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/ThrowNoViableAlt.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/ThrowRecognitionException.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/TokenDecl.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/TokenListDecl.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/ActionText.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/ArgRef.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/DynScopeAttrRef.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/DynScopeRef.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/RetValueRef.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_st.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_start.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_stop.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_text.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_tree.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/SetAttr.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/SetDynScopeAttr.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/SetDynScopeAttr_index.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_int.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_text.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_tree.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_type.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/ast/MakeRoot.java
 create mode 100644 tool/src/org/antlr/v4/codegen/model/dbg.java
 create mode 100644 tool/src/org/antlr/v4/semantics/ActionSniffer.java
 create mode 100644 tool/src/org/antlr/v4/semantics/AttributeChecks.java
 create mode 100644 tool/src/org/antlr/v4/semantics/BasicSemanticChecks.java
 create mode 100644 tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.g
 create mode 100644 tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.java
 create mode 100644 tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.tokens
 create mode 100644 tool/src/org/antlr/v4/semantics/BlankActionSplitterListener.java
 create mode 100644 tool/src/org/antlr/v4/semantics/CollectSymbols.g
 create mode 100644 tool/src/org/antlr/v4/semantics/CollectSymbols.java
 create mode 100644
tool/src/org/antlr/v4/semantics/CollectSymbols.tokens create mode 100644 tool/src/org/antlr/v4/semantics/SemanticPipeline.java create mode 100644 tool/src/org/antlr/v4/semantics/SymbolChecks.java create mode 100644 tool/src/org/antlr/v4/semantics/UseDefAnalyzer.java diff --git a/runtime/Java/src/org/antlr/v4/runtime/Parser.java b/runtime/Java/src/org/antlr/v4/runtime/Parser.java index e8566ff06..d7aeff23d 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/Parser.java +++ b/runtime/Java/src/org/antlr/v4/runtime/Parser.java @@ -27,7 +27,6 @@ */ package org.antlr.v4.runtime; -import org.antlr.v4.runtime.misc.LABitSet; /** A parser for TokenStreams. "parser grammars" result in a subclass * of this. @@ -53,8 +52,7 @@ public class Parser extends BaseRecognizer { } protected Object getMissingSymbol(RecognitionException e, - int expectedTokenType, - LABitSet follow) + int expectedTokenType) { String tokenText = null; if ( expectedTokenType== Token.EOF ) tokenText = ""; diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/ATN.java b/runtime/Java/src/org/antlr/v4/runtime/atn/ATN.java index ae905a160..b7131cab7 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/ATN.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/ATN.java @@ -1,7 +1,6 @@ package org.antlr.v4.runtime.atn; -import org.antlr.v4.automata.ATNSerializer; -import org.antlr.v4.misc.*; +import org.antlr.v4.misc.IntervalSet; import org.antlr.v4.runtime.RuleContext; import org.antlr.v4.tool.*; @@ -85,22 +84,4 @@ public class ATN { public int getNumberOfDecisions() { return decisionToATNState.size(); } - - /** Used by Java target to encode short/int array as chars in string. */ - public String getSerializedAsString() { - return new String(Utils.toCharArray(getSerialized())); - } - - public List getSerialized() { - return new ATNSerializer(this).serialize(); - } - - public char[] getSerializedAsChars() { - return Utils.toCharArray(new ATNSerializer(this).serialize()); - } - - public String getDecoded() { - return new ATNSerializer(this).decode(Utils.toCharArray(getSerialized())); - } - } diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/ATNInterpreter.java b/runtime/Java/src/org/antlr/v4/runtime/atn/ATNInterpreter.java index f4330345b..67bab8df1 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/ATNInterpreter.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/ATNInterpreter.java @@ -2,8 +2,7 @@ package org.antlr.v4.runtime.atn; import org.antlr.v4.misc.*; import org.antlr.v4.parse.ANTLRParser; -import org.antlr.v4.runtime.dfa.*; -import org.antlr.v4.tool.Grammar; +import org.antlr.v4.runtime.dfa.DFAState; import java.util.*; @@ -138,6 +137,7 @@ public abstract class ATNInterpreter { return s; } +/* public static void dump(DFA dfa, Grammar g) { DOTGenerator dot = new DOTGenerator(g); String output = dot.getDOT(dfa, false); @@ -147,4 +147,5 @@ public abstract class ATNInterpreter { public static void dump(DFA dfa) { dump(dfa, null); } + */ } diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/ActionTransition.java b/runtime/Java/src/org/antlr/v4/runtime/atn/ActionTransition.java index 784e0c487..61d34f679 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/ActionTransition.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/ActionTransition.java @@ -22,10 +22,6 @@ public class ActionTransition extends Transition { return true; // we are to be ignored by analysis 'cept for predicates } - public int compareTo(Object o) { - return 0; - } - public String toString() { if ( actionAST!=null ) return 
"{"+actionAST.getText()+"}"; return "action_"+ruleIndex+":"+actionIndex; diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/AtomTransition.java b/runtime/Java/src/org/antlr/v4/runtime/atn/AtomTransition.java index 0dd916d64..35356d20c 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/AtomTransition.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/AtomTransition.java @@ -19,28 +19,6 @@ public class AtomTransition extends Transition { public IntervalSet label() { return IntervalSet.of(label); } - public int hashCode() { return label; } - - public boolean equals(Object o) { - if ( o==null ) return false; - if ( this == o ) return true; // equals if same object - if ( o.getClass() == SetTransition.class ) { - return IntervalSet.of(label).equals(o); - } - return label!=((AtomTransition)o).label; - } - -// public boolean intersect(Label other) { -// if ( other.getClass() == AtomTransition.class ) { -// return label==((AtomTransition)other).label; -// } -// return ((SetLabel)other).label.member(this.label); -// } - - public int compareTo(Object o) { - return this.label-((AtomTransition)o).label; - } - @Override public String toString(Grammar g) { if (g!=null ) return g.getTokenDisplayName(label); diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/EpsilonTransition.java b/runtime/Java/src/org/antlr/v4/runtime/atn/EpsilonTransition.java index 1aecdebf4..9d27eefa3 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/EpsilonTransition.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/EpsilonTransition.java @@ -7,10 +7,6 @@ public class EpsilonTransition extends Transition { public boolean isEpsilon() { return true; } - public int compareTo(Object o) { - return 0; - } - @Override public String toString(Grammar g) { return "epsilon"; diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/NotSetTransition.java b/runtime/Java/src/org/antlr/v4/runtime/atn/NotSetTransition.java index fa000d0a4..4c05fd2eb 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/NotSetTransition.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/NotSetTransition.java @@ -1,8 +1,7 @@ package org.antlr.v4.runtime.atn; import org.antlr.v4.misc.IntervalSet; -import org.antlr.v4.tool.Grammar; -import org.antlr.v4.tool.GrammarAST; +import org.antlr.v4.tool.*; public class NotSetTransition extends SetTransition { public NotSetTransition(GrammarAST ast, IntervalSet label, ATNState target) { diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/PredicateTransition.java b/runtime/Java/src/org/antlr/v4/runtime/atn/PredicateTransition.java index bc0e24dd1..f4a04d648 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/PredicateTransition.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/PredicateTransition.java @@ -1,8 +1,6 @@ package org.antlr.v4.runtime.atn; -import org.antlr.v4.analysis.SemanticContext; -import org.antlr.v4.tool.Grammar; -import org.antlr.v4.tool.GrammarAST; +import org.antlr.v4.tool.*; /** TODO: this is old comment: * A tree of semantic predicates from the grammar AST if label==SEMPRED. 
@@ -14,12 +12,10 @@ public class PredicateTransition extends Transition { public int ruleIndex; public int predIndex; public GrammarAST predAST; - public SemanticContext semanticContext; public PredicateTransition(GrammarAST predicateASTNode, ATNState target) { super(target); this.predAST = predicateASTNode; - this.semanticContext = new SemanticContext.Predicate(predicateASTNode); } public PredicateTransition(ATNState target, int ruleIndex, int predIndex) { @@ -30,29 +26,7 @@ public class PredicateTransition extends Transition { public boolean isEpsilon() { return true; } - public int compareTo(Object o) { - return 0; - } - - public int hashCode() { - return semanticContext.hashCode(); - } - - public boolean equals(Object o) { - if ( o==null ) { - return false; - } - if ( this == o ) { - return true; // equals if same object - } - if ( !(o instanceof PredicateTransition) ) { - return false; - } - return semanticContext.equals(((PredicateTransition)o).semanticContext); - } - public String toString() { - if ( semanticContext!=null ) return semanticContext.toString(); if ( predAST!=null ) return predAST.getText(); return "pred-"+ruleIndex+":"+predIndex; } diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/RangeTransition.java b/runtime/Java/src/org/antlr/v4/runtime/atn/RangeTransition.java index 57193789f..ba20d4673 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/RangeTransition.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/RangeTransition.java @@ -1,7 +1,6 @@ package org.antlr.v4.runtime.atn; -import org.antlr.v4.misc.CharSupport; -import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.misc.*; public class RangeTransition extends Transition { public int from; @@ -15,10 +14,6 @@ public class RangeTransition extends Transition { super(target); } - public int compareTo(Object o) { - return 0; - } - @Override public IntervalSet label() { return IntervalSet.of(from,to); } diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/RuleTransition.java b/runtime/Java/src/org/antlr/v4/runtime/atn/RuleTransition.java index 71b7371b1..bc0501c69 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/RuleTransition.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/RuleTransition.java @@ -34,8 +34,4 @@ public class RuleTransition extends Transition { } public boolean isEpsilon() { return true; } - - public int compareTo(Object o) { - return 0; - } } diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/SetTransition.java b/runtime/Java/src/org/antlr/v4/runtime/atn/SetTransition.java index 039485897..bacddba96 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/SetTransition.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/SetTransition.java @@ -22,28 +22,6 @@ public class SetTransition extends Transition { public IntervalSet label() { return label; } - public int compareTo(Object o) { - return 0; - } - - // public boolean intersect(Label other) { -// if ( other.getClass() == SetTransition.class ) { -// return label.and(((SetTransition)other).label).isNil(); -// } -// return label.member(((AtomTransition)other).label); -// } - - public int hashCode() { return label.hashCode(); } - - public boolean equals(Object o) { - if ( o==null ) return false; - if ( this == o ) return true; // equals if same object - if ( o.getClass() == AtomTransition.class ) { - o = IntervalSet.of(((AtomTransition)o).label); - } - return this.label.equals(((SetTransition)o).label); - } - public String toString(Grammar g) { return label.toString(g); } diff --git 
a/runtime/Java/src/org/antlr/v4/runtime/atn/ThreadState.java b/runtime/Java/src/org/antlr/v4/runtime/atn/ThreadState.java deleted file mode 100644 index 9771f3099..000000000 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/ThreadState.java +++ /dev/null @@ -1,41 +0,0 @@ -package org.antlr.v4.runtime.atn; - -/** ATN simulation thread state */ -public class ThreadState { - public int addr; - public int alt; // or speculatively matched token type for lexers - public ATNStack context; - public int inputIndex = -1; // char (or token?) index from 0 - public int inputMarker = -1; // accept states track input markers in case we need to rewind - - public ThreadState(int addr, int alt, ATNStack context) { - this.addr = addr; - this.alt = alt; - this.context = context; - } - - public ThreadState(ThreadState t) { - this.addr = t.addr; - this.alt = t.alt; - this.context = t.context; - this.inputIndex = t.inputIndex; - } - - public boolean equals(Object o) { - if ( o==null ) return false; - if ( this==o ) return true; - ThreadState other = (ThreadState)o; - return this.addr==other.addr && - this.alt==other.alt && - this.context.equals(other.context); - } - - public int hashCode() { return addr + context.hashCode(); } - - public String toString() { - if ( context.parent==null ) { - return "("+addr+","+alt+")"; - } - return "("+addr+","+alt+","+context+")"; - } -} diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/Transition.java b/runtime/Java/src/org/antlr/v4/runtime/atn/Transition.java index 477ae91d5..cb496d8c1 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/Transition.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/Transition.java @@ -3,8 +3,7 @@ package org.antlr.v4.runtime.atn; import org.antlr.v4.misc.IntervalSet; import org.antlr.v4.tool.Grammar; -import java.util.HashMap; -import java.util.Map; +import java.util.*; /** An ATN transition between any two ATN states. Subclasses define * atom, set, epsilon, action, predicate, rule transitions. @@ -18,7 +17,7 @@ import java.util.Map; * the states. We'll use the term Edge for the DFA to distinguish them from * ATN transitions. 
*/ -public abstract class Transition implements Comparable { +public abstract class Transition { // constants for serialization public static final int EPSILON = 1; public static final int RANGE = 2; diff --git a/runtime/Java/src/org/antlr/v4/runtime/atn/WildcardTransition.java b/runtime/Java/src/org/antlr/v4/runtime/atn/WildcardTransition.java index 0b6dbf3fb..7ea54c1aa 100644 --- a/runtime/Java/src/org/antlr/v4/runtime/atn/WildcardTransition.java +++ b/runtime/Java/src/org/antlr/v4/runtime/atn/WildcardTransition.java @@ -4,9 +4,6 @@ import org.antlr.v4.tool.Grammar; public class WildcardTransition extends Transition { public WildcardTransition(ATNState target) { super(target); } - public int compareTo(Object o) { - return 0; - } @Override public String toString(Grammar g) { diff --git a/tool/src/org/antlr/v4/Tool.java b/tool/src/org/antlr/v4/Tool.java index 84b82d204..f6758ea9d 100644 --- a/tool/src/org/antlr/v4/Tool.java +++ b/tool/src/org/antlr/v4/Tool.java @@ -2,7 +2,11 @@ package org.antlr.v4; import org.antlr.runtime.*; import org.antlr.tool.DOTGenerator; +import org.antlr.v4.analysis.AnalysisPipeline; +import org.antlr.v4.automata.*; +import org.antlr.v4.codegen.CodeGenPipeline; import org.antlr.v4.parse.*; +import org.antlr.v4.semantics.SemanticPipeline; import org.antlr.v4.tool.*; import java.io.IOException; @@ -65,6 +69,10 @@ public class Tool { new Option("launch_ST_inspector", "-dbgST", "launch StringTemplate visualizer on generated code"), }; + // The internal options are for my use on the command line during dev + public static boolean internalOption_PrintGrammarTree = false; + public static boolean internalOption_ShowATNConfigsInDFA = false; + public final String[] args; protected List grammarFiles = new ArrayList(); diff --git a/tool/src/org/antlr/v4/analysis/AnalysisPipeline.java b/tool/src/org/antlr/v4/analysis/AnalysisPipeline.java new file mode 100644 index 000000000..7cd6a351b --- /dev/null +++ b/tool/src/org/antlr/v4/analysis/AnalysisPipeline.java @@ -0,0 +1,56 @@ +package org.antlr.v4.analysis; + +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.tool.Grammar; + +import java.util.*; + +public class AnalysisPipeline { + public Grammar g; + + public AnalysisPipeline(Grammar g) { + this.g = g; + } + + public void process() { + // LEFT-RECURSION CHECK + LeftRecursionDetector lr = new LeftRecursionDetector(g.atn); + lr.check(); + if ( lr.listOfRecursiveCycles.size()>0 ) return; // bail out + + // BUILD DFA FOR EACH DECISION + if ( !g.isLexer() ) processParserOrTreeParser(); + } + + void processParserOrTreeParser() { + g.decisionLOOK = + new Vector(g.atn.getNumberOfDecisions()+1); + for (DecisionState s : g.atn.decisionToATNState) { + System.out.println("\nDECISION "+s.decision+" in rule "+s.rule.name); + + LL1Analyzer anal = new LL1Analyzer(g.atn); + IntervalSet[] look = anal.getDecisionLookahead(s); + System.out.println("look="+ Arrays.toString(look)); + g.decisionLOOK.setSize(s.decision+1); + g.decisionLOOK.set(s.decision, look); + System.out.println("LL(1)? 
"+disjoint(look)); + } + } + + /** Return lookahead depth at which lookahead sets are disjoint or return 0 */ + public static boolean disjoint(IntervalSet[] altLook) { + boolean collision = false; + IntervalSet combined = new IntervalSet(); + for (int a=1; a tokenPropToModelMap = new HashMap() {{ + put("text", TokenPropertyRef_text.class); + put("type", TokenPropertyRef_type.class); + put("line", TokenPropertyRef_line.class); + put("index", TokenPropertyRef_index.class); + put("pos", TokenPropertyRef_pos.class); + put("channel", TokenPropertyRef_channel.class); + put("tree", TokenPropertyRef_tree.class); + put("int", TokenPropertyRef_int.class); + }}; + + ActionAST node; + RuleFunction rf; + List chunks = new ArrayList(); + OutputModelFactory factory; + + public ActionTranslator(OutputModelFactory factory, ActionAST node) { + this.factory = factory; + this.node = node; + } + + public static List translateAction(OutputModelFactory factory, + RuleFunction rf, + Token tokenWithinAction, + ActionAST node) + { + String action = tokenWithinAction.getText(); + int firstCurly = action.indexOf('{'); + int lastCurly = action.lastIndexOf('}'); + if ( firstCurly>=0 && lastCurly>=0 ) { + action = action.substring(firstCurly+1, lastCurly); // trim {...} + } + return translateActionChunk(factory, rf, action, node); + } + + public static List translateActionChunk(OutputModelFactory factory, + RuleFunction rf, + String action, + ActionAST node) + { + Token tokenWithinAction = node.token; + ActionTranslator translator = new ActionTranslator(factory, node); + translator.rf = rf; + System.out.println("translate "+action); + ANTLRStringStream in = new ANTLRStringStream(action); + in.setLine(tokenWithinAction.getLine()); + in.setCharPositionInLine(tokenWithinAction.getCharPositionInLine()); + ActionSplitter trigger = new ActionSplitter(in, translator); + // forces eval, triggers listener methods + trigger.getActionTokens(); + return translator.chunks; + } + + public void attr(String expr, Token x) { + System.out.println("attr "+x); + Attribute a = node.resolver.resolveToAttribute(x.getText(), node); + if ( a!=null ) { + switch ( a.dict.type ) { + case ARG: chunks.add(new ArgRef(x.getText())); break; + case RET: chunks.add(new RetValueRef(x.getText())); break; +// case PREDEFINED_RULE: chunks.add(new RetValueRef(x.getText())); break; +// case PREDEFINED_TREE_RULE: chunks.add(new RetValueRef(x.getText())); break; + } + } + if ( node.resolver.resolvesToToken(x.getText(), node) ) { + chunks.add(new TokenRef(getTokenLabel(x.getText()))); // $label + return; + } + if ( node.resolver.resolvesToListLabel(x.getText(), node) ) { + return; // $ids for ids+=ID etc... 
+ } + if ( node.resolver.resolveToDynamicScope(x.getText(), node)!=null ) { + chunks.add(new DynScopeRef(getDynamicScopeName(x.getText()))); // $S for scope S is ok + return; + } +// switch ( a.dict.type ) { +// case ARG: chunks.add(new ArgRef(x.getText())); break; +// case RET: chunks.add(new RetValueRef(x.getText())); break; +// case PREDEFINED_RULE: chunks.add(new RetValueRef(x.getText())); break; +// case PREDEFINED_LEXER_RULE: chunks.add(new RetValueRef(x.getText())); break; +// case PREDEFINED_TREE_RULE: chunks.add(new RetValueRef(x.getText())); break; +// case GLOBAL_SCOPE: chunks.add(new RetValueRef(x.getText())); break; +// case RULE_SCOPE: chunks.add(new RetValueRef(x.getText())); break; +// case TOKEN: chunks.add(new TokenRef(x.getText())); break; +// } + } + + /** $x.y = expr; */ + public void setQualifiedAttr(String expr, Token x, Token y, Token rhs) { + System.out.println("setQAttr "+x+"."+y+"="+rhs); + // x has to be current rule; just set y attr + List rhsChunks = translateActionChunk(factory,rf,rhs.getText(),node); + chunks.add(new SetAttr(y.getText(), rhsChunks)); + } + + public void qualifiedAttr(String expr, Token x, Token y) { + System.out.println("qattr "+x+"."+y); + Attribute a = node.resolver.resolveToAttribute(x.getText(), y.getText(), node); + switch ( a.dict.type ) { + case ARG: chunks.add(new ArgRef(y.getText())); break; // has to be current rule + case RET: + if ( factory.currentRule.size()>0 && factory.currentRule.peek().name.equals(x.getText()) ) { + chunks.add(new RetValueRef(y.getText())); break; + } + else { + chunks.add(new QRetValueRef(getRuleLabel(x.getText()), y.getText())); break; + } + case PREDEFINED_RULE: chunks.add(getRulePropertyRef(x, y)); break; + case TOKEN: chunks.add(getTokenPropertyRef(x, y)); break; +// case PREDEFINED_LEXER_RULE: chunks.add(new RetValueRef(x.getText())); break; +// case PREDEFINED_TREE_RULE: chunks.add(new RetValueRef(x.getText())); break; + } + } + + public void setAttr(String expr, Token x, Token rhs) { + System.out.println("setAttr "+x+" "+rhs); + List rhsChunks = translateActionChunk(factory,rf,rhs.getText(),node); + chunks.add(new SetAttr(x.getText(), rhsChunks)); + } + + public void dynamicScopeAttr(String expr, Token x, Token y) { + System.out.println("scoped "+x+"."+y); + // we assume valid, just gen code + chunks.add(new DynScopeAttrRef(getDynamicScopeName(x.getText()), y.getText())); + } + + public void setDynamicScopeAttr(String expr, Token x, Token y, Token rhs) { + List rhsChunks = translateActionChunk(factory,rf,rhs.getText(),node); + chunks.add(new SetDynScopeAttr(getDynamicScopeName(x.getText()), y.getText(), rhsChunks)); + } + + public void dynamicNegativeIndexedScopeAttr(String expr, Token x, Token y, Token index) { + List indexChunks = translateActionChunk(factory,rf,index.getText(),node); + chunks.add(new DynScopeAttrRef_negIndex(getDynamicScopeName(x.getText()), y.getText(), indexChunks)); + } + + public void setDynamicNegativeIndexedScopeAttr(String expr, Token x, Token y, Token index, Token rhs) { + List indexChunks = translateActionChunk(factory,rf,index.getText(),node); + List rhsChunks = translateActionChunk(factory,rf,rhs.getText(),node); + chunks.add(new SetDynScopeAttr_negIndex(getDynamicScopeName(x.getText()), y.getText(), indexChunks, rhsChunks)); + } + + public void dynamicAbsoluteIndexedScopeAttr(String expr, Token x, Token y, Token index) { + List indexChunks = translateActionChunk(factory,rf,index.getText(),node); + chunks.add(new DynScopeAttrRef_index(getDynamicScopeName(x.getText()), 
y.getText(), indexChunks)); + } + + public void setDynamicAbsoluteIndexedScopeAttr(String expr, Token x, Token y, Token index, Token rhs) { + List indexChunks = translateActionChunk(factory,rf,index.getText(),node); + List rhsChunks = translateActionChunk(factory,rf,rhs.getText(),node); + chunks.add(new SetDynScopeAttr_index(getDynamicScopeName(x.getText()), y.getText(), indexChunks, rhsChunks)); + } + + public void templateInstance(String expr) { + } + + public void indirectTemplateInstance(String expr) { + } + + public void setExprAttribute(String expr) { + } + + public void setSTAttribute(String expr) { + } + + public void templateExpr(String expr) { + } + + public void unknownSyntax(Token t) { + } + + public void text(String text) { + chunks.add(new ActionText(text)); + } + + TokenPropertyRef getTokenPropertyRef(Token x, Token y) { + try { + Class c = tokenPropToModelMap.get(y.getText()); + Constructor ctor = c.getConstructor(new Class[] {String.class}); + TokenPropertyRef ref = + (TokenPropertyRef)ctor.newInstance(getRuleLabel(x.getText())); + return ref; + } + catch (Exception e) { + factory.g.tool.errMgr.toolError(ErrorType.INTERNAL_ERROR, e); + } + return null; + } + + RulePropertyRef getRulePropertyRef(Token x, Token y) { + try { + Class c = rulePropToModelMap.get(y.getText()); + Constructor ctor = c.getConstructor(new Class[] {String.class}); + RulePropertyRef ref = + (RulePropertyRef)ctor.newInstance(getRuleLabel(x.getText())); + return ref; + } + catch (Exception e) { + factory.g.tool.errMgr.toolError(ErrorType.INTERNAL_ERROR, e); + } + return null; + } + + public String getTokenLabel(String x) { + if ( node.resolver.resolvesToLabel(x, node) ) return x; + return factory.gen.target.getImplicitTokenLabel(x); + } + + public String getRuleLabel(String x) { + if ( node.resolver.resolvesToLabel(x, node) ) return x; + return factory.gen.target.getImplicitRuleLabel(x); + } + + public String getDynamicScopeName(String x) { + String scope; + if ( factory.g.getRule(x)==null ) { + scope = factory.gen.target.getGlobalDynamicScopeStructName(x); + } + else { + scope = factory.gen.target.getRuleDynamicScopeStructName(x); + } + return scope; + } + +// public String getTokenLabel(String x, ActionAST node) { +// Alternative alt = node.resolver. +// Rule r = node.ATNState.rule; +// if ( r.tokenRefs.get(x)!=null ) return true; +// LabelElementPair anyLabelDef = getAnyLabelDef(x); +// if ( anyLabelDef!=null && anyLabelDef.type== LabelType.TOKEN_LABEL ) return true; +// return false; +// } +} diff --git a/tool/src/org/antlr/v4/codegen/CodeGenerator.java b/tool/src/org/antlr/v4/codegen/CodeGenerator.java new file mode 100644 index 000000000..111d02806 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/CodeGenerator.java @@ -0,0 +1,180 @@ +package org.antlr.v4.codegen; + +import org.antlr.v4.codegen.model.OutputModelObject; +import org.antlr.v4.misc.Utils; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.tool.*; +import org.stringtemplate.v4.*; + +import java.io.*; +import java.util.*; + +/** General controller for code gen. Can instantiate sub generator(s). 
+ */ +public class CodeGenerator { + public static boolean LAUNCH_ST_INSPECTOR = false; + public static final String TEMPLATE_ROOT = "org/antlr/v4/tool/templates/codegen"; + public static final String VOCAB_FILE_EXTENSION = ".tokens"; + public final static String vocabFilePattern = + "=\n}>" + + "=\n}>"; + + public Grammar g; + public Target target; + public STGroup templates; + + public int lineWidth = 72; + + public CodeGenerator(Grammar g) { + this.g = g; + String language = g.getOption("language", "Java"); + loadLanguageTarget(language); + loadTemplates(language); + } + + void loadLanguageTarget(String language) { + String targetName = "org.antlr.v4.codegen."+language+"Target"; + try { + Class c = Class.forName(targetName); + target = (Target)c.newInstance(); + } + catch (ClassNotFoundException cnfe) { + target = new Target(); // use default + } + catch (InstantiationException ie) { + g.tool.errMgr.toolError(ErrorType.CANNOT_CREATE_TARGET_GENERATOR, + targetName, + ie); + } + catch (IllegalAccessException cnfe) { + g.tool.errMgr.toolError(ErrorType.CANNOT_CREATE_TARGET_GENERATOR, + targetName, + cnfe); + } + } + + public void loadTemplates(String language) { + try { + templates = new STGroupFile(TEMPLATE_ROOT+"/"+language+"/"+language+".stg"); + templates.registerRenderer(Integer.class, new NumberRenderer()); + } + catch (IllegalArgumentException iae) { + g.tool.errMgr.toolError(ErrorType.CANNOT_CREATE_TARGET_GENERATOR, + language); + } + +// if ( EMIT_TEMPLATE_DELIMITERS ) { +// templates.emitDebugStartStopStrings(true); +// templates.doNotEmitDebugStringsForTemplate("codeFileExtension"); +// templates.doNotEmitDebugStringsForTemplate("headerFileExtension"); +// } + } + + public ST generate() { + OutputModelFactory factory; + if ( g.isParser() || g.isCombined() || g.isTreeGrammar() ) { + factory = new ParserFactory(this); + } + else { + factory = new LexerFactory(this); + } + + OutputModelWalker walker = new OutputModelWalker(g.tool, templates); + OutputModelObject outputModel = factory.buildOutputModel(); + ST st = walker.walk(outputModel); + + if (CodeGenerator.LAUNCH_ST_INSPECTOR) { + st.inspect(); + //if ( templates.isDefined("headerFile") ) headerFileST.inspect(); + } + + return st; + } + + /** Generate a token vocab file with all the token names/types. 
For example: + * ID=7 + * FOR=8 + * 'for'=8 + * + * This is independent of the target language; used by antlr internally + */ + ST getTokenVocabOutput() { + ST vocabFileST = new ST(vocabFilePattern); + Map tokens = new HashMap(); + // make constants for the token names + for (String t : g.tokenNameToTypeMap.keySet()) { + int tokenType = g.tokenNameToTypeMap.get(t); + if ( tokenType>=Token.MIN_TOKEN_TYPE ) { + tokens.put(t, Utils.integer(tokenType)); + } + } + vocabFileST.add("tokens", tokens); + + // now dump the strings + Map literals = new HashMap(); + for (String literal : g.stringLiteralToTypeMap.keySet()) { + int tokenType = g.stringLiteralToTypeMap.get(literal); + if ( tokenType>=Token.MIN_TOKEN_TYPE ) { + literals.put(literal, Utils.integer(tokenType)); + } + } + vocabFileST.add("literals", literals); + + return vocabFileST; + } + + public void write(ST outputFileST) { + // WRITE FILES + try { + target.genRecognizerFile(this,g,outputFileST); + if ( templates.isDefined("headerFile") ) { + ST extST = templates.getInstanceOf("headerFileExtension"); + ST headerFileST = null; + target.genRecognizerHeaderFile(this,g,headerFileST,extST.render(lineWidth)); + } + // write out the vocab interchange file; used by antlr, + // does not change per target + ST tokenVocabSerialization = getTokenVocabOutput(); + String vocabFileName = getVocabFileName(); + if ( vocabFileName!=null ) { + write(tokenVocabSerialization, vocabFileName); + } + } + catch (IOException ioe) { + g.tool.errMgr.toolError(ErrorType.CANNOT_WRITE_FILE, + getVocabFileName(), + ioe); + } + } + + public void write(ST code, String fileName) throws IOException { + long start = System.currentTimeMillis(); + Writer w = g.tool.getOutputFile(g, fileName); + STWriter wr = new AutoIndentWriter(w); + wr.setLineWidth(lineWidth); + code.write(wr); + w.close(); + long stop = System.currentTimeMillis(); + System.out.println("render time for "+fileName+": "+(int)(stop-start)+"ms"); + } + + /** Generate TParser.java and TLexer.java from T.g if combined, else + * just use T.java as output regardless of type. + */ + public String getRecognizerFileName() { + ST extST = templates.getInstanceOf("codeFileExtension"); + String recognizerName = g.getRecognizerName(); + return recognizerName+extST.render(); + } + + /** What is the name of the vocab file generated for this grammar? + * Returns null if no .tokens file should be generated. 
+ */ + public String getVocabFileName() { +// if ( g.isBuiltFromString() ) { +// return null; +// } + return g.name+VOCAB_FILE_EXTENSION; + } + +} diff --git a/tool/src/org/antlr/v4/codegen/OutputModelFactory.java b/tool/src/org/antlr/v4/codegen/OutputModelFactory.java new file mode 100644 index 000000000..37f32ad29 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/OutputModelFactory.java @@ -0,0 +1,167 @@ +package org.antlr.v4.codegen; + +import org.antlr.v4.analysis.AnalysisPipeline; +import org.antlr.v4.codegen.model.*; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.parse.ANTLRParser; +import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.tool.*; + +import java.util.*; + +/** Create output objects wthin rule functions */ +public abstract class OutputModelFactory { + public Grammar g; + public CodeGenerator gen; + + // Context ptrs + public OutputModelObject file; // root + public Stack currentRule = new Stack(); + public Alternative currentAlt; + + protected OutputModelFactory(CodeGenerator gen) { + this.gen = gen; + this.g = gen.g; + } + + public abstract OutputModelObject buildOutputModel(); + + public CodeBlock epsilon() { return new CodeBlock(this); } + + public CodeBlock alternative(List elems) { return new CodeBlock(this, elems); } + + public SrcOp action(GrammarAST ast) { return new Action(this, ast); } + + public SrcOp forcedAction(GrammarAST ast) { return new ForcedAction(this, ast); } + + public SrcOp sempred(GrammarAST ast) { return new SemPred(this, ast); } + + public abstract List ruleRef(GrammarAST ID, GrammarAST label, GrammarAST args); + + public abstract List tokenRef(GrammarAST ID, GrammarAST label, GrammarAST args); + + public abstract List stringRef(GrammarAST ID, GrammarAST label); + + public Choice getChoiceBlock(BlockAST blkAST, List alts) { + int decision = ((DecisionState)blkAST.atnState).decision; + if ( AnalysisPipeline.disjoint(g.decisionLOOK.get(decision)) ) { + return getLL1ChoiceBlock(blkAST, alts); + } + else { + return getLLStarChoiceBlock(blkAST, alts); + } + } + + public Choice getEBNFBlock(GrammarAST ebnfRoot, List alts) { + int decision; + if ( ebnfRoot.getType()==ANTLRParser.POSITIVE_CLOSURE ) { + decision = ((PlusBlockStartState)ebnfRoot.atnState).loopBackState.decision; + } + else if ( ebnfRoot.getType()==ANTLRParser.CLOSURE ) { + decision = ((BlockStartState)ebnfRoot.atnState).decision; + } + else { + decision = ((DecisionState)ebnfRoot.atnState).decision; + } + if ( AnalysisPipeline.disjoint(g.decisionLOOK.get(decision)) ) { + return getLL1EBNFBlock(ebnfRoot, alts); + } + else { + return getLLStarEBNFBlock(ebnfRoot, alts); + } + } + + public Choice getLL1ChoiceBlock(BlockAST blkAST, List alts) { + return new LL1AltBlock(this, blkAST, alts); + } + + public Choice getLLStarChoiceBlock(BlockAST blkAST, List alts) { + return new AltBlock(this, blkAST, alts); + } + + public Choice getLL1EBNFBlock(GrammarAST ebnfRoot, List alts) { + int ebnf = 0; + if ( ebnfRoot!=null ) ebnf = ebnfRoot.getType(); + Choice c = null; + switch ( ebnf ) { + case ANTLRParser.OPTIONAL : + if ( alts.size()==1 ) c = new LL1OptionalBlockSingleAlt(this, ebnfRoot, alts); + else c = new LL1OptionalBlock(this, ebnfRoot, alts); + break; + case ANTLRParser.CLOSURE : + if ( alts.size()==1 ) c = new LL1StarBlockSingleAlt(this, ebnfRoot, alts); + else c = new LL1StarBlock(this, ebnfRoot, alts); + break; + case ANTLRParser.POSITIVE_CLOSURE : + if ( alts.size()==1 ) c = new LL1PlusBlockSingleAlt(this, ebnfRoot, alts); + else c = new LL1PlusBlock(this, ebnfRoot, alts); + 
break; + } + return c; + } + + public Choice getLLStarEBNFBlock(GrammarAST ebnfRoot, List alts) { + int ebnf = 0; + if ( ebnfRoot!=null ) ebnf = ebnfRoot.getType(); + Choice c = null; + switch ( ebnf ) { + case ANTLRParser.OPTIONAL : + c = new OptionalBlock(this, ebnfRoot, alts); + break; + case ANTLRParser.CLOSURE : + c = new StarBlock(this, ebnfRoot, alts); + break; + case ANTLRParser.POSITIVE_CLOSURE : + c = new PlusBlock(this, ebnfRoot, alts); + break; + } + return c; + } + + public abstract void defineBitSet(BitSetDecl b); + + public SrcOp getLL1Test(IntervalSet look, GrammarAST blkAST) { + return new TestSetInline(this, blkAST, look); +// OutputModelObject expr; +// if ( look.size() < gen.target.getInlineTestsVsBitsetThreshold() ) { +// expr = new TestSetInline(this, blkAST, look); +// } +// else { +// expr = new TestSet(this, blkAST, look); +// } +// return expr; + } + +// public DFADecl defineDFA(GrammarAST ast, DFA dfa) { +// return null; +//// DFADef d = new DFADef(name, dfa); +//// outputModel.dfaDefs.add(d); +// } +// + public BitSetDecl createFollowBitSet(GrammarAST ast, IntervalSet set) { + String inRuleName = ast.atnState.rule.name; + String elementName = ast.getText(); // assume rule ref + if ( ast.getType() == ANTLRParser.STRING_LITERAL ) { + elementName = gen.target.getTokenTypeAsTargetLabel(g, g.stringLiteralToTypeMap.get(elementName)); + } + String name = "FOLLOW_"+elementName+"_in_"+inRuleName+"_"+ast.token.getTokenIndex(); + BitSetDecl b = new BitSetDecl(this, name, set); + return b; + } + + public BitSetDecl createExpectingBitSet(GrammarAST ast, int decision, IntervalSet set, String position) { + String inRuleName = ast.atnState.rule.name; + String name = "EXPECTING_in_"+inRuleName+"_"+position+"_"+decision; + //System.out.println("!!!!!!!! 
create "+name); + BitSetDecl b = new BitSetDecl(this, name, set); + return b; + } + + public BitSetDecl createTestBitSet(GrammarAST ast, IntervalSet set) { + String inRuleName = ast.atnState.rule.name; + String name = "LOOK_in_"+inRuleName+"_"+ast.token.getTokenIndex(); + BitSetDecl b = new BitSetDecl(this, name, set); + return b; + } +} + diff --git a/tool/src/org/antlr/v4/codegen/ParserFactory.java b/tool/src/org/antlr/v4/codegen/ParserFactory.java new file mode 100644 index 000000000..1ba3c370a --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/ParserFactory.java @@ -0,0 +1,58 @@ +package org.antlr.v4.codegen; + +import org.antlr.v4.codegen.model.*; +import org.antlr.v4.misc.Utils; +import org.antlr.v4.parse.ANTLRParser; +import org.antlr.v4.tool.*; + +import java.util.List; + +/** */ +public class ParserFactory extends OutputModelFactory { +// public static final Map modelToTemplateMap = new HashMap() {{ +// put(ParserFile.class, "parserFile"); +// put(Parser.class, "parser"); +// put(RuleFunction.class, "parserFunction"); +// put(DFADef.class, "DFA"); +// put(CodeBlock.class, "codeBlock"); +// put(LL1Choice.class, "switch"); +// put(MatchToken.class, "matchToken"); +// }}; + + public ParserFactory(CodeGenerator gen) { + super(gen); + } + + public OutputModelObject buildOutputModel() { + return new ParserFile(this, gen.getRecognizerFileName()); + } + + @Override + public List ruleRef(GrammarAST ID, GrammarAST label, GrammarAST args) { + InvokeRule r = new InvokeRule(this, ID, label); + AddToList a = null; + if ( label!=null && label.parent.getType()==ANTLRParser.PLUS_ASSIGN ) { + a = new AddToList(this, gen.target.getListLabel(label.getText()), r); + } + return Utils.list(r, a); + } + + @Override + public List tokenRef(GrammarAST ID, GrammarAST label, GrammarAST args) { + MatchToken m = new MatchToken(this, (TerminalAST) ID, label); + AddToList a = null; + if ( label!=null && label.parent.getType()==ANTLRParser.PLUS_ASSIGN ) { + a = new AddToList(this, gen.target.getListLabel(label.getText()), m); + } + return Utils.list(m, a); + } + + @Override + public List stringRef(GrammarAST ID, GrammarAST label) { + return tokenRef(ID, label, null); + } + + public void defineBitSet(BitSetDecl b) { +// ((ParserFile)file).defineBitSet(b); + } +} diff --git a/tool/src/org/antlr/v4/codegen/RuleContextDecl.java b/tool/src/org/antlr/v4/codegen/RuleContextDecl.java new file mode 100644 index 000000000..3c63ca971 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/RuleContextDecl.java @@ -0,0 +1,12 @@ +package org.antlr.v4.codegen; + +import org.antlr.v4.codegen.model.Decl; + +/** */ +public class RuleContextDecl extends Decl { + public String ctxName; + public RuleContextDecl(OutputModelFactory factory, String name, String ctxName) { + super(factory, name); + this.ctxName = ctxName; + } +} diff --git a/tool/src/org/antlr/v4/codegen/SourceGenTriggers.java b/tool/src/org/antlr/v4/codegen/SourceGenTriggers.java new file mode 100644 index 000000000..8d2e85feb --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/SourceGenTriggers.java @@ -0,0 +1,1869 @@ +// $ANTLR 3.3 Nov 30, 2010 12:50:56 SourceGenTriggers.g 2011-06-13 18:23:30 + +package org.antlr.v4.codegen; + +import org.antlr.runtime.*; +import org.antlr.runtime.BitSet; +import org.antlr.runtime.tree.*; +import org.antlr.v4.codegen.model.*; +import org.antlr.v4.misc.Utils; +import org.antlr.v4.tool.*; + +import java.util.*; + +public class SourceGenTriggers extends TreeParser { + public static final String[] tokenNames = new String[] { + "", "", "", "", 
"SEMPRED", "FORCED_ACTION", "DOC_COMMENT", "SRC", "NLCHARS", "COMMENT", "DOUBLE_QUOTE_STRING_LITERAL", "DOUBLE_ANGLE_STRING_LITERAL", "ACTION_STRING_LITERAL", "ACTION_CHAR_LITERAL", "ARG_ACTION", "NESTED_ACTION", "ACTION", "ACTION_ESC", "WSNLCHARS", "OPTIONS", "TOKENS", "SCOPE", "IMPORT", "FRAGMENT", "LEXER", "PARSER", "TREE", "GRAMMAR", "PROTECTED", "PUBLIC", "PRIVATE", "RETURNS", "THROWS", "CATCH", "FINALLY", "TEMPLATE", "MODE", "COLON", "COLONCOLON", "COMMA", "SEMI", "LPAREN", "RPAREN", "IMPLIES", "LT", "GT", "ASSIGN", "QUESTION", "BANG", "STAR", "PLUS", "PLUS_ASSIGN", "OR", "ROOT", "DOLLAR", "DOT", "RANGE", "ETC", "RARROW", "TREE_BEGIN", "AT", "NOT", "RBRACE", "TOKEN_REF", "RULE_REF", "INT", "WSCHARS", "ESC_SEQ", "STRING_LITERAL", "HEX_DIGIT", "UNICODE_ESC", "WS", "ERRCHAR", "RULE", "RULES", "RULEMODIFIERS", "RULEACTIONS", "BLOCK", "REWRITE_BLOCK", "OPTIONAL", "CLOSURE", "POSITIVE_CLOSURE", "SYNPRED", "CHAR_RANGE", "EPSILON", "ALT", "ALTLIST", "ID", "ARG", "ARGLIST", "RET", "COMBINED", "INITACTION", "LABEL", "GATED_SEMPRED", "SYN_SEMPRED", "BACKTRACK_SEMPRED", "WILDCARD", "LIST", "ELEMENT_OPTIONS", "ST_RESULT", "RESULT", "ALT_REWRITE" + }; + public static final int EOF=-1; + public static final int SEMPRED=4; + public static final int FORCED_ACTION=5; + public static final int DOC_COMMENT=6; + public static final int SRC=7; + public static final int NLCHARS=8; + public static final int COMMENT=9; + public static final int DOUBLE_QUOTE_STRING_LITERAL=10; + public static final int DOUBLE_ANGLE_STRING_LITERAL=11; + public static final int ACTION_STRING_LITERAL=12; + public static final int ACTION_CHAR_LITERAL=13; + public static final int ARG_ACTION=14; + public static final int NESTED_ACTION=15; + public static final int ACTION=16; + public static final int ACTION_ESC=17; + public static final int WSNLCHARS=18; + public static final int OPTIONS=19; + public static final int TOKENS=20; + public static final int SCOPE=21; + public static final int IMPORT=22; + public static final int FRAGMENT=23; + public static final int LEXER=24; + public static final int PARSER=25; + public static final int TREE=26; + public static final int GRAMMAR=27; + public static final int PROTECTED=28; + public static final int PUBLIC=29; + public static final int PRIVATE=30; + public static final int RETURNS=31; + public static final int THROWS=32; + public static final int CATCH=33; + public static final int FINALLY=34; + public static final int TEMPLATE=35; + public static final int MODE=36; + public static final int COLON=37; + public static final int COLONCOLON=38; + public static final int COMMA=39; + public static final int SEMI=40; + public static final int LPAREN=41; + public static final int RPAREN=42; + public static final int IMPLIES=43; + public static final int LT=44; + public static final int GT=45; + public static final int ASSIGN=46; + public static final int QUESTION=47; + public static final int BANG=48; + public static final int STAR=49; + public static final int PLUS=50; + public static final int PLUS_ASSIGN=51; + public static final int OR=52; + public static final int ROOT=53; + public static final int DOLLAR=54; + public static final int DOT=55; + public static final int RANGE=56; + public static final int ETC=57; + public static final int RARROW=58; + public static final int TREE_BEGIN=59; + public static final int AT=60; + public static final int NOT=61; + public static final int RBRACE=62; + public static final int TOKEN_REF=63; + public static final int RULE_REF=64; + public static 
final int INT=65; + public static final int WSCHARS=66; + public static final int ESC_SEQ=67; + public static final int STRING_LITERAL=68; + public static final int HEX_DIGIT=69; + public static final int UNICODE_ESC=70; + public static final int WS=71; + public static final int ERRCHAR=72; + public static final int RULE=73; + public static final int RULES=74; + public static final int RULEMODIFIERS=75; + public static final int RULEACTIONS=76; + public static final int BLOCK=77; + public static final int REWRITE_BLOCK=78; + public static final int OPTIONAL=79; + public static final int CLOSURE=80; + public static final int POSITIVE_CLOSURE=81; + public static final int SYNPRED=82; + public static final int CHAR_RANGE=83; + public static final int EPSILON=84; + public static final int ALT=85; + public static final int ALTLIST=86; + public static final int ID=87; + public static final int ARG=88; + public static final int ARGLIST=89; + public static final int RET=90; + public static final int COMBINED=91; + public static final int INITACTION=92; + public static final int LABEL=93; + public static final int GATED_SEMPRED=94; + public static final int SYN_SEMPRED=95; + public static final int BACKTRACK_SEMPRED=96; + public static final int WILDCARD=97; + public static final int LIST=98; + public static final int ELEMENT_OPTIONS=99; + public static final int ST_RESULT=100; + public static final int RESULT=101; + public static final int ALT_REWRITE=102; + + // delegates + // delegators + + + public SourceGenTriggers(TreeNodeStream input) { + this(input, new RecognizerSharedState()); + } + public SourceGenTriggers(TreeNodeStream input, RecognizerSharedState state) { + super(input, state); + + } + + + public String[] getTokenNames() { return SourceGenTriggers.tokenNames; } + public String getGrammarFileName() { return "SourceGenTriggers.g"; } + + + // TODO: identical grammar to ATNBytecodeTriggers; would be nice to combine + public OutputModelFactory factory; + public SourceGenTriggers(TreeNodeStream input, OutputModelFactory factory) { + this(input); + this.factory = factory; + } + + + + // $ANTLR start "block" + // SourceGenTriggers.g:27:1: block[GrammarAST label, GrammarAST ebnfRoot] returns [SrcOp omo] : ^(blk= BLOCK ( ^( OPTIONS ( . )+ ) )? ( alternative )+ ) ; + public final SrcOp block(GrammarAST label, GrammarAST ebnfRoot) throws RecognitionException { + SrcOp omo = null; + + GrammarAST blk=null; + SourceGenTriggers.alternative_return alternative1 = null; + + + try { + // SourceGenTriggers.g:28:5: ( ^(blk= BLOCK ( ^( OPTIONS ( . )+ ) )? ( alternative )+ ) ) + // SourceGenTriggers.g:28:7: ^(blk= BLOCK ( ^( OPTIONS ( . )+ ) )? ( alternative )+ ) + { + blk=(GrammarAST)match(input,BLOCK,FOLLOW_BLOCK_in_block71); + + match(input, Token.DOWN, null); + // SourceGenTriggers.g:28:20: ( ^( OPTIONS ( . )+ ) )? + int alt2=2; + int LA2_0 = input.LA(1); + + if ( (LA2_0==OPTIONS) ) { + alt2=1; + } + switch (alt2) { + case 1 : + // SourceGenTriggers.g:28:21: ^( OPTIONS ( . )+ ) + { + match(input,OPTIONS,FOLLOW_OPTIONS_in_block75); + + match(input, Token.DOWN, null); + // SourceGenTriggers.g:28:31: ( . )+ + int cnt1=0; + loop1: + do { + int alt1=2; + int LA1_0 = input.LA(1); + + if ( ((LA1_0>=SEMPRED && LA1_0<=ALT_REWRITE)) ) { + alt1=1; + } + else if ( (LA1_0==UP) ) { + alt1=2; + } + + + switch (alt1) { + case 1 : + // SourceGenTriggers.g:28:31: . 
+ { + matchAny(input); + + } + break; + + default : + if ( cnt1 >= 1 ) break loop1; + EarlyExitException eee = + new EarlyExitException(1, input); + throw eee; + } + cnt1++; + } while (true); + + + match(input, Token.UP, null); + + } + break; + + } + + List alts = new ArrayList(); + // SourceGenTriggers.g:30:7: ( alternative )+ + int cnt3=0; + loop3: + do { + int alt3=2; + int LA3_0 = input.LA(1); + + if ( (LA3_0==ALT||LA3_0==ALT_REWRITE) ) { + alt3=1; + } + + + switch (alt3) { + case 1 : + // SourceGenTriggers.g:30:9: alternative + { + pushFollow(FOLLOW_alternative_in_block95); + alternative1=alternative(); + + state._fsp--; + + alts.add((alternative1!=null?alternative1.omo:null)); + + } + break; + + default : + if ( cnt3 >= 1 ) break loop3; + EarlyExitException eee = + new EarlyExitException(3, input); + throw eee; + } + cnt3++; + } while (true); + + + match(input, Token.UP, null); + + if ( alts.size()==1 && ebnfRoot==null) return alts.get(0); + if ( ebnfRoot==null ) { + omo = factory.getChoiceBlock((BlockAST)blk, alts); + } + else { + omo = factory.getEBNFBlock(ebnfRoot, alts); + } + + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return omo; + } + // $ANTLR end "block" + + public static class alternative_return extends TreeRuleReturnScope { + public CodeBlock omo; + }; + + // $ANTLR start "alternative" + // SourceGenTriggers.g:43:1: alternative returns [CodeBlock omo] : ( ^( ALT_REWRITE a= alternative . ) | ^( ALT EPSILON ) | ^( ALT ( element )+ ) ); + public final SourceGenTriggers.alternative_return alternative() throws RecognitionException { + SourceGenTriggers.alternative_return retval = new SourceGenTriggers.alternative_return(); + retval.start = input.LT(1); + + SourceGenTriggers.alternative_return a = null; + + List element2 = null; + + + + List elems = new ArrayList(); + if ( ((AltAST)((GrammarAST)retval.start)).alt!=null ) factory.currentAlt = ((AltAST)((GrammarAST)retval.start)).alt; + + + try { + // SourceGenTriggers.g:49:5: ( ^( ALT_REWRITE a= alternative . ) | ^( ALT EPSILON ) | ^( ALT ( element )+ ) ) + int alt5=3; + int LA5_0 = input.LA(1); + + if ( (LA5_0==ALT_REWRITE) ) { + alt5=1; + } + else if ( (LA5_0==ALT) ) { + int LA5_2 = input.LA(2); + + if ( (LA5_2==DOWN) ) { + int LA5_3 = input.LA(3); + + if ( (LA5_3==EPSILON) ) { + alt5=2; + } + else if ( ((LA5_3>=SEMPRED && LA5_3<=FORCED_ACTION)||LA5_3==ACTION||LA5_3==IMPLIES||LA5_3==ASSIGN||LA5_3==BANG||LA5_3==PLUS_ASSIGN||LA5_3==ROOT||(LA5_3>=DOT && LA5_3<=RANGE)||LA5_3==TREE_BEGIN||LA5_3==NOT||(LA5_3>=TOKEN_REF && LA5_3<=RULE_REF)||LA5_3==STRING_LITERAL||LA5_3==BLOCK||(LA5_3>=OPTIONAL && LA5_3<=POSITIVE_CLOSURE)||LA5_3==GATED_SEMPRED||LA5_3==WILDCARD) ) { + alt5=3; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 5, 3, input); + + throw nvae; + } + } + else { + NoViableAltException nvae = + new NoViableAltException("", 5, 2, input); + + throw nvae; + } + } + else { + NoViableAltException nvae = + new NoViableAltException("", 5, 0, input); + + throw nvae; + } + switch (alt5) { + case 1 : + // SourceGenTriggers.g:49:7: ^( ALT_REWRITE a= alternative . 
) + { + match(input,ALT_REWRITE,FOLLOW_ALT_REWRITE_in_alternative141); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_alternative_in_alternative145); + a=alternative(); + + state._fsp--; + + matchAny(input); + + match(input, Token.UP, null); + + } + break; + case 2 : + // SourceGenTriggers.g:50:7: ^( ALT EPSILON ) + { + match(input,ALT,FOLLOW_ALT_in_alternative157); + + match(input, Token.DOWN, null); + match(input,EPSILON,FOLLOW_EPSILON_in_alternative159); + + match(input, Token.UP, null); + retval.omo = factory.epsilon(); + + } + break; + case 3 : + // SourceGenTriggers.g:51:9: ^( ALT ( element )+ ) + { + match(input,ALT,FOLLOW_ALT_in_alternative174); + + match(input, Token.DOWN, null); + // SourceGenTriggers.g:51:16: ( element )+ + int cnt4=0; + loop4: + do { + int alt4=2; + int LA4_0 = input.LA(1); + + if ( ((LA4_0>=SEMPRED && LA4_0<=FORCED_ACTION)||LA4_0==ACTION||LA4_0==IMPLIES||LA4_0==ASSIGN||LA4_0==BANG||LA4_0==PLUS_ASSIGN||LA4_0==ROOT||(LA4_0>=DOT && LA4_0<=RANGE)||LA4_0==TREE_BEGIN||LA4_0==NOT||(LA4_0>=TOKEN_REF && LA4_0<=RULE_REF)||LA4_0==STRING_LITERAL||LA4_0==BLOCK||(LA4_0>=OPTIONAL && LA4_0<=POSITIVE_CLOSURE)||LA4_0==GATED_SEMPRED||LA4_0==WILDCARD) ) { + alt4=1; + } + + + switch (alt4) { + case 1 : + // SourceGenTriggers.g:51:18: element + { + pushFollow(FOLLOW_element_in_alternative178); + element2=element(); + + state._fsp--; + + elems.addAll(element2); + + } + break; + + default : + if ( cnt4 >= 1 ) break loop4; + EarlyExitException eee = + new EarlyExitException(4, input); + throw eee; + } + cnt4++; + } while (true); + + + match(input, Token.UP, null); + retval.omo = factory.alternative(elems); + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "alternative" + + + // $ANTLR start "element" + // SourceGenTriggers.g:54:1: element returns [List omos] : ( labeledElement | atom[null] | ebnf | ACTION | FORCED_ACTION | SEMPRED | GATED_SEMPRED | treeSpec ); + public final List element() throws RecognitionException { + List omos = null; + + GrammarAST ACTION6=null; + GrammarAST FORCED_ACTION7=null; + GrammarAST SEMPRED8=null; + List labeledElement3 = null; + + List atom4 = null; + + SrcOp ebnf5 = null; + + + try { + // SourceGenTriggers.g:55:2: ( labeledElement | atom[null] | ebnf | ACTION | FORCED_ACTION | SEMPRED | GATED_SEMPRED | treeSpec ) + int alt6=8; + alt6 = dfa6.predict(input); + switch (alt6) { + case 1 : + // SourceGenTriggers.g:55:4: labeledElement + { + pushFollow(FOLLOW_labeledElement_in_element205); + labeledElement3=labeledElement(); + + state._fsp--; + + omos = labeledElement3; + + } + break; + case 2 : + // SourceGenTriggers.g:56:4: atom[null] + { + pushFollow(FOLLOW_atom_in_element216); + atom4=atom(null); + + state._fsp--; + + omos = atom4; + + } + break; + case 3 : + // SourceGenTriggers.g:57:4: ebnf + { + pushFollow(FOLLOW_ebnf_in_element229); + ebnf5=ebnf(); + + state._fsp--; + + omos = Utils.list(ebnf5); + + } + break; + case 4 : + // SourceGenTriggers.g:58:6: ACTION + { + ACTION6=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_element244); + omos = Utils.list(factory.action(ACTION6)); + + } + break; + case 5 : + // SourceGenTriggers.g:59:6: FORCED_ACTION + { + FORCED_ACTION7=(GrammarAST)match(input,FORCED_ACTION,FOLLOW_FORCED_ACTION_in_element259); + omos = Utils.list(factory.forcedAction(FORCED_ACTION7)); + + } + break; + case 6 : + // SourceGenTriggers.g:60:6: SEMPRED + { + 
SEMPRED8=(GrammarAST)match(input,SEMPRED,FOLLOW_SEMPRED_in_element272); + omos = Utils.list(factory.sempred(SEMPRED8)); + + } + break; + case 7 : + // SourceGenTriggers.g:61:4: GATED_SEMPRED + { + match(input,GATED_SEMPRED,FOLLOW_GATED_SEMPRED_in_element285); + + } + break; + case 8 : + // SourceGenTriggers.g:62:4: treeSpec + { + pushFollow(FOLLOW_treeSpec_in_element290); + treeSpec(); + + state._fsp--; + + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return omos; + } + // $ANTLR end "element" + + + // $ANTLR start "labeledElement" + // SourceGenTriggers.g:65:1: labeledElement returns [List omos] : ( ^( ASSIGN ID atom[$ID] ) | ^( ASSIGN ID block[$ID,null] ) | ^( PLUS_ASSIGN ID atom[$ID] ) | ^( PLUS_ASSIGN ID block[$ID,null] ) ); + public final List labeledElement() throws RecognitionException { + List omos = null; + + GrammarAST ID9=null; + GrammarAST ID11=null; + GrammarAST ID13=null; + GrammarAST ID15=null; + List atom10 = null; + + SrcOp block12 = null; + + List atom14 = null; + + SrcOp block16 = null; + + + try { + // SourceGenTriggers.g:66:2: ( ^( ASSIGN ID atom[$ID] ) | ^( ASSIGN ID block[$ID,null] ) | ^( PLUS_ASSIGN ID atom[$ID] ) | ^( PLUS_ASSIGN ID block[$ID,null] ) ) + int alt7=4; + alt7 = dfa7.predict(input); + switch (alt7) { + case 1 : + // SourceGenTriggers.g:66:4: ^( ASSIGN ID atom[$ID] ) + { + match(input,ASSIGN,FOLLOW_ASSIGN_in_labeledElement306); + + match(input, Token.DOWN, null); + ID9=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement308); + pushFollow(FOLLOW_atom_in_labeledElement310); + atom10=atom(ID9); + + state._fsp--; + + + match(input, Token.UP, null); + omos = atom10; + + } + break; + case 2 : + // SourceGenTriggers.g:67:4: ^( ASSIGN ID block[$ID,null] ) + { + match(input,ASSIGN,FOLLOW_ASSIGN_in_labeledElement324); + + match(input, Token.DOWN, null); + ID11=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement326); + pushFollow(FOLLOW_block_in_labeledElement328); + block12=block(ID11, null); + + state._fsp--; + + + match(input, Token.UP, null); + omos = Utils.list(block12); + + } + break; + case 3 : + // SourceGenTriggers.g:68:4: ^( PLUS_ASSIGN ID atom[$ID] ) + { + match(input,PLUS_ASSIGN,FOLLOW_PLUS_ASSIGN_in_labeledElement339); + + match(input, Token.DOWN, null); + ID13=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement341); + pushFollow(FOLLOW_atom_in_labeledElement343); + atom14=atom(ID13); + + state._fsp--; + + + match(input, Token.UP, null); + omos = atom14; + + } + break; + case 4 : + // SourceGenTriggers.g:69:4: ^( PLUS_ASSIGN ID block[$ID,null] ) + { + match(input,PLUS_ASSIGN,FOLLOW_PLUS_ASSIGN_in_labeledElement355); + + match(input, Token.DOWN, null); + ID15=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement357); + pushFollow(FOLLOW_block_in_labeledElement359); + block16=block(ID15, null); + + state._fsp--; + + + match(input, Token.UP, null); + omos = Utils.list(block16); + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return omos; + } + // $ANTLR end "labeledElement" + + + // $ANTLR start "treeSpec" + // SourceGenTriggers.g:72:1: treeSpec returns [SrcOp omo] : ^( TREE_BEGIN (e= element )+ ) ; + public final SrcOp treeSpec() throws RecognitionException { + SrcOp omo = null; + + List e = null; + + + try { + // SourceGenTriggers.g:73:5: ( ^( TREE_BEGIN (e= element )+ ) ) + // SourceGenTriggers.g:73:7: ^( TREE_BEGIN (e= element )+ ) + { + 
match(input,TREE_BEGIN,FOLLOW_TREE_BEGIN_in_treeSpec382); + + match(input, Token.DOWN, null); + // SourceGenTriggers.g:73:21: (e= element )+ + int cnt8=0; + loop8: + do { + int alt8=2; + int LA8_0 = input.LA(1); + + if ( ((LA8_0>=SEMPRED && LA8_0<=FORCED_ACTION)||LA8_0==ACTION||LA8_0==IMPLIES||LA8_0==ASSIGN||LA8_0==BANG||LA8_0==PLUS_ASSIGN||LA8_0==ROOT||(LA8_0>=DOT && LA8_0<=RANGE)||LA8_0==TREE_BEGIN||LA8_0==NOT||(LA8_0>=TOKEN_REF && LA8_0<=RULE_REF)||LA8_0==STRING_LITERAL||LA8_0==BLOCK||(LA8_0>=OPTIONAL && LA8_0<=POSITIVE_CLOSURE)||LA8_0==GATED_SEMPRED||LA8_0==WILDCARD) ) { + alt8=1; + } + + + switch (alt8) { + case 1 : + // SourceGenTriggers.g:73:22: e= element + { + pushFollow(FOLLOW_element_in_treeSpec388); + e=element(); + + state._fsp--; + + + } + break; + + default : + if ( cnt8 >= 1 ) break loop8; + EarlyExitException eee = + new EarlyExitException(8, input); + throw eee; + } + cnt8++; + } while (true); + + + match(input, Token.UP, null); + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return omo; + } + // $ANTLR end "treeSpec" + + + // $ANTLR start "ebnf" + // SourceGenTriggers.g:76:1: ebnf returns [SrcOp omo] : ( ^( astBlockSuffix block[null,null] ) | ^( OPTIONAL block[null,$OPTIONAL] ) | ^( CLOSURE block[null,$CLOSURE] ) | ^( POSITIVE_CLOSURE block[null,$POSITIVE_CLOSURE] ) | block[null, null] ); + public final SrcOp ebnf() throws RecognitionException { + SrcOp omo = null; + + GrammarAST OPTIONAL17=null; + GrammarAST CLOSURE19=null; + GrammarAST POSITIVE_CLOSURE21=null; + SrcOp block18 = null; + + SrcOp block20 = null; + + SrcOp block22 = null; + + SrcOp block23 = null; + + + try { + // SourceGenTriggers.g:77:2: ( ^( astBlockSuffix block[null,null] ) | ^( OPTIONAL block[null,$OPTIONAL] ) | ^( CLOSURE block[null,$CLOSURE] ) | ^( POSITIVE_CLOSURE block[null,$POSITIVE_CLOSURE] ) | block[null, null] ) + int alt9=5; + switch ( input.LA(1) ) { + case IMPLIES: + case BANG: + case ROOT: + { + alt9=1; + } + break; + case OPTIONAL: + { + alt9=2; + } + break; + case CLOSURE: + { + alt9=3; + } + break; + case POSITIVE_CLOSURE: + { + alt9=4; + } + break; + case BLOCK: + { + alt9=5; + } + break; + default: + NoViableAltException nvae = + new NoViableAltException("", 9, 0, input); + + throw nvae; + } + + switch (alt9) { + case 1 : + // SourceGenTriggers.g:77:4: ^( astBlockSuffix block[null,null] ) + { + pushFollow(FOLLOW_astBlockSuffix_in_ebnf411); + astBlockSuffix(); + + state._fsp--; + + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_block_in_ebnf413); + block(null, null); + + state._fsp--; + + + match(input, Token.UP, null); + + } + break; + case 2 : + // SourceGenTriggers.g:78:4: ^( OPTIONAL block[null,$OPTIONAL] ) + { + OPTIONAL17=(GrammarAST)match(input,OPTIONAL,FOLLOW_OPTIONAL_in_ebnf421); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_block_in_ebnf423); + block18=block(null, OPTIONAL17); + + state._fsp--; + + + match(input, Token.UP, null); + omo = block18; + + } + break; + case 3 : + // SourceGenTriggers.g:79:4: ^( CLOSURE block[null,$CLOSURE] ) + { + CLOSURE19=(GrammarAST)match(input,CLOSURE,FOLLOW_CLOSURE_in_ebnf433); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_block_in_ebnf435); + block20=block(null, CLOSURE19); + + state._fsp--; + + + match(input, Token.UP, null); + omo = block20; + + } + break; + case 4 : + // SourceGenTriggers.g:80:4: ^( POSITIVE_CLOSURE block[null,$POSITIVE_CLOSURE] ) + { + POSITIVE_CLOSURE21=(GrammarAST)match(input,POSITIVE_CLOSURE,FOLLOW_POSITIVE_CLOSURE_in_ebnf446); + + 
match(input, Token.DOWN, null); + pushFollow(FOLLOW_block_in_ebnf448); + block22=block(null, POSITIVE_CLOSURE21); + + state._fsp--; + + + match(input, Token.UP, null); + omo = block22; + + } + break; + case 5 : + // SourceGenTriggers.g:82:5: block[null, null] + { + pushFollow(FOLLOW_block_in_ebnf472); + block23=block(null, null); + + state._fsp--; + + omo = block23; + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return omo; + } + // $ANTLR end "ebnf" + + + // $ANTLR start "astBlockSuffix" + // SourceGenTriggers.g:85:1: astBlockSuffix : ( ROOT | IMPLIES | BANG ); + public final void astBlockSuffix() throws RecognitionException { + try { + // SourceGenTriggers.g:86:5: ( ROOT | IMPLIES | BANG ) + // SourceGenTriggers.g: + { + if ( input.LA(1)==IMPLIES||input.LA(1)==BANG||input.LA(1)==ROOT ) { + input.consume(); + state.errorRecovery=false; + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + throw mse; + } + + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "astBlockSuffix" + + + // $ANTLR start "atom" + // SourceGenTriggers.g:93:1: atom[GrammarAST label] returns [List omos] : ( ^( ROOT range[label] ) | ^( BANG range[label] ) | ^( ROOT notSet[label] ) | ^( BANG notSet[label] ) | notSet[label] | range[label] | ^( DOT ID terminal[label] ) | ^( DOT ID ruleref[label] ) | ^( WILDCARD . ) | WILDCARD | terminal[label] | ruleref[label] ); + public final List atom(GrammarAST label) throws RecognitionException { + List omos = null; + + List range24 = null; + + List notSet25 = null; + + List range26 = null; + + List terminal27 = null; + + List ruleref28 = null; + + + try { + // SourceGenTriggers.g:94:2: ( ^( ROOT range[label] ) | ^( BANG range[label] ) | ^( ROOT notSet[label] ) | ^( BANG notSet[label] ) | notSet[label] | range[label] | ^( DOT ID terminal[label] ) | ^( DOT ID ruleref[label] ) | ^( WILDCARD . 
) | WILDCARD | terminal[label] | ruleref[label] ) + int alt10=12; + alt10 = dfa10.predict(input); + switch (alt10) { + case 1 : + // SourceGenTriggers.g:94:4: ^( ROOT range[label] ) + { + match(input,ROOT,FOLLOW_ROOT_in_atom534); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_range_in_atom536); + range(label); + + state._fsp--; + + + match(input, Token.UP, null); + + } + break; + case 2 : + // SourceGenTriggers.g:95:4: ^( BANG range[label] ) + { + match(input,BANG,FOLLOW_BANG_in_atom544); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_range_in_atom546); + range24=range(label); + + state._fsp--; + + + match(input, Token.UP, null); + omos = range24; + + } + break; + case 3 : + // SourceGenTriggers.g:96:4: ^( ROOT notSet[label] ) + { + match(input,ROOT,FOLLOW_ROOT_in_atom557); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_notSet_in_atom559); + notSet(label); + + state._fsp--; + + + match(input, Token.UP, null); + + } + break; + case 4 : + // SourceGenTriggers.g:97:4: ^( BANG notSet[label] ) + { + match(input,BANG,FOLLOW_BANG_in_atom567); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_notSet_in_atom569); + notSet25=notSet(label); + + state._fsp--; + + + match(input, Token.UP, null); + omos = notSet25; + + } + break; + case 5 : + // SourceGenTriggers.g:98:4: notSet[label] + { + pushFollow(FOLLOW_notSet_in_atom579); + notSet(label); + + state._fsp--; + + + } + break; + case 6 : + // SourceGenTriggers.g:99:4: range[label] + { + pushFollow(FOLLOW_range_in_atom585); + range26=range(label); + + state._fsp--; + + omos = range26; + + } + break; + case 7 : + // SourceGenTriggers.g:100:4: ^( DOT ID terminal[label] ) + { + match(input,DOT,FOLLOW_DOT_in_atom597); + + match(input, Token.DOWN, null); + match(input,ID,FOLLOW_ID_in_atom599); + pushFollow(FOLLOW_terminal_in_atom601); + terminal(label); + + state._fsp--; + + + match(input, Token.UP, null); + + } + break; + case 8 : + // SourceGenTriggers.g:101:4: ^( DOT ID ruleref[label] ) + { + match(input,DOT,FOLLOW_DOT_in_atom609); + + match(input, Token.DOWN, null); + match(input,ID,FOLLOW_ID_in_atom611); + pushFollow(FOLLOW_ruleref_in_atom613); + ruleref(label); + + state._fsp--; + + + match(input, Token.UP, null); + + } + break; + case 9 : + // SourceGenTriggers.g:102:7: ^( WILDCARD . 
) + { + match(input,WILDCARD,FOLLOW_WILDCARD_in_atom624); + + match(input, Token.DOWN, null); + matchAny(input); + + match(input, Token.UP, null); + + } + break; + case 10 : + // SourceGenTriggers.g:103:7: WILDCARD + { + match(input,WILDCARD,FOLLOW_WILDCARD_in_atom635); + + } + break; + case 11 : + // SourceGenTriggers.g:104:9: terminal[label] + { + pushFollow(FOLLOW_terminal_in_atom645); + terminal27=terminal(label); + + state._fsp--; + + omos = terminal27; + + } + break; + case 12 : + // SourceGenTriggers.g:105:9: ruleref[label] + { + pushFollow(FOLLOW_ruleref_in_atom661); + ruleref28=ruleref(label); + + state._fsp--; + + omos = ruleref28; + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return omos; + } + // $ANTLR end "atom" + + + // $ANTLR start "notSet" + // SourceGenTriggers.g:108:1: notSet[GrammarAST label] returns [List omos] : ( ^( NOT terminal[label] ) | ^( NOT block[label,null] ) ); + public final List notSet(GrammarAST label) throws RecognitionException { + List omos = null; + + try { + // SourceGenTriggers.g:109:5: ( ^( NOT terminal[label] ) | ^( NOT block[label,null] ) ) + int alt11=2; + int LA11_0 = input.LA(1); + + if ( (LA11_0==NOT) ) { + int LA11_1 = input.LA(2); + + if ( (LA11_1==DOWN) ) { + int LA11_2 = input.LA(3); + + if ( (LA11_2==BANG||LA11_2==ROOT||LA11_2==TOKEN_REF||LA11_2==STRING_LITERAL) ) { + alt11=1; + } + else if ( (LA11_2==BLOCK) ) { + alt11=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 11, 2, input); + + throw nvae; + } + } + else { + NoViableAltException nvae = + new NoViableAltException("", 11, 1, input); + + throw nvae; + } + } + else { + NoViableAltException nvae = + new NoViableAltException("", 11, 0, input); + + throw nvae; + } + switch (alt11) { + case 1 : + // SourceGenTriggers.g:109:7: ^( NOT terminal[label] ) + { + match(input,NOT,FOLLOW_NOT_in_notSet690); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_terminal_in_notSet692); + terminal(label); + + state._fsp--; + + + match(input, Token.UP, null); + + } + break; + case 2 : + // SourceGenTriggers.g:110:7: ^( NOT block[label,null] ) + { + match(input,NOT,FOLLOW_NOT_in_notSet703); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_block_in_notSet705); + block(label, null); + + state._fsp--; + + + match(input, Token.UP, null); + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return omos; + } + // $ANTLR end "notSet" + + + // $ANTLR start "ruleref" + // SourceGenTriggers.g:113:1: ruleref[GrammarAST label] returns [List omos] : ( ^( ROOT ^( RULE_REF ( ARG_ACTION )? ) ) | ^( BANG ^( RULE_REF ( ARG_ACTION )? ) ) | ^( RULE_REF ( ARG_ACTION )? ) ); + public final List ruleref(GrammarAST label) throws RecognitionException { + List omos = null; + + GrammarAST RULE_REF29=null; + GrammarAST ARG_ACTION30=null; + GrammarAST RULE_REF31=null; + GrammarAST ARG_ACTION32=null; + + try { + // SourceGenTriggers.g:114:5: ( ^( ROOT ^( RULE_REF ( ARG_ACTION )? ) ) | ^( BANG ^( RULE_REF ( ARG_ACTION )? ) ) | ^( RULE_REF ( ARG_ACTION )? ) ) + int alt15=3; + switch ( input.LA(1) ) { + case ROOT: + { + alt15=1; + } + break; + case BANG: + { + alt15=2; + } + break; + case RULE_REF: + { + alt15=3; + } + break; + default: + NoViableAltException nvae = + new NoViableAltException("", 15, 0, input); + + throw nvae; + } + + switch (alt15) { + case 1 : + // SourceGenTriggers.g:114:7: ^( ROOT ^( RULE_REF ( ARG_ACTION )? 
) ) + { + match(input,ROOT,FOLLOW_ROOT_in_ruleref730); + + match(input, Token.DOWN, null); + match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref733); + + if ( input.LA(1)==Token.DOWN ) { + match(input, Token.DOWN, null); + // SourceGenTriggers.g:114:25: ( ARG_ACTION )? + int alt12=2; + int LA12_0 = input.LA(1); + + if ( (LA12_0==ARG_ACTION) ) { + alt12=1; + } + switch (alt12) { + case 1 : + // SourceGenTriggers.g:114:25: ARG_ACTION + { + match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleref735); + + } + break; + + } + + + match(input, Token.UP, null); + } + + match(input, Token.UP, null); + + } + break; + case 2 : + // SourceGenTriggers.g:115:7: ^( BANG ^( RULE_REF ( ARG_ACTION )? ) ) + { + match(input,BANG,FOLLOW_BANG_in_ruleref747); + + match(input, Token.DOWN, null); + RULE_REF29=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref750); + + if ( input.LA(1)==Token.DOWN ) { + match(input, Token.DOWN, null); + // SourceGenTriggers.g:115:25: ( ARG_ACTION )? + int alt13=2; + int LA13_0 = input.LA(1); + + if ( (LA13_0==ARG_ACTION) ) { + alt13=1; + } + switch (alt13) { + case 1 : + // SourceGenTriggers.g:115:25: ARG_ACTION + { + ARG_ACTION30=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleref752); + + } + break; + + } + + + match(input, Token.UP, null); + } + + match(input, Token.UP, null); + omos = factory.ruleRef(RULE_REF29, label, ARG_ACTION30); + + } + break; + case 3 : + // SourceGenTriggers.g:116:7: ^( RULE_REF ( ARG_ACTION )? ) + { + RULE_REF31=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref766); + + if ( input.LA(1)==Token.DOWN ) { + match(input, Token.DOWN, null); + // SourceGenTriggers.g:116:18: ( ARG_ACTION )? + int alt14=2; + int LA14_0 = input.LA(1); + + if ( (LA14_0==ARG_ACTION) ) { + alt14=1; + } + switch (alt14) { + case 1 : + // SourceGenTriggers.g:116:18: ARG_ACTION + { + ARG_ACTION32=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleref768); + + } + break; + + } + + + match(input, Token.UP, null); + } + omos = factory.ruleRef(RULE_REF31, label, ARG_ACTION32); + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return omos; + } + // $ANTLR end "ruleref" + + + // $ANTLR start "range" + // SourceGenTriggers.g:119:1: range[GrammarAST label] returns [List omos] : ^( RANGE a= STRING_LITERAL b= STRING_LITERAL ) ; + public final List range(GrammarAST label) throws RecognitionException { + List omos = null; + + GrammarAST a=null; + GrammarAST b=null; + + try { + // SourceGenTriggers.g:120:5: ( ^( RANGE a= STRING_LITERAL b= STRING_LITERAL ) ) + // SourceGenTriggers.g:120:7: ^( RANGE a= STRING_LITERAL b= STRING_LITERAL ) + { + match(input,RANGE,FOLLOW_RANGE_in_range797); + + match(input, Token.DOWN, null); + a=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_range801); + b=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_range805); + + match(input, Token.UP, null); + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return omos; + } + // $ANTLR end "range" + + + // $ANTLR start "terminal" + // SourceGenTriggers.g:123:1: terminal[GrammarAST label] returns [List omos] : ( ^( STRING_LITERAL . ) | STRING_LITERAL | ^( TOKEN_REF ARG_ACTION . ) | ^( TOKEN_REF . 
) | TOKEN_REF | ^( ROOT terminal[label] ) | ^( BANG terminal[label] ) ); + public final List terminal(GrammarAST label) throws RecognitionException { + List omos = null; + + GrammarAST STRING_LITERAL33=null; + GrammarAST STRING_LITERAL34=null; + GrammarAST TOKEN_REF35=null; + GrammarAST ARG_ACTION36=null; + GrammarAST TOKEN_REF37=null; + GrammarAST TOKEN_REF38=null; + + try { + // SourceGenTriggers.g:124:5: ( ^( STRING_LITERAL . ) | STRING_LITERAL | ^( TOKEN_REF ARG_ACTION . ) | ^( TOKEN_REF . ) | TOKEN_REF | ^( ROOT terminal[label] ) | ^( BANG terminal[label] ) ) + int alt16=7; + alt16 = dfa16.predict(input); + switch (alt16) { + case 1 : + // SourceGenTriggers.g:124:8: ^( STRING_LITERAL . ) + { + STRING_LITERAL33=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_terminal830); + + match(input, Token.DOWN, null); + matchAny(input); + + match(input, Token.UP, null); + omos = factory.stringRef(STRING_LITERAL33, label); + + } + break; + case 2 : + // SourceGenTriggers.g:125:7: STRING_LITERAL + { + STRING_LITERAL34=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_terminal845); + omos = factory.stringRef(STRING_LITERAL34, label); + + } + break; + case 3 : + // SourceGenTriggers.g:126:7: ^( TOKEN_REF ARG_ACTION . ) + { + TOKEN_REF35=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_terminal859); + + match(input, Token.DOWN, null); + ARG_ACTION36=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_terminal861); + matchAny(input); + + match(input, Token.UP, null); + omos = factory.tokenRef(TOKEN_REF35, label, ARG_ACTION36); + + } + break; + case 4 : + // SourceGenTriggers.g:127:7: ^( TOKEN_REF . ) + { + TOKEN_REF37=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_terminal875); + + match(input, Token.DOWN, null); + matchAny(input); + + match(input, Token.UP, null); + omos = factory.tokenRef(TOKEN_REF37, label, null); + + } + break; + case 5 : + // SourceGenTriggers.g:128:7: TOKEN_REF + { + TOKEN_REF38=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_terminal891); + omos = factory.tokenRef(TOKEN_REF38, label, null); + + } + break; + case 6 : + // SourceGenTriggers.g:129:7: ^( ROOT terminal[label] ) + { + match(input,ROOT,FOLLOW_ROOT_in_terminal906); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_terminal_in_terminal908); + terminal(label); + + state._fsp--; + + + match(input, Token.UP, null); + + } + break; + case 7 : + // SourceGenTriggers.g:130:7: ^( BANG terminal[label] ) + { + match(input,BANG,FOLLOW_BANG_in_terminal919); + + match(input, Token.DOWN, null); + pushFollow(FOLLOW_terminal_in_terminal921); + terminal(label); + + state._fsp--; + + + match(input, Token.UP, null); + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return omos; + } + // $ANTLR end "terminal" + + // Delegated rules + + + protected DFA6 dfa6 = new DFA6(this); + protected DFA7 dfa7 = new DFA7(this); + protected DFA10 dfa10 = new DFA10(this); + protected DFA16 dfa16 = new DFA16(this); + static final String DFA6_eotS = + "\15\uffff"; + static final String DFA6_eofS = + "\15\uffff"; + static final String DFA6_minS = + "\1\4\1\uffff\2\2\7\uffff\2\60"; + static final String DFA6_maxS = + "\1\141\1\uffff\2\2\7\uffff\2\115"; + static final String DFA6_acceptS = + "\1\uffff\1\1\2\uffff\1\2\1\3\1\4\1\5\1\6\1\7\1\10\2\uffff"; + static final String DFA6_specialS = + "\15\uffff}>"; + static final String[] DFA6_transitionS = { + "\1\10\1\7\12\uffff\1\6\32\uffff\1\5\2\uffff\1\1\1\uffff\1\3"+ + 
"\2\uffff\1\1\1\uffff\1\2\1\uffff\2\4\2\uffff\1\12\1\uffff\1"+ + "\4\1\uffff\2\4\3\uffff\1\4\10\uffff\1\5\1\uffff\3\5\14\uffff"+ + "\1\11\2\uffff\1\4", + "", + "\1\13", + "\1\14", + "", + "", + "", + "", + "", + "", + "", + "\1\4\4\uffff\1\4\2\uffff\1\4\4\uffff\1\4\1\uffff\2\4\3\uffff"+ + "\1\4\10\uffff\1\5", + "\1\4\4\uffff\1\4\2\uffff\1\4\4\uffff\1\4\1\uffff\2\4\3\uffff"+ + "\1\4\10\uffff\1\5" + }; + + static final short[] DFA6_eot = DFA.unpackEncodedString(DFA6_eotS); + static final short[] DFA6_eof = DFA.unpackEncodedString(DFA6_eofS); + static final char[] DFA6_min = DFA.unpackEncodedStringToUnsignedChars(DFA6_minS); + static final char[] DFA6_max = DFA.unpackEncodedStringToUnsignedChars(DFA6_maxS); + static final short[] DFA6_accept = DFA.unpackEncodedString(DFA6_acceptS); + static final short[] DFA6_special = DFA.unpackEncodedString(DFA6_specialS); + static final short[][] DFA6_transition; + + static { + int numStates = DFA6_transitionS.length; + DFA6_transition = new short[numStates][]; + for (int i=0; i omos] : ( labeledElement | atom[null] | ebnf | ACTION | FORCED_ACTION | SEMPRED | GATED_SEMPRED | treeSpec );"; + } + } + static final String DFA7_eotS = + "\13\uffff"; + static final String DFA7_eofS = + "\13\uffff"; + static final String DFA7_minS = + "\1\56\2\2\2\127\2\60\4\uffff"; + static final String DFA7_maxS = + "\1\63\2\2\2\127\2\141\4\uffff"; + static final String DFA7_acceptS = + "\7\uffff\1\1\1\2\1\3\1\4"; + static final String DFA7_specialS = + "\13\uffff}>"; + static final String[] DFA7_transitionS = { + "\1\1\4\uffff\1\2", + "\1\3", + "\1\4", + "\1\5", + "\1\6", + "\1\7\4\uffff\1\7\1\uffff\2\7\4\uffff\1\7\1\uffff\2\7\3\uffff"+ + "\1\7\10\uffff\1\10\23\uffff\1\7", + "\1\11\4\uffff\1\11\1\uffff\2\11\4\uffff\1\11\1\uffff\2\11\3"+ + "\uffff\1\11\10\uffff\1\12\23\uffff\1\11", + "", + "", + "", + "" + }; + + static final short[] DFA7_eot = DFA.unpackEncodedString(DFA7_eotS); + static final short[] DFA7_eof = DFA.unpackEncodedString(DFA7_eofS); + static final char[] DFA7_min = DFA.unpackEncodedStringToUnsignedChars(DFA7_minS); + static final char[] DFA7_max = DFA.unpackEncodedStringToUnsignedChars(DFA7_maxS); + static final short[] DFA7_accept = DFA.unpackEncodedString(DFA7_acceptS); + static final short[] DFA7_special = DFA.unpackEncodedString(DFA7_specialS); + static final short[][] DFA7_transition; + + static { + int numStates = DFA7_transitionS.length; + DFA7_transition = new short[numStates][]; + for (int i=0; i omos] : ( ^( ASSIGN ID atom[$ID] ) | ^( ASSIGN ID block[$ID,null] ) | ^( PLUS_ASSIGN ID atom[$ID] ) | ^( PLUS_ASSIGN ID block[$ID,null] ) );"; + } + } + static final String DFA10_eotS = + "\31\uffff"; + static final String DFA10_eofS = + "\31\uffff"; + static final String DFA10_minS = + "\1\60\2\2\2\uffff\2\2\2\uffff\2\60\1\127\6\uffff\1\60\1\uffff\2"+ + "\2\1\uffff\2\60"; + static final String DFA10_maxS = + "\1\141\2\2\2\uffff\1\2\1\141\2\uffff\2\104\1\127\6\uffff\1\104\1"+ + "\uffff\2\2\1\uffff\2\104"; + static final String DFA10_acceptS = + "\3\uffff\1\5\1\6\2\uffff\1\13\1\14\3\uffff\1\11\1\12\1\1\1\3\1\2"+ + "\1\4\1\uffff\1\7\2\uffff\1\10\2\uffff"; + static final String DFA10_specialS = + "\31\uffff}>"; + static final String[] DFA10_transitionS = { + "\1\2\4\uffff\1\1\1\uffff\1\5\1\4\4\uffff\1\3\1\uffff\1\7\1\10"+ + "\3\uffff\1\7\34\uffff\1\6", + "\1\11", + "\1\12", + "", + "", + "\1\13", + "\1\14\3\15\12\uffff\1\15\32\uffff\1\15\2\uffff\1\15\1\uffff"+ + "\1\15\2\uffff\1\15\1\uffff\1\15\1\uffff\2\15\2\uffff\1\15\1"+ + 
"\uffff\1\15\1\uffff\2\15\3\uffff\1\15\10\uffff\1\15\1\uffff"+ + "\3\15\14\uffff\1\15\2\uffff\1\15", + "", + "", + "\1\7\4\uffff\1\7\2\uffff\1\16\4\uffff\1\17\1\uffff\1\7\1\10"+ + "\3\uffff\1\7", + "\1\7\4\uffff\1\7\2\uffff\1\20\4\uffff\1\21\1\uffff\1\7\1\10"+ + "\3\uffff\1\7", + "\1\22", + "", + "", + "", + "", + "", + "", + "\1\25\4\uffff\1\24\11\uffff\1\23\1\26\3\uffff\1\23", + "", + "\1\27", + "\1\30", + "", + "\1\23\4\uffff\1\23\11\uffff\1\23\1\26\3\uffff\1\23", + "\1\23\4\uffff\1\23\11\uffff\1\23\1\26\3\uffff\1\23" + }; + + static final short[] DFA10_eot = DFA.unpackEncodedString(DFA10_eotS); + static final short[] DFA10_eof = DFA.unpackEncodedString(DFA10_eofS); + static final char[] DFA10_min = DFA.unpackEncodedStringToUnsignedChars(DFA10_minS); + static final char[] DFA10_max = DFA.unpackEncodedStringToUnsignedChars(DFA10_maxS); + static final short[] DFA10_accept = DFA.unpackEncodedString(DFA10_acceptS); + static final short[] DFA10_special = DFA.unpackEncodedString(DFA10_specialS); + static final short[][] DFA10_transition; + + static { + int numStates = DFA10_transitionS.length; + DFA10_transition = new short[numStates][]; + for (int i=0; i omos] : ( ^( ROOT range[label] ) | ^( BANG range[label] ) | ^( ROOT notSet[label] ) | ^( BANG notSet[label] ) | notSet[label] | range[label] | ^( DOT ID terminal[label] ) | ^( DOT ID ruleref[label] ) | ^( WILDCARD . ) | WILDCARD | terminal[label] | ruleref[label] );"; + } + } + static final String DFA16_eotS = + "\14\uffff"; + static final String DFA16_eofS = + "\14\uffff"; + static final String DFA16_minS = + "\1\60\2\2\4\uffff\1\4\1\uffff\1\2\2\uffff"; + static final String DFA16_maxS = + "\1\104\2\141\4\uffff\1\146\1\uffff\1\146\2\uffff"; + static final String DFA16_acceptS = + "\3\uffff\1\6\1\7\1\1\1\2\1\uffff\1\5\1\uffff\1\4\1\3"; + static final String DFA16_specialS = + "\14\uffff}>"; + static final String[] DFA16_transitionS = { + "\1\4\4\uffff\1\3\11\uffff\1\2\4\uffff\1\1", + "\1\5\3\6\12\uffff\1\6\32\uffff\1\6\2\uffff\1\6\1\uffff\1\6"+ + "\2\uffff\1\6\1\uffff\1\6\1\uffff\2\6\2\uffff\1\6\1\uffff\1\6"+ + "\1\uffff\2\6\3\uffff\1\6\10\uffff\1\6\1\uffff\3\6\14\uffff\1"+ + "\6\2\uffff\1\6", + "\1\7\3\10\12\uffff\1\10\32\uffff\1\10\2\uffff\1\10\1\uffff"+ + "\1\10\2\uffff\1\10\1\uffff\1\10\1\uffff\2\10\2\uffff\1\10\1"+ + "\uffff\1\10\1\uffff\2\10\3\uffff\1\10\10\uffff\1\10\1\uffff"+ + "\3\10\14\uffff\1\10\2\uffff\1\10", + "", + "", + "", + "", + "\12\12\1\11\130\12", + "", + "\2\12\143\13", + "", + "" + }; + + static final short[] DFA16_eot = DFA.unpackEncodedString(DFA16_eotS); + static final short[] DFA16_eof = DFA.unpackEncodedString(DFA16_eofS); + static final char[] DFA16_min = DFA.unpackEncodedStringToUnsignedChars(DFA16_minS); + static final char[] DFA16_max = DFA.unpackEncodedStringToUnsignedChars(DFA16_maxS); + static final short[] DFA16_accept = DFA.unpackEncodedString(DFA16_acceptS); + static final short[] DFA16_special = DFA.unpackEncodedString(DFA16_specialS); + static final short[][] DFA16_transition; + + static { + int numStates = DFA16_transitionS.length; + DFA16_transition = new short[numStates][]; + for (int i=0; i omos] : ( ^( STRING_LITERAL . ) | STRING_LITERAL | ^( TOKEN_REF ARG_ACTION . ) | ^( TOKEN_REF . 
) | TOKEN_REF | ^( ROOT terminal[label] ) | ^( BANG terminal[label] ) );"; + } + } + + + public static final BitSet FOLLOW_BLOCK_in_block71 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_OPTIONS_in_block75 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_alternative_in_block95 = new BitSet(new long[]{0x0000000000000008L,0x0000004000200000L}); + public static final BitSet FOLLOW_ALT_REWRITE_in_alternative141 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_alternative_in_alternative145 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000007FFFFFFFFFL}); + public static final BitSet FOLLOW_ALT_in_alternative157 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_EPSILON_in_alternative159 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_ALT_in_alternative174 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_element_in_alternative178 = new BitSet(new long[]{0xA9A9480000010038L,0x000000024003A011L}); + public static final BitSet FOLLOW_labeledElement_in_element205 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_atom_in_element216 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ebnf_in_element229 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ACTION_in_element244 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_FORCED_ACTION_in_element259 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_SEMPRED_in_element272 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_GATED_SEMPRED_in_element285 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_treeSpec_in_element290 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ASSIGN_in_labeledElement306 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_labeledElement308 = new BitSet(new long[]{0xA1A1000000000000L,0x0000000200000011L}); + public static final BitSet FOLLOW_atom_in_labeledElement310 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_ASSIGN_in_labeledElement324 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_labeledElement326 = new BitSet(new long[]{0x0021080000000000L,0x000000000003A000L}); + public static final BitSet FOLLOW_block_in_labeledElement328 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_PLUS_ASSIGN_in_labeledElement339 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_labeledElement341 = new BitSet(new long[]{0xA1A1000000000000L,0x0000000200000011L}); + public static final BitSet FOLLOW_atom_in_labeledElement343 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_PLUS_ASSIGN_in_labeledElement355 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_labeledElement357 = new BitSet(new long[]{0x0021080000000000L,0x000000000003A000L}); + public static final BitSet FOLLOW_block_in_labeledElement359 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_TREE_BEGIN_in_treeSpec382 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_element_in_treeSpec388 = new BitSet(new 
long[]{0xA9A9480000010038L,0x000000024003A011L}); + public static final BitSet FOLLOW_astBlockSuffix_in_ebnf411 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_block_in_ebnf413 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_OPTIONAL_in_ebnf421 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_block_in_ebnf423 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_CLOSURE_in_ebnf433 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_block_in_ebnf435 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_POSITIVE_CLOSURE_in_ebnf446 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_block_in_ebnf448 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_block_in_ebnf472 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_set_in_astBlockSuffix0 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ROOT_in_atom534 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_range_in_atom536 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_BANG_in_atom544 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_range_in_atom546 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_ROOT_in_atom557 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_notSet_in_atom559 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_BANG_in_atom567 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_notSet_in_atom569 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_notSet_in_atom579 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_range_in_atom585 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_DOT_in_atom597 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_atom599 = new BitSet(new long[]{0x8021000000000000L,0x0000000000000010L}); + public static final BitSet FOLLOW_terminal_in_atom601 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_DOT_in_atom609 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_atom611 = new BitSet(new long[]{0xA1A1000000000000L,0x0000000200000011L}); + public static final BitSet FOLLOW_ruleref_in_atom613 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_WILDCARD_in_atom624 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_WILDCARD_in_atom635 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_terminal_in_atom645 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ruleref_in_atom661 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_NOT_in_notSet690 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_terminal_in_notSet692 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_NOT_in_notSet703 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_block_in_notSet705 = new BitSet(new long[]{0x0000000000000008L}); + public static final 
BitSet FOLLOW_ROOT_in_ruleref730 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_RULE_REF_in_ruleref733 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ARG_ACTION_in_ruleref735 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_BANG_in_ruleref747 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_RULE_REF_in_ruleref750 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ARG_ACTION_in_ruleref752 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_RULE_REF_in_ruleref766 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ARG_ACTION_in_ruleref768 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_RANGE_in_range797 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_STRING_LITERAL_in_range801 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000010L}); + public static final BitSet FOLLOW_STRING_LITERAL_in_range805 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_STRING_LITERAL_in_terminal830 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_STRING_LITERAL_in_terminal845 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_TOKEN_REF_in_terminal859 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ARG_ACTION_in_terminal861 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF0L,0x0000007FFFFFFFFFL}); + public static final BitSet FOLLOW_TOKEN_REF_in_terminal875 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_TOKEN_REF_in_terminal891 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ROOT_in_terminal906 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_terminal_in_terminal908 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_BANG_in_terminal919 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_terminal_in_terminal921 = new BitSet(new long[]{0x0000000000000008L}); + +} \ No newline at end of file diff --git a/tool/src/org/antlr/v4/codegen/model/Action.java b/tool/src/org/antlr/v4/codegen/model/Action.java new file mode 100644 index 000000000..24b1fbc62 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/Action.java @@ -0,0 +1,29 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.*; +import org.antlr.v4.codegen.model.actions.ActionChunk; +import org.antlr.v4.tool.*; + +import java.util.List; + +/** */ +public class Action extends RuleElement { + public List chunks; + public Action(OutputModelFactory factory, GrammarAST ast) { + super(factory,ast); + RuleFunction rf = null; + if ( factory.currentRule.size()>0 ) rf = factory.currentRule.peek(); + chunks = ActionTranslator.translateAction(factory, rf, ast.token, (ActionAST)ast); + //System.out.println("actions="+chunks); + } + +// @Override +// public List getChildren() { +// final List sup = super.getChildren(); +// return new ArrayList() {{ +// if ( sup!=null ) addAll(sup); +// add("chunks"); +// }}; +// } + +} diff --git a/tool/src/org/antlr/v4/codegen/model/AddToList.java b/tool/src/org/antlr/v4/codegen/model/AddToList.java new file mode 100644 index 000000000..840d3c7df --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/AddToList.java @@ -0,0 +1,15 
@@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; + +/** */ +public class AddToList extends SrcOp { + public String listName; + public LabeledOp opWithResultToAdd; + + public AddToList(OutputModelFactory factory, String listName, LabeledOp opWithResultToAdd) { + super(factory); + this.listName = listName; + this.opWithResultToAdd = opWithResultToAdd; + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/AltBlock.java b/tool/src/org/antlr/v4/codegen/model/AltBlock.java new file mode 100644 index 000000000..42ec4d653 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/AltBlock.java @@ -0,0 +1,20 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.runtime.atn.BlockStartState; +import org.antlr.v4.tool.GrammarAST; + +import java.util.List; + +public class AltBlock extends Choice { + public ThrowNoViableAlt error; + + public AltBlock(OutputModelFactory factory, + GrammarAST blkOrEbnfRootAST, + List alts) + { + super(factory, blkOrEbnfRootAST, alts); + decision = ((BlockStartState)blkOrEbnfRootAST.atnState).decision; + this.error = new ThrowNoViableAlt(factory, blkOrEbnfRootAST, null); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/CaptureNextToken.java b/tool/src/org/antlr/v4/codegen/model/CaptureNextToken.java new file mode 100644 index 000000000..0a894e9ab --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/CaptureNextToken.java @@ -0,0 +1,6 @@ +package org.antlr.v4.codegen.model; + +public class CaptureNextToken extends SrcOp { + public String varName; + public CaptureNextToken(String varName) { this.varName = varName; } +} diff --git a/tool/src/org/antlr/v4/codegen/model/CaptureNextTokenType.java b/tool/src/org/antlr/v4/codegen/model/CaptureNextTokenType.java new file mode 100644 index 000000000..2bcf45e08 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/CaptureNextTokenType.java @@ -0,0 +1,7 @@ +package org.antlr.v4.codegen.model; + +/** */ +public class CaptureNextTokenType extends SrcOp { + public String varName; + public CaptureNextTokenType(String varName) { this.varName = varName; } +} diff --git a/tool/src/org/antlr/v4/codegen/model/Choice.java b/tool/src/org/antlr/v4/codegen/model/Choice.java new file mode 100644 index 000000000..f6c13d311 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/Choice.java @@ -0,0 +1,58 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.tool.GrammarAST; + +import java.util.*; + +/** The class hierarchy underneath SrcOp is pretty deep but makes sense that, + * for example LL1StarBlock is a kind of LL1Loop which is a kind of Choice. + * The problem is it's impossible to figure + * out how to construct one of these deeply nested objects because of the + * long super constructor call chain. Instead, I decided to in-line all of + * this and then look for opportunities to re-factor code into functions. + * It makes sense to use a class hierarchy to share data fields, but I don't + * think it makes sense to factor code using super constructors because + * it has too much work to do. 
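+ *
+ * For example, LL1AltBlock's constructor computes its own decision number
+ * and LL1 lookahead sets and creates its ThrowNoViableAlt error directly,
+ * rather than having those passed up through Choice/LL1Choice super
+ * constructor arguments.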
+ */ +public abstract class Choice extends RuleElement { + public int decision = -1; + public List alts; + public List preamble; + + public Choice(OutputModelFactory factory, + GrammarAST blkOrEbnfRootAST, + List alts) + { + super(factory, blkOrEbnfRootAST); + this.alts = alts; + } + + public void addPreambleOp(SrcOp op) { + if ( preamble==null ) preamble = new ArrayList(); + preamble.add(op); + } + + public List getAltLookaheadAsStringLists(IntervalSet[] altLookSets) { + List altLook = new ArrayList(); + for (int a=1; a ops; + + public CodeBlock(OutputModelFactory factory) { this.factory = factory; } + + public CodeBlock(OutputModelFactory factory, List ops) { + super(factory); + this.ops = ops; + } + + public CodeBlock(OutputModelFactory factory, final SrcOp elem) { + this(factory, new ArrayList() {{ add(elem); }}); + } + +// @Override +// public List getChildren() { +// final List sup = super.getChildren(); +// return new ArrayList() {{ if ( sup!=null ) addAll(sup); add("ops"); }}; +// } +} diff --git a/tool/src/org/antlr/v4/codegen/model/Decl.java b/tool/src/org/antlr/v4/codegen/model/Decl.java new file mode 100644 index 000000000..89aff3b58 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/Decl.java @@ -0,0 +1,29 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; + +/** */ +public class Decl extends SrcOp { + public String name; + public String decl; // whole thing if copied from action + + public Decl(OutputModelFactory factory, String name, String decl) { + this(factory, name); + this.decl = decl; + } + + public Decl(OutputModelFactory factory, String name) { + super(factory); + this.name = name; + } + + @Override + public int hashCode() { + return name.hashCode(); + } + + @Override + public boolean equals(Object obj) { + return name.equals(((Decl)obj).name); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/ForcedAction.java b/tool/src/org/antlr/v4/codegen/model/ForcedAction.java new file mode 100644 index 000000000..6b2151b26 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/ForcedAction.java @@ -0,0 +1,10 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.tool.GrammarAST; + +public class ForcedAction extends Action { + public ForcedAction(OutputModelFactory factory, GrammarAST ast) { + super(factory, ast); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/LL1AltBlock.java b/tool/src/org/antlr/v4/codegen/model/LL1AltBlock.java new file mode 100644 index 000000000..9fe4f3031 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/LL1AltBlock.java @@ -0,0 +1,25 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.runtime.atn.DecisionState; +import org.antlr.v4.tool.GrammarAST; + +import java.util.List; + +/** (A | B | C) */ +public class LL1AltBlock extends LL1Choice { + public LL1AltBlock(OutputModelFactory factory, GrammarAST blkAST, List alts) { + super(factory, blkAST, alts); + this.decision = ((DecisionState)blkAST.atnState).decision; + + /** Lookahead for each alt 1..n */ +// IntervalSet[] altLookSets = LinearApproximator.getLL1LookaheadSets(dfa); + IntervalSet[] altLookSets = factory.g.decisionLOOK.get(decision); + altLook = getAltLookaheadAsStringLists(altLookSets); + + IntervalSet expecting = IntervalSet.or(altLookSets); // combine alt sets + this.error = new ThrowNoViableAlt(factory, blkAST, expecting); + System.out.println(blkAST.toStringTree()+" 
LL1AltBlock expecting="+expecting); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/LL1Loop.java b/tool/src/org/antlr/v4/codegen/model/LL1Loop.java new file mode 100644 index 000000000..587e51fe9 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/LL1Loop.java @@ -0,0 +1,36 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.tool.GrammarAST; + +import java.util.*; + +/** */ +public abstract class LL1Loop extends Choice { + public OutputModelObject loopExpr; + public List iteration; + public Sync sync; + + public LL1Loop(OutputModelFactory factory, + GrammarAST blkAST, + List alts) + { + super(factory, blkAST, alts); + } + + public void addIterationOp(SrcOp op) { + if ( iteration==null ) iteration = new ArrayList(); + iteration.add(op); + } + + public SrcOp addCodeForLoopLookaheadTempVar(IntervalSet look) { + SrcOp expr = addCodeForLookaheadTempVar(look); + if ( expr instanceof TestSetInline ) { + TestSetInline e = (TestSetInline)expr; + CaptureNextTokenType nextType = new CaptureNextTokenType(e.varName); + addIterationOp(nextType); + } + return expr; + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/LL1OptionalBlockSingleAlt.java b/tool/src/org/antlr/v4/codegen/model/LL1OptionalBlockSingleAlt.java new file mode 100644 index 000000000..48619e2a2 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/LL1OptionalBlockSingleAlt.java @@ -0,0 +1,36 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.runtime.atn.DecisionState; +import org.antlr.v4.tool.GrammarAST; + +import java.util.List; + +/** (A B C)? */ +public class LL1OptionalBlockSingleAlt extends LL1Choice { + public OutputModelObject expr; + public OutputModelObject followExpr; + + public LL1OptionalBlockSingleAlt(OutputModelFactory factory, + GrammarAST blkAST, + List alts) + { + super(factory, blkAST, alts); + this.decision = ((DecisionState)blkAST.atnState).decision; + + /** Lookahead for each alt 1..n */ +// IntervalSet[] altLookSets = LinearApproximator.getLL1LookaheadSets(dfa); + IntervalSet[] altLookSets = factory.g.decisionLOOK.get(decision); + altLook = getAltLookaheadAsStringLists(altLookSets); + IntervalSet look = altLookSets[1]; + IntervalSet followLook = altLookSets[2]; + + IntervalSet expecting = (IntervalSet)look.or(followLook); + this.error = new ThrowNoViableAlt(factory, blkAST, expecting); + System.out.println(blkAST.toStringTree()+" LL1OptionalBlockSingleAlt expecting="+expecting); + + expr = addCodeForLookaheadTempVar(look); + followExpr = factory.getLL1Test(followLook, blkAST); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/LL1StarBlock.java b/tool/src/org/antlr/v4/codegen/model/LL1StarBlock.java new file mode 100644 index 000000000..504dc5108 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/LL1StarBlock.java @@ -0,0 +1,37 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.runtime.atn.StarBlockStartState; +import org.antlr.v4.tool.GrammarAST; + +import java.util.List; + +/** */ +public class LL1StarBlock extends LL1Loop { + /** Token names for each alt 0..n-1 */ + public List altLook; + public String loopLabel; + public String[] exitLook; + + public LL1StarBlock(OutputModelFactory factory, GrammarAST blkAST, List alts) { + super(factory, blkAST, alts); + + StarBlockStartState blkStart = 
(StarBlockStartState)blkAST.atnState; + this.decision = blkStart.decision; + + /** Lookahead for each alt 1..n */ + IntervalSet[] altLookSets = factory.g.decisionLOOK.get(decision); + IntervalSet lastLook = altLookSets[altLookSets.length-1]; + IntervalSet[] copy = new IntervalSet[altLookSets.length-1]; + System.arraycopy(altLookSets, 0, copy, 0, altLookSets.length-1); // remove last (exit) alt + altLookSets = copy; + altLook = getAltLookaheadAsStringLists(altLookSets); + loopLabel = factory.gen.target.getLoopLabel(blkAST); + + this.exitLook = + factory.gen.target.getTokenTypesAsTargetLabels(factory.g, lastLook.toArray()); + +// this.sync = new Sync(factory, blkAST, expecting, decision, "iter"); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/LL1StarBlockSingleAlt.java b/tool/src/org/antlr/v4/codegen/model/LL1StarBlockSingleAlt.java new file mode 100644 index 000000000..9d21446cd --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/LL1StarBlockSingleAlt.java @@ -0,0 +1,27 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.runtime.atn.StarBlockStartState; +import org.antlr.v4.tool.GrammarAST; + +import java.util.List; + +/** */ +public class LL1StarBlockSingleAlt extends LL1Loop { + public LL1StarBlockSingleAlt(OutputModelFactory factory, GrammarAST starRoot, List alts) { + super(factory, starRoot, alts); + + StarBlockStartState star = (StarBlockStartState)starRoot.atnState; + this.decision = star.decision; +// DFA dfa = factory.g.decisionDFAs.get(decision); +// IntervalSet[] altLookSets = LinearApproximator.getLL1LookaheadSets(dfa); + IntervalSet[] altLookSets = factory.g.decisionLOOK.get(decision); + IntervalSet enterLook = altLookSets[1]; + IntervalSet exitLook = altLookSets[2]; + loopExpr = addCodeForLoopLookaheadTempVar(enterLook); + + IntervalSet enterExpecting = (IntervalSet)exitLook.or(enterLook); + this.sync = new Sync(factory, starRoot, enterExpecting, decision, "iter"); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/LabeledOp.java b/tool/src/org/antlr/v4/codegen/model/LabeledOp.java new file mode 100644 index 000000000..1d0993fd0 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/LabeledOp.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model; + +import java.util.List; + +/** */ +public interface LabeledOp { + public List getLabels(); +} diff --git a/tool/src/org/antlr/v4/codegen/model/Lexer.java b/tool/src/org/antlr/v4/codegen/model/Lexer.java new file mode 100644 index 000000000..6b9b37e81 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/Lexer.java @@ -0,0 +1,46 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.tool.*; + +import java.util.*; + +public class Lexer extends OutputModelObject { + public String name; + public Map tokens; + public LexerFile file; + public String[] tokenNames; + public Set ruleNames; + public Collection modes; + public LinkedHashMap actions; + public LinkedHashMap sempreds; + public SerializedATN atn; + + public Lexer(OutputModelFactory factory, LexerFile file) { + this.factory = factory; + this.file = file; // who contains us? 
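+ // collect the recognizer name, token-type map, modes, serialized ATN,
+ // token display names, rule names, sempreds, and named actions from the lexer grammar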
+ name = factory.g.getRecognizerName(); + tokens = new LinkedHashMap(); + LexerGrammar lg = (LexerGrammar)factory.g; + atn = new SerializedATN(factory, lg.atn); + modes = lg.modes.keySet(); + + for (String t : factory.g.tokenNameToTypeMap.keySet()) { + Integer ttype = factory.g.tokenNameToTypeMap.get(t); + if ( ttype>0 ) tokens.put(t, ttype); + } + + tokenNames = factory.g.getTokenDisplayNames(); + ruleNames = factory.g.rules.keySet(); + + sempreds = new LinkedHashMap(); + for (PredAST p : factory.g.sempreds.keySet()) { + sempreds.put(factory.g.sempreds.get(p), new Action(factory, p)); + } + actions = new LinkedHashMap(); + for (ActionAST a : factory.g.actions.keySet()) { + actions.put(factory.g.actions.get(a), new Action(factory, a)); + } + } + +} diff --git a/tool/src/org/antlr/v4/codegen/model/MatchToken.java b/tool/src/org/antlr/v4/codegen/model/MatchToken.java new file mode 100644 index 000000000..779240f07 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/MatchToken.java @@ -0,0 +1,46 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.parse.ANTLRParser; +import org.antlr.v4.tool.*; + +import java.util.*; + +/** */ +public class MatchToken extends RuleElement implements LabeledOp { + public String name; + public BitSetDecl follow; + public List labels = new ArrayList(); + + public MatchToken(OutputModelFactory factory, TerminalAST ast, GrammarAST labelAST) { + super(factory, ast); + int ttype = factory.g.getTokenType(ast.getText()); + name = factory.gen.target.getTokenTypeAsTargetLabel(factory.g, ttype); + if ( labelAST!=null ) { + String label = labelAST.getText(); + labels.add(label); + TokenDecl d = new TokenDecl(factory, label); + factory.currentRule.peek().addDecl(d); + if ( labelAST.parent.getType() == ANTLRParser.PLUS_ASSIGN ) { + TokenListDecl l = new TokenListDecl(factory, factory.gen.target.getListLabel(label)); + factory.currentRule.peek().addDecl(l); + } + } + + // If action refs as token not label, we need to define implicit label + if ( factory.currentAlt.tokenRefsInActions.containsKey(ast.getText()) ) { + String label = factory.gen.target.getImplicitTokenLabel(ast.getText()); + labels.add(label); + TokenDecl d = new TokenDecl(factory, label); + factory.currentRule.peek().addDecl(d); + } + +// LinearApproximator approx = new LinearApproximator(factory.g, ATN.INVALID_DECISION_NUMBER); +// IntervalSet fset = approx.FIRST(ast.ATNState.transition(0).target); +// System.out.println("follow match "+name+"="+fset); +// follow = factory.createFollowBitSet(ast, fset); +// factory.defineBitSet(follow); + } + + public List getLabels() { return labels; } +} diff --git a/tool/src/org/antlr/v4/codegen/model/OptionalBlock.java b/tool/src/org/antlr/v4/codegen/model/OptionalBlock.java new file mode 100644 index 000000000..4144d1152 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/OptionalBlock.java @@ -0,0 +1,16 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.tool.GrammarAST; + +import java.util.List; + +/** */ +public class OptionalBlock extends AltBlock { + public OptionalBlock(OutputModelFactory factory, + GrammarAST questionAST, + List alts) + { + super(factory, questionAST, alts); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/Parser.java b/tool/src/org/antlr/v4/codegen/model/Parser.java new file mode 100644 index 000000000..1a81c6906 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/Parser.java @@ -0,0 +1,63 @@ +package 
org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.tool.*; + +import java.util.*; + +/** */ +public class Parser extends OutputModelObject { + public String name; + public Map tokens; + public String[] tokenNames; + public Set ruleNames; + public List funcs = new ArrayList(); + public List scopes = new ArrayList(); + public ParserFile file; + public SerializedATN atn; + public LinkedHashMap actions; + public LinkedHashMap sempreds; + + public Parser(OutputModelFactory factory, ParserFile file) { + this.factory = factory; + this.file = file; // who contains us? + name = factory.g.getRecognizerName(); + tokens = new LinkedHashMap(); + //tokens.putAll( factory.g.tokenNameToTypeMap ); + for (String t : factory.g.tokenNameToTypeMap.keySet()) { + Integer ttype = factory.g.tokenNameToTypeMap.get(t); + if ( ttype>0 ) tokens.put(t, ttype); + } +// int numTokens = factory.g.getTokenTypes().size(); +// for (int t=Token.MIN_TOKEN_TYPE; t(); + for (PredAST p : factory.g.sempreds.keySet()) { + sempreds.put(factory.g.sempreds.get(p), new Action(factory, p)); + } + actions = new LinkedHashMap(); + for (ActionAST a : factory.g.actions.keySet()) { + actions.put(factory.g.actions.get(a), new ForcedAction(factory, a)); + } + } + +// @Override +// public List getChildren() { +// final List sup = super.getChildren(); +// return new ArrayList() {{ if ( sup!=null ) addAll(sup); add("funcs"); add("scopes"); }}; +// } +} diff --git a/tool/src/org/antlr/v4/codegen/model/ParserFile.java b/tool/src/org/antlr/v4/codegen/model/ParserFile.java new file mode 100644 index 000000000..ae844f135 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/ParserFile.java @@ -0,0 +1,35 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.tool.GrammarAST; + +import java.util.*; + +/** */ +public class ParserFile extends OutputModelObject { + public String fileName; + public Parser parser; +// public List dfaDecls = new ArrayList(); +// public OrderedHashSet bitSetDecls = new OrderedHashSet(); + public String TokenLabelType; + public String ASTLabelType; + public Map namedActions; + + public ParserFile(OutputModelFactory factory, String fileName) { + super(factory); + this.fileName = fileName; + factory.file = this; + TokenLabelType = factory.gen.g.getOption("TokenLabelType"); + ASTLabelType = factory.gen.g.getOption("ASTLabelType"); + namedActions = new HashMap(); + for (String name : factory.gen.g.namedActions.keySet()) { + GrammarAST ast = factory.gen.g.namedActions.get(name); + namedActions.put(name, new Action(factory, ast)); + } + parser = new Parser(factory, this); + } + +// public void defineBitSet(BitSetDecl b) { +// bitSetDecls.add(b); +// } +} diff --git a/tool/src/org/antlr/v4/codegen/model/RuleElement.java b/tool/src/org/antlr/v4/codegen/model/RuleElement.java new file mode 100644 index 000000000..dc0e9bb29 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/RuleElement.java @@ -0,0 +1,14 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.tool.GrammarAST; + +public class RuleElement extends SrcOp { + /** Associated ATN state for this rule elements (action, token, ruleref, ...) 
*/ + public int stateNumber; + + public RuleElement(OutputModelFactory factory, GrammarAST ast) { + super(factory, ast); + if ( ast.atnState!=null ) stateNumber = ast.atnState.stateNumber; + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/RuleFunction.java b/tool/src/org/antlr/v4/codegen/model/RuleFunction.java new file mode 100644 index 000000000..4bb8c5ba2 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/RuleFunction.java @@ -0,0 +1,118 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.runtime.tree.CommonTreeNodeStream; +import org.antlr.v4.codegen.*; +import org.antlr.v4.misc.*; +import org.antlr.v4.parse.*; +import org.antlr.v4.runtime.atn.ATNState; +import org.antlr.v4.tool.*; + +import java.util.*; + +/** */ +public class RuleFunction extends OutputModelObject { + public String name; + public List modifiers; + public String ctxType; + public List globalScopesUsed; + public Collection ruleLabels; + public Collection tokenLabels; + public List elementsReferencedInRewrite; + public List exceptions; + public Action finallyAction; + public Map namedActions; + + public ATNState startState; + + public StructDecl context; + public DynamicScopeStruct scope; + + public int index; + + public Collection args = null; + public OrderedHashSet decls; + public SrcOp code; + + public RuleFunction(OutputModelFactory factory) { + super(factory); + } + + public RuleFunction(OutputModelFactory factory, Rule r) { + super(factory); + this.name = r.name; + if ( r.modifiers!=null && r.modifiers.size()>0 ) { + this.modifiers = new ArrayList(); + for (GrammarAST t : r.modifiers) modifiers.add(t.getText()); + } + modifiers = Utils.nodesToStrings(r.modifiers); + + ctxType = factory.gen.target.getRuleFunctionContextStructName(r); + + List argsAndReturnValues = new ArrayList(); + List ctorAttrs = new ArrayList(); + + index = r.index; + + if ( r.args!=null ) { + argsAndReturnValues.addAll(r.args.attributes.values()); + args = r.args.attributes.values(); + ctorAttrs.addAll(args); + } + if ( r.retvals!=null ) { + argsAndReturnValues.addAll(r.retvals.attributes.values()); + } + if ( r.scope!=null ) { + scope = new DynamicScopeStruct(factory, factory.gen.target.getRuleDynamicScopeStructName(r.name), + r.scope.attributes.values()); + } + + globalScopesUsed = Utils.apply(r.useScopes, "getText"); + + if ( argsAndReturnValues.size()>0 ) { + context = new StructDecl(factory, factory.gen.target.getRuleFunctionContextStructName(r), + argsAndReturnValues); + context.ctorAttrs = ctorAttrs; + } + + ruleLabels = r.getLabelNames(); + tokenLabels = r.getTokenRefs(); + exceptions = Utils.nodesToStrings(r.exceptionActions); + if ( r.finallyAction!=null ) finallyAction = new Action(factory, r.finallyAction); + + namedActions = new HashMap(); + for (String name : r.namedActions.keySet()) { + GrammarAST ast = r.namedActions.get(name); + namedActions.put(name, new Action(factory, ast)); + } + + startState = factory.g.atn.ruleToStartState.get(r); + + factory.currentRule.push(this); + GrammarASTAdaptor adaptor = new GrammarASTAdaptor(r.ast.token.getInputStream()); + GrammarAST blk = (GrammarAST)r.ast.getFirstChildWithType(ANTLRParser.BLOCK); + CommonTreeNodeStream nodes = new CommonTreeNodeStream(adaptor,blk); + SourceGenTriggers genTriggers = new SourceGenTriggers(nodes, factory); + try { + code = genTriggers.block(null,null); // GEN Instr OBJECTS + } + catch (Exception e){ + e.printStackTrace(System.err); + } + factory.currentRule.pop(); + } + + public void addDecl(Decl d) { + if ( decls==null ) decls = new 
OrderedHashSet(); + decls.add(d); + } + +// @Override +// public List getChildren() { +// final List sup = super.getChildren(); +// return new ArrayList() {{ +// if ( sup!=null ) addAll(sup); +// add("context"); add("scope"); add("decls"); add("code"); +// add("finallyAction"); add("namedActions"); +// }}; +// } +} diff --git a/tool/src/org/antlr/v4/codegen/model/SemPred.java b/tool/src/org/antlr/v4/codegen/model/SemPred.java new file mode 100644 index 000000000..3cedf04a2 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/SemPred.java @@ -0,0 +1,9 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.tool.GrammarAST; + +/** */ +public class SemPred extends Action { + public SemPred(OutputModelFactory factory, GrammarAST ast) { super(factory,ast); } +} diff --git a/tool/src/org/antlr/v4/codegen/model/SrcOp.java b/tool/src/org/antlr/v4/codegen/model/SrcOp.java new file mode 100644 index 000000000..928890a16 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/SrcOp.java @@ -0,0 +1,17 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.tool.GrammarAST; + +/** */ +public abstract class SrcOp extends OutputModelObject { + /** Used to create unique var names etc... */ + public int uniqueID; + + public SrcOp() {;} + public SrcOp(OutputModelFactory factory) { super(factory); } + public SrcOp(OutputModelFactory factory, GrammarAST ast) { + super(factory,ast); + uniqueID = ast.token.getTokenIndex(); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/StarBlock.java b/tool/src/org/antlr/v4/codegen/model/StarBlock.java new file mode 100644 index 000000000..41885033e --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/StarBlock.java @@ -0,0 +1,22 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.runtime.atn.BlockStartState; +import org.antlr.v4.tool.GrammarAST; + +import java.util.List; + +public class StarBlock extends Loop { + public String loopLabel; + + public StarBlock(OutputModelFactory factory, + GrammarAST blkOrEbnfRootAST, + List alts) + { + super(factory, blkOrEbnfRootAST, alts); + loopLabel = factory.gen.target.getLoopLabel(blkOrEbnfRootAST); + BlockStartState star = (BlockStartState)blkOrEbnfRootAST.atnState; + decision = star.decision; + exitAlt = alts.size()+1; + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/StructDecl.java b/tool/src/org/antlr/v4/codegen/model/StructDecl.java new file mode 100644 index 000000000..c76ea9ba8 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/StructDecl.java @@ -0,0 +1,27 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.tool.Attribute; + +import java.util.*; + +/** */ +public class StructDecl extends Decl { + public List attrs = new ArrayList(); + public Collection ctorAttrs; + + public StructDecl(OutputModelFactory factory, String name, Collection attrList) { + super(factory, name); + for (Attribute a : attrList) { + attrs.add(new AttributeDecl(factory, a.name, a.decl)); + } + } + +// @Override +// public List getChildren() { +// final List sup = super.getChildren(); +// return new ArrayList() {{ +// if ( sup!=null ) addAll(sup); add("attrs"); +// }}; +// } +} diff --git a/tool/src/org/antlr/v4/codegen/model/Sync.java b/tool/src/org/antlr/v4/codegen/model/Sync.java new file mode 100644 index 000000000..6b33b46d3 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/Sync.java @@ -0,0 
+1,22 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.tool.GrammarAST; + +/** */ +public class Sync extends SrcOp { + public int decision; + public BitSetDecl expecting; + public Sync(OutputModelFactory factory, + GrammarAST blkOrEbnfRootAST, + IntervalSet expecting, + int decision, + String position) + { + super(factory, blkOrEbnfRootAST); + this.decision = decision; + this.expecting = factory.createExpectingBitSet(ast, decision, expecting, position); + factory.defineBitSet(this.expecting); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/TestSet.java b/tool/src/org/antlr/v4/codegen/model/TestSet.java new file mode 100644 index 000000000..61db9fa3d --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/TestSet.java @@ -0,0 +1,15 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.tool.GrammarAST; + +/** */ +public class TestSet extends RuleElement { + public BitSetDecl set; + public TestSet(OutputModelFactory factory, GrammarAST blkAST, IntervalSet set) { + super(factory, blkAST); + this.set = factory.createTestBitSet(blkAST, set); + factory.defineBitSet(this.set); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/TestSetInline.java b/tool/src/org/antlr/v4/codegen/model/TestSetInline.java new file mode 100644 index 000000000..8fae8f791 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/TestSetInline.java @@ -0,0 +1,21 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.tool.GrammarAST; + +/** */ +public class TestSetInline extends SrcOp { + public String varName; + public String[] ttypes; +// public CaptureNextToken nextToken; +// public Choice choice; + public TestSetInline(OutputModelFactory factory, GrammarAST ast, IntervalSet set) { + super(factory, ast); + this.ttypes = factory.gen.target.getTokenTypesAsTargetLabels(factory.g, set.toArray()); + this.varName = "_la"; +// this.choice = choice; +// nextToken = new CaptureNextToken(); +// choice.addPreambleOp(nextToken); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/ThrowEarlyExitException.java b/tool/src/org/antlr/v4/codegen/model/ThrowEarlyExitException.java new file mode 100644 index 000000000..e5f4cfd7f --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/ThrowEarlyExitException.java @@ -0,0 +1,12 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.tool.GrammarAST; + +/** */ +public class ThrowEarlyExitException extends ThrowRecognitionException { + public ThrowEarlyExitException(OutputModelFactory factory, GrammarAST ast, IntervalSet expecting) { + super(factory, ast, expecting); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/ThrowNoViableAlt.java b/tool/src/org/antlr/v4/codegen/model/ThrowNoViableAlt.java new file mode 100644 index 000000000..425c88de2 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/ThrowNoViableAlt.java @@ -0,0 +1,14 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.tool.GrammarAST; + +/** */ +public class ThrowNoViableAlt extends ThrowRecognitionException { + public ThrowNoViableAlt(OutputModelFactory factory, GrammarAST blkOrEbnfRootAST, + IntervalSet expecting) + { + 
super(factory, blkOrEbnfRootAST, expecting); + } +}
diff --git a/tool/src/org/antlr/v4/codegen/model/ThrowRecognitionException.java b/tool/src/org/antlr/v4/codegen/model/ThrowRecognitionException.java new file mode 100644 index 000000000..4551eb574 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/ThrowRecognitionException.java @@ -0,0 +1,24 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; +import org.antlr.v4.misc.IntervalSet; +import org.antlr.v4.tool.GrammarAST; + +/** */ +public class ThrowRecognitionException extends SrcOp { + public int decision; + public String grammarFile; + public int grammarLine; + public int grammarCharPosInLine; + public BitSetDecl expecting; + + public ThrowRecognitionException(OutputModelFactory factory, GrammarAST ast, IntervalSet expecting) { + super(factory, ast); + //this.decision = ((BlockStartState)ast.ATNState).decision; + grammarLine = ast.getLine(); + grammarCharPosInLine = ast.getCharPositionInLine(); + grammarFile = factory.g.fileName; + //this.expecting = factory.createExpectingBitSet(ast, decision, expecting, "error"); + factory.defineBitSet(this.expecting); + } +}
diff --git a/tool/src/org/antlr/v4/codegen/model/TokenDecl.java b/tool/src/org/antlr/v4/codegen/model/TokenDecl.java new file mode 100644 index 000000000..5db78a9e1 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/TokenDecl.java @@ -0,0 +1,10 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; + +/** */ +public class TokenDecl extends Decl { + public TokenDecl(OutputModelFactory factory, String varName) { + super(factory, varName); + } +}
diff --git a/tool/src/org/antlr/v4/codegen/model/TokenListDecl.java b/tool/src/org/antlr/v4/codegen/model/TokenListDecl.java new file mode 100644 index 000000000..c10fbc39f --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/TokenListDecl.java @@ -0,0 +1,10 @@ +package org.antlr.v4.codegen.model; + +import org.antlr.v4.codegen.OutputModelFactory; + +/** */ +public class TokenListDecl extends Decl { + public TokenListDecl(OutputModelFactory factory, String varName) { + super(factory, varName); + } +}
diff --git a/tool/src/org/antlr/v4/codegen/model/actions/ActionText.java b/tool/src/org/antlr/v4/codegen/model/actions/ActionText.java new file mode 100644 index 000000000..435317258 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/ActionText.java @@ -0,0 +1,10 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class ActionText extends ActionChunk { + public String text; + + public ActionText(String text) { + this.text = text; + } +}
diff --git a/tool/src/org/antlr/v4/codegen/model/actions/ArgRef.java b/tool/src/org/antlr/v4/codegen/model/actions/ArgRef.java new file mode 100644 index 000000000..fb2503d05 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/ArgRef.java @@ -0,0 +1,10 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class ArgRef extends ActionChunk { + public String name; + + public ArgRef(String name) { + this.name = name; + } +}
diff --git a/tool/src/org/antlr/v4/codegen/model/actions/DynScopeAttrRef.java b/tool/src/org/antlr/v4/codegen/model/actions/DynScopeAttrRef.java new file mode 100644 index 000000000..42267b5fe --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/DynScopeAttrRef.java @@ -0,0 +1,12 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class DynScopeAttrRef extends ActionChunk { + public String scope; + public String attr; + + public 
DynScopeAttrRef(String scope, String attr) { + this.attr = attr; + this.scope = scope; + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/DynScopeRef.java b/tool/src/org/antlr/v4/codegen/model/actions/DynScopeRef.java new file mode 100644 index 000000000..cf276fdfd --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/DynScopeRef.java @@ -0,0 +1,10 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class DynScopeRef extends ActionChunk { + public String scope; + + public DynScopeRef(String scope) { + this.scope = scope; + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/RetValueRef.java b/tool/src/org/antlr/v4/codegen/model/actions/RetValueRef.java new file mode 100644 index 000000000..b2071ec4a --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/RetValueRef.java @@ -0,0 +1,11 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class RetValueRef extends ActionChunk { + public String name; + + public RetValueRef(String name) { + this.name = name; + } + +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef.java b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef.java new file mode 100644 index 000000000..64740eb8b --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef.java @@ -0,0 +1,10 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class RulePropertyRef extends ActionChunk { + public String label; + + public RulePropertyRef(String label) { + this.label = label; + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_st.java b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_st.java new file mode 100644 index 000000000..2823ed47c --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_st.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class RulePropertyRef_st extends RulePropertyRef { + public RulePropertyRef_st(String label) { + super(label); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_start.java b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_start.java new file mode 100644 index 000000000..c5e388c8c --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_start.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class RulePropertyRef_start extends RulePropertyRef { + public RulePropertyRef_start(String label) { + super(label); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_stop.java b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_stop.java new file mode 100644 index 000000000..4624fae39 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_stop.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class RulePropertyRef_stop extends RulePropertyRef { + public RulePropertyRef_stop(String label) { + super(label); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_text.java b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_text.java new file mode 100644 index 000000000..be5a08bd7 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_text.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class RulePropertyRef_text extends RulePropertyRef { + public RulePropertyRef_text(String label) { + super(label); + } +} diff --git 
a/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_tree.java b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_tree.java new file mode 100644 index 000000000..3402a5e6f --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/RulePropertyRef_tree.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class RulePropertyRef_tree extends RulePropertyRef { + public RulePropertyRef_tree(String label) { + super(label); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/SetAttr.java b/tool/src/org/antlr/v4/codegen/model/actions/SetAttr.java new file mode 100644 index 000000000..f05dad42f --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/SetAttr.java @@ -0,0 +1,23 @@ +package org.antlr.v4.codegen.model.actions; + +import java.util.List; + +/** */ +public class SetAttr extends ActionChunk { + public String name; + public List rhsChunks; + + public SetAttr(String name, List rhsChunks) { + this.name = name; + this.rhsChunks = rhsChunks; + } + +// @Override +// public List getChildren() { +// final List sup = super.getChildren(); +// return new ArrayList() {{ +// if ( sup!=null ) addAll(sup); +// add("rhsChunks"); +// }}; +// } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/SetDynScopeAttr.java b/tool/src/org/antlr/v4/codegen/model/actions/SetDynScopeAttr.java new file mode 100644 index 000000000..2b91c4f99 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/SetDynScopeAttr.java @@ -0,0 +1,25 @@ +package org.antlr.v4.codegen.model.actions; + +import java.util.List; + +/** */ +public class SetDynScopeAttr extends ActionChunk { + public String scope; + public String attr; + public List rhsChunks; + + public SetDynScopeAttr(String scope, String attr, List rhsChunks) { + this.scope = scope; + this.attr = attr; + this.rhsChunks = rhsChunks; + } +// +// @Override +// public List getChildren() { +// final List sup = super.getChildren(); +// return new ArrayList() {{ +// if ( sup!=null ) addAll(sup); +// add("rhsChunks"); +// }}; +// } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/SetDynScopeAttr_index.java b/tool/src/org/antlr/v4/codegen/model/actions/SetDynScopeAttr_index.java new file mode 100644 index 000000000..8d60d4f39 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/SetDynScopeAttr_index.java @@ -0,0 +1,21 @@ +package org.antlr.v4.codegen.model.actions; + +import java.util.List; + +/** */ +public class SetDynScopeAttr_index extends SetDynScopeAttr { + public List indexChunks; + public SetDynScopeAttr_index(String scope, String attr, List indexChunks, List rhsChunks) { + super(scope, attr, rhsChunks); + this.indexChunks = indexChunks; + } + +// @Override +// public List getChildren() { +// final List sup = super.getChildren(); +// return new ArrayList() {{ +// if ( sup!=null ) addAll(sup); +// add("indexChunks"); +// }}; +// } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef.java b/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef.java new file mode 100644 index 000000000..d31293fe0 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef.java @@ -0,0 +1,10 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class TokenPropertyRef extends ActionChunk { + public String label; + + public TokenPropertyRef(String label) { + this.label = label; + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_int.java 
b/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_int.java new file mode 100644 index 000000000..c34d0aee2 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_int.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class TokenPropertyRef_int extends TokenPropertyRef { + public TokenPropertyRef_int(String label) { + super(label); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_text.java b/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_text.java new file mode 100644 index 000000000..33086720c --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_text.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class TokenPropertyRef_text extends TokenPropertyRef { + public TokenPropertyRef_text(String label) { + super(label); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_tree.java b/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_tree.java new file mode 100644 index 000000000..e78f57acd --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_tree.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class TokenPropertyRef_tree extends TokenPropertyRef { + public TokenPropertyRef_tree(String label) { + super(label); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_type.java b/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_type.java new file mode 100644 index 000000000..ad77116b8 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/actions/TokenPropertyRef_type.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model.actions; + +/** */ +public class TokenPropertyRef_type extends TokenPropertyRef { + public TokenPropertyRef_type(String label) { + super(label); + } +} diff --git a/tool/src/org/antlr/v4/codegen/model/ast/MakeRoot.java b/tool/src/org/antlr/v4/codegen/model/ast/MakeRoot.java new file mode 100644 index 000000000..039ce09c7 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/ast/MakeRoot.java @@ -0,0 +1,8 @@ +package org.antlr.v4.codegen.model.ast; + +import org.antlr.v4.codegen.model.SrcOp; + +/** */ +public class MakeRoot extends SrcOp { + public SrcOp opWithResultToAdd; +} diff --git a/tool/src/org/antlr/v4/codegen/model/dbg.java b/tool/src/org/antlr/v4/codegen/model/dbg.java new file mode 100644 index 000000000..4823266c6 --- /dev/null +++ b/tool/src/org/antlr/v4/codegen/model/dbg.java @@ -0,0 +1,5 @@ +package org.antlr.v4.codegen.model; + +/** */ +public class dbg extends OutputModelObject { +} diff --git a/tool/src/org/antlr/v4/parse/TokenVocabParser.java b/tool/src/org/antlr/v4/parse/TokenVocabParser.java index 3fea4f8cb..bf2243803 100644 --- a/tool/src/org/antlr/v4/parse/TokenVocabParser.java +++ b/tool/src/org/antlr/v4/parse/TokenVocabParser.java @@ -118,7 +118,7 @@ public class TokenVocabParser { */ public File getImportedVocabFile() { - File f = new File(tool.getLibraryDirectory(), + File f = new File(tool.libDirectory, File.separator + vocabName + CodeGenerator.VOCAB_FILE_EXTENSION); @@ -131,12 +131,7 @@ public class TokenVocabParser { // files are generated (in the base, not relative to the input // location.) 
// - if (tool.haveOutputDir) { - f = new File(tool.getOutputDirectory(), vocabName + CodeGenerator.VOCAB_FILE_EXTENSION); - } - else { - f = new File(vocabName + CodeGenerator.VOCAB_FILE_EXTENSION); - } + f = new File(tool.outputDirectory, vocabName + CodeGenerator.VOCAB_FILE_EXTENSION); return f; } }
diff --git a/tool/src/org/antlr/v4/semantics/ActionSniffer.java b/tool/src/org/antlr/v4/semantics/ActionSniffer.java new file mode 100644 index 000000000..e144688fc --- /dev/null +++ b/tool/src/org/antlr/v4/semantics/ActionSniffer.java @@ -0,0 +1,52 @@ +package org.antlr.v4.semantics; + +import org.antlr.runtime.*; +import org.antlr.v4.parse.ActionSplitter; +import org.antlr.v4.tool.*; + +import java.util.List; + +/** Find token and rule refs, side-effect: update Alternatives */ +public class ActionSniffer extends BlankActionSplitterListener { + public Grammar g; + public Rule r; // null if action outside of rule + public Alternative alt; // null if action outside of alt; could be in rule + public ActionAST node; + public Token actionToken; // token within action + public ErrorManager errMgr; + + public ActionSniffer(Grammar g, Rule r, Alternative alt, ActionAST node, Token actionToken) { + this.g = g; + this.r = r; + this.alt = alt; + this.node = node; + this.actionToken = actionToken; + this.errMgr = g.tool.errMgr; + } + + public void examineAction() { + //System.out.println("examine "+actionToken); + ANTLRStringStream in = new ANTLRStringStream(actionToken.getText()); + in.setLine(actionToken.getLine()); + in.setCharPositionInLine(actionToken.getCharPositionInLine()); + ActionSplitter splitter = new ActionSplitter(in, this); + // forces eval, triggers listener methods + node.chunks = splitter.getActionTokens(); + System.out.println(node.chunks); + } + + public void attr(String expr, Token x) { + List xRefs = alt!=null ? alt.tokenRefs.get(x.getText()) : null; // alt may be null for rule-level actions + if ( xRefs!=null ) { + alt.tokenRefsInActions.map(x.getText(), node); + } + List rRefs = alt!=null ? alt.ruleRefs.get(x.getText()) : null; + if ( rRefs!=null ) { + alt.ruleRefsInActions.map(x.getText(), node); + } + } + + public void qualifiedAttr(String expr, Token x, Token y) { + attr(expr, x); + } +}
diff --git a/tool/src/org/antlr/v4/semantics/AttributeChecks.java b/tool/src/org/antlr/v4/semantics/AttributeChecks.java new file mode 100644 index 000000000..75211c72d --- /dev/null +++ b/tool/src/org/antlr/v4/semantics/AttributeChecks.java @@ -0,0 +1,219 @@ +package org.antlr.v4.semantics; + +import org.antlr.runtime.*; +import org.antlr.v4.parse.*; +import org.antlr.v4.tool.*; + +import java.util.List; + +/** Trigger checks for various kinds of attribute expressions. + * no side-effects. 
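 * A minimal usage sketch (an assumption about the caller; the static helper below
 * is the natural entry point for the tool's semantic pipeline once the Grammar is
 * fully loaded and defined):
 *
 *   Grammar g = ...;                                  // fully built Grammar
 *   AttributeChecks.checkAllAttributeExpressions(g);  // named, alt, exception and finally actions
 *
 * Every $x, $x.y and $Scope::attr reference that ActionSplitter finds is resolved
 * via node.resolver; unresolvable references are reported as errors such as
 * UNKNOWN_SIMPLE_ATTRIBUTE, UNKNOWN_RULE_ATTRIBUTE or UNKNOWN_DYNAMIC_SCOPE.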
+ */ +public class AttributeChecks implements ActionSplitterListener { + public Grammar g; + public Rule r; // null if action outside of rule + public Alternative alt; // null if action outside of alt; could be in rule + public ActionAST node; + public Token actionToken; // token within action + public ErrorManager errMgr; + + public AttributeChecks(Grammar g, Rule r, Alternative alt, ActionAST node, Token actionToken) { + this.g = g; + this.r = r; + this.alt = alt; + this.node = node; + this.actionToken = actionToken; + this.errMgr = g.tool.errMgr; + } + + public static void checkAllAttributeExpressions(Grammar g) { + for (ActionAST act : g.namedActions.values()) { + AttributeChecks checker = new AttributeChecks(g, null, null, act, act.token); + checker.examineAction(); + } + + for (Rule r : g.rules.values()) { + for (ActionAST a : r.namedActions.values()) { + AttributeChecks checker = new AttributeChecks(g, r, null, a, a.token); + checker.examineAction(); + } + for (int i=1; i<=r.numberOfAlts; i++) { + Alternative alt = r.alt[i]; + for (ActionAST a : alt.actions) { + AttributeChecks checker = + new AttributeChecks(g, r, alt, a, a.token); + checker.examineAction(); + } + } + for (ActionAST a : r.exceptionActions) { + AttributeChecks checker = new AttributeChecks(g, r, null, a, a.token); + checker.examineAction(); + } + if ( r.finallyAction!=null ) { + AttributeChecks checker = + new AttributeChecks(g, r, null, r.finallyAction, r.finallyAction.token); + checker.examineAction(); + } + } + } + + public void examineAction() { + //System.out.println("examine "+actionToken); + ANTLRStringStream in = new ANTLRStringStream(actionToken.getText()); + in.setLine(actionToken.getLine()); + in.setCharPositionInLine(actionToken.getCharPositionInLine()); + ActionSplitter splitter = new ActionSplitter(in, this); + // forces eval, triggers listener methods + node.chunks = splitter.getActionTokens(); + System.out.println(node.chunks); + } + + // LISTENER METHODS + + public void setQualifiedAttr(String expr, Token x, Token y, Token rhs) { + qualifiedAttr(expr, x, y); + new AttributeChecks(g, r, alt, node, rhs).examineAction(); + } + + // $x.y + public void qualifiedAttr(String expr, Token x, Token y) { + if ( node.resolver.resolveToAttribute(x.getText(), y.getText(), node)==null ) { + Rule rref = isolatedRuleRef(x.getText()); + if ( rref!=null ) { + //if ( r.name.equals(x.getText()) ) return; // $a.x in rule a is ok + if ( rref.args!=null && rref.args.get(y.getText())!=null ) { + g.tool.errMgr.grammarError(ErrorType.INVALID_RULE_PARAMETER_REF, + g.fileName, y, y.getText(), expr); + } + else { + errMgr.grammarError(ErrorType.UNKNOWN_RULE_ATTRIBUTE, + g.fileName, y, y.getText(), rref.name, expr); + } + } + else if ( !node.resolver.resolvesToAttributeDict(x.getText(), node) ) { + errMgr.grammarError(ErrorType.UNKNOWN_SIMPLE_ATTRIBUTE, + g.fileName, x, x.getText(), expr); + } + else { + errMgr.grammarError(ErrorType.UNKNOWN_ATTRIBUTE_IN_SCOPE, + g.fileName, y, y.getText(), expr); + } + } + } + + public void setAttr(String expr, Token x, Token rhs) { + if ( node.resolver.resolveToAttribute(x.getText(), node)==null ) { + errMgr.grammarError(ErrorType.UNKNOWN_SIMPLE_ATTRIBUTE, + g.fileName, x, x.getText(), expr); + } + new AttributeChecks(g, r, alt, node, rhs).examineAction(); + } + + public void attr(String expr, Token x) { + if ( node.resolver.resolveToAttribute(x.getText(), node)==null ) { + if ( node.resolver.resolveToDynamicScope(x.getText(), node)!=null ) { + return; // $S for scope S is ok + } + if ( 
node.resolver.resolvesToToken(x.getText(), node) ) { + return; // $ID for token ref or label of token + } + if ( node.resolver.resolvesToListLabel(x.getText(), node) ) { + return; // $ids for ids+=ID etc... + } + if ( isolatedRuleRef(x.getText())!=null ) { + errMgr.grammarError(ErrorType.ISOLATED_RULE_REF, + g.fileName, x, x.getText(), expr); + return; + } + errMgr.grammarError(ErrorType.UNKNOWN_SIMPLE_ATTRIBUTE, + g.fileName, x, x.getText(), expr); + } + } + + public void setDynamicScopeAttr(String expr, Token x, Token y, Token rhs) { + //System.out.println("SET "+x+" :: "+y); + dynamicScopeAttr(expr, x, y); + new AttributeChecks(g, r, alt, node, rhs).examineAction(); + } + + public void dynamicScopeAttr(String expr, Token x, Token y) { + //System.out.println(x+" :: "+y); + AttributeDict s = node.resolver.resolveToDynamicScope(x.getText(), node); + if ( s==null ) { + errMgr.grammarError(ErrorType.UNKNOWN_DYNAMIC_SCOPE, + g.fileName, x, x.getText(), expr); + return; + } + Attribute a = s.get(y.getText()); + if ( a==null ) { + errMgr.grammarError(ErrorType.UNKNOWN_DYNAMIC_SCOPE_ATTRIBUTE, + g.fileName, y, x.getText(), y.getText(), expr); + } + } + + public void setDynamicNegativeIndexedScopeAttr(String expr, Token x, Token y, + Token index, Token rhs) { + setDynamicScopeAttr(expr, x, y, rhs); + new AttributeChecks(g, r, alt, node, index).examineAction(); + } + + public void dynamicNegativeIndexedScopeAttr(String expr, Token x, Token y, + Token index) { + dynamicScopeAttr(expr, x, y); + new AttributeChecks(g, r, alt, node, index).examineAction(); + } + + public void setDynamicAbsoluteIndexedScopeAttr(String expr, Token x, Token y, + Token index, Token rhs) { + setDynamicScopeAttr(expr, x, y, rhs); + new AttributeChecks(g, r, alt, node, index).examineAction(); + } + + public void dynamicAbsoluteIndexedScopeAttr(String expr, Token x, Token y, + Token index) { + dynamicScopeAttr(expr, x, y); + new AttributeChecks(g, r, alt, node, index).examineAction(); + } + + public void unknownSyntax(Token t) { + errMgr.grammarError(ErrorType.INVALID_TEMPLATE_ACTION, + g.fileName, t, t.getText()); + } + + public void text(String text) { } + + // don't care + public void templateInstance(String expr) { } + public void indirectTemplateInstance(String expr) { } + public void setExprAttribute(String expr) { } + public void setSTAttribute(String expr) { } + public void templateExpr(String expr) { } + + // SUPPORT + + public Rule isolatedRuleRef(String x) { + if ( node.resolver instanceof Grammar ) return null; + + if ( x.equals(r.name) ) return r; + List labels = null; + if ( node.resolver instanceof Rule ) { + labels = r.getLabelDefs().get(x); + } + else if ( node.resolver instanceof Alternative ) { + labels = ((Alternative)node.resolver).labelDefs.get(x); + } + if ( labels!=null ) { // it's a label ref. is it a rule label? 
+ LabelElementPair anyLabelDef = labels.get(0); + if ( anyLabelDef.type==LabelType.RULE_LABEL ) { + return g.getRule(anyLabelDef.element.getText()); + } + } + if ( node.resolver instanceof Alternative ) { + if ( ((Alternative)node.resolver).ruleRefs.get(x)!=null ) { + return g.getRule(x); + } + } + return null; + } + +} diff --git a/tool/src/org/antlr/v4/semantics/BasicSemanticChecks.java b/tool/src/org/antlr/v4/semantics/BasicSemanticChecks.java new file mode 100644 index 000000000..6099d6534 --- /dev/null +++ b/tool/src/org/antlr/v4/semantics/BasicSemanticChecks.java @@ -0,0 +1,442 @@ +package org.antlr.v4.semantics; + +import org.antlr.runtime.Token; +import org.antlr.v4.misc.Utils; +import org.antlr.v4.parse.ANTLRParser; +import org.antlr.v4.tool.*; +import org.stringtemplate.v4.misc.MultiMap; + +import java.io.File; +import java.util.*; + +/** No side-effects; BasicSemanticTriggers.g invokes check rules for these: + * + * FILE_AND_GRAMMAR_NAME_DIFFER + * LEXER_RULES_NOT_ALLOWED + * PARSER_RULES_NOT_ALLOWED + * CANNOT_ALIAS_TOKENS + * ARGS_ON_TOKEN_REF + * ILLEGAL_OPTION + * REWRITE_OR_OP_WITH_NO_OUTPUT_OPTION + * NO_RULES + * REWRITE_FOR_MULTI_ELEMENT_ALT + * HETERO_ILLEGAL_IN_REWRITE_ALT + * AST_OP_WITH_NON_AST_OUTPUT_OPTION + * AST_OP_IN_ALT_WITH_REWRITE + * CONFLICTING_OPTION_IN_TREE_FILTER + * WILDCARD_AS_ROOT + * INVALID_IMPORT + * TOKEN_VOCAB_IN_DELEGATE + * IMPORT_NAME_CLASH + * REPEATED_PREQUEL + * TOKEN_NAMES_MUST_START_UPPER + * + * TODO: 1 action per lex rule + */ +public class BasicSemanticChecks { + public static final Set legalLexerOptions = + new HashSet() { + { + add("language"); add("tokenVocab"); + add("TokenLabelType"); + add("superClass"); + add("filter"); + add("k"); + add("backtrack"); + add("memoize"); + } + }; + + public static final Set legalParserOptions = + new HashSet() { + { + add("language"); add("tokenVocab"); + add("output"); add("rewrite"); add("ASTLabelType"); + add("TokenLabelType"); + add("superClass"); + add("k"); + add("backtrack"); + add("memoize"); + } + }; + + public static final Set legalTreeParserOptions = + new HashSet() { + { + add("language"); add("tokenVocab"); + add("output"); add("rewrite"); add("ASTLabelType"); + add("TokenLabelType"); + add("superClass"); + add("k"); + add("backtrack"); + add("memoize"); + add("filter"); + } + }; + + public static final Set legalRuleOptions = + new HashSet() { + { + add("k"); add("greedy"); add("memoize"); + add("backtrack"); add("strategy"); + } + }; + + public static final Set legalBlockOptions = + new HashSet() {{add("k"); add("greedy"); add("backtrack"); add("memoize");}}; + + /** Legal options for terminal refs like ID */ + public static final Set legalTokenOptions = + new HashSet() { + { + add(TerminalAST.defaultTokenOption); + add("associativity"); + } + }; + + /** Set of valid imports. E.g., can only import a tree parser into + * another tree parser. Maps delegate to set of delegator grammar types. + * validDelegations.get(LEXER) gives list of the kinds of delegators + * that can import lexers. 
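 * For example, validImportTypes.get(ANTLRParser.LEXER) yields {LEXER, COMBINED}:
 * a lexer may be imported by another lexer or by a combined grammar, while
 * validImportTypes.get(ANTLRParser.TREE) yields only {TREE}.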
+ */ + public static MultiMap validImportTypes = + new MultiMap() { + { + map(ANTLRParser.LEXER, ANTLRParser.LEXER); + map(ANTLRParser.LEXER, ANTLRParser.COMBINED); + + map(ANTLRParser.PARSER, ANTLRParser.PARSER); + map(ANTLRParser.PARSER, ANTLRParser.COMBINED); + + map(ANTLRParser.TREE, ANTLRParser.TREE); + + map(ANTLRParser.COMBINED, ANTLRParser.COMBINED); + } + }; + + public Grammar g; + public ErrorManager errMgr; + + public BasicSemanticChecks(Grammar g) { + this.g = g; + this.errMgr = g.tool.errMgr; + } + + void checkGrammarName(Token nameToken) { + if ( g.implicitLexer==null ) return; + String fullyQualifiedName = nameToken.getInputStream().getSourceName(); + File f = new File(fullyQualifiedName); + String fileName = f.getName(); + if ( !Utils.stripFileExtension(fileName).equals(nameToken.getText()) ) { + g.tool.errMgr.grammarError(ErrorType.FILE_AND_GRAMMAR_NAME_DIFFER, + fileName, nameToken, nameToken.getText(), fileName); + } + } + + void checkNumRules(GrammarAST rulesNode) { + if ( rulesNode.getChildCount()==0 ) { + GrammarAST root = (GrammarAST)rulesNode.getParent(); + GrammarAST IDNode = (GrammarAST)root.getChild(0); + g.tool.errMgr.grammarError(ErrorType.NO_RULES, g.fileName, + null, IDNode.getText(), g); + } + } + + void checkMode(Token modeNameToken) { + if ( !g.isLexer() ) { + g.tool.errMgr.grammarError(ErrorType.MODE_NOT_IN_LEXER, g.fileName, + modeNameToken, modeNameToken.getText(), g); + } + } + + void checkNumPrequels(List options, + List imports, + List tokens) + { + List secondOptionTokens = new ArrayList(); + if ( options!=null && options.size()>1 ) { + secondOptionTokens.add(options.get(1).token); + } + if ( imports!=null && imports.size()>1 ) { + secondOptionTokens.add(imports.get(1).token); + } + if ( tokens!=null && tokens.size()>1 ) { + secondOptionTokens.add(tokens.get(1).token); + } + for (Token t : secondOptionTokens) { + String fileName = t.getInputStream().getSourceName(); + g.tool.errMgr.grammarError(ErrorType.REPEATED_PREQUEL, + fileName, t); + } + } + + void checkInvalidRuleDef(Token ruleID) { + String fileName = null; + if ( ruleID.getInputStream()!=null ) { + fileName = ruleID.getInputStream().getSourceName(); + } + if ( g.isLexer() && Character.isLowerCase(ruleID.getText().charAt(0)) ) { + g.tool.errMgr.grammarError(ErrorType.PARSER_RULES_NOT_ALLOWED, + fileName, ruleID, ruleID.getText()); + } + if ( (g.isParser()||g.isTreeGrammar()) && + Character.isUpperCase(ruleID.getText().charAt(0)) ) + { + g.tool.errMgr.grammarError(ErrorType.LEXER_RULES_NOT_ALLOWED, + fileName, ruleID, ruleID.getText()); + } + } + + void checkInvalidRuleRef(Token ruleID) { + String fileName = ruleID.getInputStream().getSourceName(); + if ( g.isLexer() && Character.isLowerCase(ruleID.getText().charAt(0)) ) { + g.tool.errMgr.grammarError(ErrorType.PARSER_RULES_NOT_ALLOWED, + fileName, ruleID, ruleID.getText()); + } + } + + void checkTokenAlias(Token tokenID) { + String fileName = tokenID.getInputStream().getSourceName(); + if ( Character.isLowerCase(tokenID.getText().charAt(0)) ) { + g.tool.errMgr.grammarError(ErrorType.TOKEN_NAMES_MUST_START_UPPER, + fileName, + tokenID, + tokenID.getText()); + } + if ( !g.isCombined() ) { + g.tool.errMgr.grammarError(ErrorType.CANNOT_ALIAS_TOKENS, + fileName, + tokenID, + tokenID.getText()); + } + } + + /** At this point, we can only rule out obvious problems like ID[3] + * in parser. Might be illegal too in later stage when we see ID + * isn't a fragment. 
+ */ + void checkTokenArgs(Token tokenID) { + String fileName = tokenID.getInputStream().getSourceName(); + if ( !g.isLexer() ) { + g.tool.errMgr.grammarError(ErrorType.ARGS_ON_TOKEN_REF, + fileName, tokenID, tokenID.getText()); + } + } + + /** Check option is appropriate for grammar, rule, subrule */ + boolean checkOptions(GrammarAST parent, + Token optionID, String value) + { + boolean ok = true; + if ( optionID.getText().equals("tokenVocab") && + g.parent!=null ) // only allow tokenVocab option in root grammar + { + g.tool.errMgr.grammarWarning(ErrorType.TOKEN_VOCAB_IN_DELEGATE, + g.fileName, + optionID, + g.name); + ok = false; + } + + if ( parent.getType()==ANTLRParser.BLOCK ) { + if ( !legalBlockOptions.contains(optionID.getText()) ) { // block + g.tool.errMgr.grammarError(ErrorType.ILLEGAL_OPTION, + g.fileName, + optionID, + optionID.getText()); + ok = false; + } + } + else if ( parent.getType()==ANTLRParser.RULE ) { + if ( !legalRuleOptions.contains(optionID.getText()) ) { // rule + g.tool.errMgr.grammarError(ErrorType.ILLEGAL_OPTION, + g.fileName, + optionID, + optionID.getText()); + ok = false; + } + } + else if ( parent.getType()==ANTLRParser.GRAMMAR && + !legalGrammarOption(optionID.getText()) ) { // grammar + g.tool.errMgr.grammarError(ErrorType.ILLEGAL_OPTION, + g.fileName, + optionID, + optionID.getText()); + ok = false; + } + + return ok; + } + + /** Check option is appropriate for token; parent is ELEMENT_OPTIONS */ + boolean checkTokenOptions(GrammarAST parent, + Token optionID, String value) + { + String fileName = optionID.getInputStream().getSourceName(); + // don't care about ID options + if ( value!=null && !legalTokenOptions.contains(optionID.getText()) ) { + g.tool.errMgr.grammarError(ErrorType.ILLEGAL_OPTION, + fileName, + optionID, + optionID.getText()); + return false; + } + // example (ALT_REWRITE (ALT (ID (ELEMENT_OPTIONS Foo))) (-> (ALT ID)) + if ( parent.hasAncestor(ANTLRParser.ALT_REWRITE) ) { + g.tool.errMgr.grammarError(ErrorType.HETERO_ILLEGAL_IN_REWRITE_ALT, + fileName, + optionID); + + } + // TODO: extra checks depending on terminal kind? + switch ( parent.getType() ) { + case ANTLRParser.TOKEN_REF : + case ANTLRParser.STRING_LITERAL : + case ANTLRParser.WILDCARD : + } + return true; + } + + boolean legalGrammarOption(String key) { + switch ( g.getType() ) { + case ANTLRParser.LEXER : + return legalLexerOptions.contains(key); + case ANTLRParser.PARSER : + return legalParserOptions.contains(key); + case ANTLRParser.TREE : + return legalTreeParserOptions.contains(key); + default : + return legalParserOptions.contains(key); + } + } + + /** Rules in tree grammar that use -> rewrites and are spitting out + * templates via output=template and then use rewrite=true must only + * use -> on alts that are simple nodes or trees or single rule refs + * that match either nodes or trees. 
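 * For example, in a tree grammar with options {output=template; rewrite=true;},
 * an alternative whose outer level has more than one element, such as  A B -> ... ,
 * draws REWRITE_FOR_MULTI_ELEMENT_ALT, whereas a single tree like  ^(A B) -> ...  is fine.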
+ */ + void checkRewriteForMultiRootAltInTreeGrammar( + Map options, + Token altStart, + int alt) + { + if ( g.isTreeGrammar() && + options!=null && options.get("output")!=null && + options.get("output").equals("template") && + options.get("rewrite")!=null && + options.get("rewrite").equals("true") ) + { + String fileName = altStart.getInputStream().getSourceName(); + g.tool.errMgr.grammarWarning(ErrorType.REWRITE_FOR_MULTI_ELEMENT_ALT, + fileName, + altStart, + alt); + } + } + + void checkASTOps(Map options, + GrammarAST op, + GrammarAST elementRoot) + { + RuleAST rule = (RuleAST)op.getAncestor(ANTLRParser.RULE); + String ruleName = rule.getChild(0).getText(); + String fileName = elementRoot.token.getInputStream().getSourceName(); + if ( options==null || !options.get("output").equals("AST") ) { + g.tool.errMgr.grammarWarning(ErrorType.AST_OP_WITH_NON_AST_OUTPUT_OPTION, + fileName, + elementRoot.token, + op.getText()); + } + if ( options!=null && options.get("output")==null ) { + g.tool.errMgr.grammarWarning(ErrorType.REWRITE_OR_OP_WITH_NO_OUTPUT_OPTION, + fileName, + elementRoot.token, + ruleName); + } + if ( op.hasAncestor(ANTLRParser.ALT_REWRITE) ) { + GrammarAST rew = (GrammarAST)op.getAncestor(ANTLRParser.ALT_REWRITE); + int altNum = rew.getChildIndex() + 1; // alts are 1..n + g.tool.errMgr.grammarWarning(ErrorType.AST_OP_IN_ALT_WITH_REWRITE, + fileName, + elementRoot.token, + ruleName, + altNum); + } + } + + void checkRewriteOk(Map options, GrammarAST elementRoot) { + RuleAST rule = (RuleAST)elementRoot.getAncestor(ANTLRParser.RULE); + String ruleName = rule.getChild(0).getText(); + String fileName = elementRoot.token.getInputStream().getSourceName(); + if ( options!=null && options.get("output")==null ) { + g.tool.errMgr.grammarWarning(ErrorType.REWRITE_OR_OP_WITH_NO_OUTPUT_OPTION, + fileName, + elementRoot.token, + ruleName); + } + } + + void checkTreeFilterOptions(GrammarRootAST root, + Map options) + { + if ( options==null ) return; + String fileName = root.token.getInputStream().getSourceName(); + String filter = options.get("filter"); + if ( g.isTreeGrammar() && filter!=null && filter.equals("true") ) { + // check for conflicting options + // filter => backtrack=true (can't be false) + // filter&&output!=AST => error + // filter&&output=AST => rewrite=true + // any deviation from valid option set is an error + String backtrack = options.get("backtrack"); + String output = options.get("output"); + String rewrite = options.get("rewrite"); + if ( backtrack!=null && !backtrack.toString().equals("true") ) { + g.tool.errMgr.grammarError(ErrorType.CONFLICTING_OPTION_IN_TREE_FILTER, + fileName, + root.token, + "backtrack", backtrack); + } + if ( output!=null && !output.equals("AST") ) { + g.tool.errMgr.grammarError(ErrorType.CONFLICTING_OPTION_IN_TREE_FILTER, + fileName, + root.token, + "output", output); + } + else if ( rewrite!=null && !rewrite.equals("true") ) { // && AST output + g.tool.errMgr.grammarError(ErrorType.CONFLICTING_OPTION_IN_TREE_FILTER, + fileName, + root.token, + "rewrite", rewrite); + } + } + } + + void checkWildcardRoot(Token wild) { + String fileName = wild.getInputStream().getSourceName(); + g.tool.errMgr.grammarError(ErrorType.WILDCARD_AS_ROOT, + fileName, + wild); + } + + void checkImport(Token importID) { + Grammar delegate = g.getImportedGrammar(importID.getText()); + if ( delegate==null ) return; + List validDelegators = validImportTypes.get(delegate.getType()); + if ( validDelegators!=null && !validDelegators.contains(g.getType()) ) { + 
g.tool.errMgr.grammarError(ErrorType.INVALID_IMPORT, + g.fileName, + importID, + g, delegate); + } + if ( g.isCombined() && + (delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.LEXER))|| + delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.PARSER))) ) + { + g.tool.errMgr.grammarError(ErrorType.IMPORT_NAME_CLASH, + g.fileName, + importID, + g, delegate); + } + } +} diff --git a/tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.g b/tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.g new file mode 100644 index 000000000..f36d1eb69 --- /dev/null +++ b/tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.g @@ -0,0 +1,240 @@ +/* + [The "BSD license"] + Copyright (c) 2010 Terence Parr + All rights reserved. + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +/** Triggers for the basic semantics of the input. Side-effects: + * Set token, block, rule options in the tree. Load field option + * with grammar options. Only legal options are set. + */ +tree grammar BasicSemanticTriggers; +options { + language = Java; + tokenVocab = ANTLRParser; + ASTLabelType = GrammarAST; + filter = true; + //superClass = 'org.antlr.v4.runtime.tree.TreeFilter'; +} + +// Include the copyright in this source and also the generated source +@header { +/* + [The "BSD license"] + Copyright (c) 2010 Terence Parr + All rights reserved. + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. 
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ +package org.antlr.v4.semantics; +import org.antlr.v4.tool.*; +} + +@members { +// TODO: SHOULD we fix up grammar AST to remove errors? Like kill refs to bad rules? +// that is, rewrite tree? maybe all passes are filters until code gen, which needs +// tree grammar. 'course we won't try codegen if errors. +public String name; +GrammarASTWithOptions root; +Grammar g; // which grammar are we checking +BasicSemanticChecks checker; +public BasicSemanticTriggers(TreeNodeStream input, Grammar g) { + this(input); + this.g = g; + checker = new BasicSemanticChecks(g); +} +} + +topdown // do these on way down so options and such are set first + : grammarSpec + | rules + | mode + | option + | rule + | tokenAlias + | rewrite + ; + +bottomup // do these "inside to outside" of expressions. + : multiElementAltInTreeGrammar + | astOps + | ruleref + | tokenRefWithArgs + | elementOption + | checkGrammarOptions // do after we see everything + | wildcardRoot + ; + +grammarSpec + : ^( GRAMMAR ID DOC_COMMENT? + { + name = $ID.text; + checker.checkGrammarName($ID.token); + root = (GrammarRootAST)$start; + } + prequelConstructs ^(RULES .*) + ) + ; + +checkGrammarOptions // when we get back to root + : GRAMMAR + {checker.checkTreeFilterOptions((GrammarRootAST)$GRAMMAR, + root.getOptions());} + ; + +/* +grammarType +@init {gtype = $start.getType(); root = (GrammarASTWithOptions)$start;} + : LEXER_GRAMMAR | PARSER_GRAMMAR | TREE_GRAMMAR | COMBINED_GRAMMAR + ; + */ + +prequelConstructs + : ( ^(o+=OPTIONS .+) + | ^(i+=IMPORT delegateGrammar+) + | ^(t+=TOKENS .+) + )* + {checker.checkNumPrequels($o, $i, $t);} + ; + +delegateGrammar + : ( ^(ASSIGN ID id=ID) + | id=ID + ) + {checker.checkImport($id.token);} + ; + +rules : RULES {checker.checkNumRules($RULES);} ; + +mode : ^(MODE ID .*) {checker.checkMode($ID.token);} ; + +option // TODO: put in grammar, or rule, or block + : {inContext("OPTIONS")}? ^(ASSIGN o=ID optionValue) + { + GrammarAST parent = (GrammarAST)$start.getParent(); // OPTION + GrammarAST parentWithOptionKind = (GrammarAST)parent.getParent(); + boolean ok = checker.checkOptions(parentWithOptionKind, + $ID.token, $optionValue.v); + // store options into XXX_GRAMMAR, RULE, BLOCK nodes + if ( ok ) { + ((GrammarASTWithOptions)parentWithOptionKind).setOption($o.text, $optionValue.v); + } + } + ; + +optionValue returns [String v] +@init {$v = $start.token.getText();} + : ID + | STRING_LITERAL + | INT + | STAR + ; + +rule: ^( RULE r=ID .*) {checker.checkInvalidRuleDef($r.token);} + ; + +ruleref + : RULE_REF {checker.checkInvalidRuleRef($RULE_REF.token);} + ; + +tokenAlias + : {inContext("TOKENS")}? ^(ASSIGN ID STRING_LITERAL) + {checker.checkTokenAlias($ID.token);} + ; + +tokenRefWithArgs + : {!inContext("RESULT ...")}? 
// if not on right side of -> + ^(TOKEN_REF ARG_ACTION) + {checker.checkTokenArgs($TOKEN_REF.token);} + ; + +elementOption + : {!inContext("RESULT ...")}? // not on right side of -> + ^( ELEMENT_OPTIONS + ( ^(ASSIGN o=ID value=ID) + | ^(ASSIGN o=ID value=STRING_LITERAL) + | o=ID + ) + ) + { + boolean ok = checker.checkTokenOptions((GrammarAST)$o.getParent(), + $o.token, $value.text); + if ( ok ) { + if ( value!=null ) { + TerminalAST terminal = (TerminalAST)$start.getParent(); + terminal.setOption($o.text, $value.text); + } + else { + TerminalAST terminal = (TerminalAST)$start.getParent(); + terminal.setOption(TerminalAST.defaultTokenOption, $o.text); + } + } + } + ; + +// (ALT_REWRITE (ALT A B) ^( ALT ^( A B ) ) or ( ALT A ) +multiElementAltInTreeGrammar + : {inContext("ALT_REWRITE")}? + ^( ALT ~(SEMPRED|ACTION) ~(SEMPRED|ACTION)+ ) // > 1 element at outer level + { + int altNum = $start.getParent().getChildIndex() + 1; // alts are 1..n + GrammarAST firstNode = (GrammarAST)$start.getChild(0); + checker.checkRewriteForMultiRootAltInTreeGrammar(root.getOptions(), + firstNode.token, + altNum); + } + ; + +// Check stuff like (^ A) (! r) +astOps + : ^(ROOT el=.) {checker.checkASTOps(root.getOptions(), $start, $el);} + | ^(BANG el=.) {checker.checkASTOps(root.getOptions(), $start, $el);} + ; + +rewrite + : (RESULT|ST_RESULT) + {checker.checkRewriteOk(root.getOptions(),$start);} + ; + +wildcardRoot + : ^(TREE_BEGIN WILDCARD .*) + {checker.checkWildcardRoot($WILDCARD.token);} + ; diff --git a/tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.java b/tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.java new file mode 100644 index 000000000..e2d4ef7f7 --- /dev/null +++ b/tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.java @@ -0,0 +1,1741 @@ +// $ANTLR 3.3 Nov 30, 2010 12:50:56 BasicSemanticTriggers.g 2011-06-11 10:28:59 + +/* + [The "BSD license"] + Copyright (c) 2010 Terence Parr + All rights reserved. + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ +package org.antlr.v4.semantics; + +import org.antlr.runtime.*; +import org.antlr.runtime.BitSet; +import org.antlr.runtime.tree.*; +import org.antlr.v4.tool.*; + +import java.util.*; +/** Triggers for the basic semantics of the input. 
Side-effects: + * Set token, block, rule options in the tree. Load field option + * with grammar options. Only legal options are set. + */ +public class BasicSemanticTriggers extends TreeFilter { + public static final String[] tokenNames = new String[] { + "", "", "", "", "SEMPRED", "FORCED_ACTION", "DOC_COMMENT", "SRC", "NLCHARS", "COMMENT", "DOUBLE_QUOTE_STRING_LITERAL", "DOUBLE_ANGLE_STRING_LITERAL", "ACTION_STRING_LITERAL", "ACTION_CHAR_LITERAL", "ARG_ACTION", "NESTED_ACTION", "ACTION", "ACTION_ESC", "WSNLCHARS", "OPTIONS", "TOKENS", "SCOPE", "IMPORT", "FRAGMENT", "LEXER", "PARSER", "TREE", "GRAMMAR", "PROTECTED", "PUBLIC", "PRIVATE", "RETURNS", "THROWS", "CATCH", "FINALLY", "TEMPLATE", "MODE", "COLON", "COLONCOLON", "COMMA", "SEMI", "LPAREN", "RPAREN", "IMPLIES", "LT", "GT", "ASSIGN", "QUESTION", "BANG", "STAR", "PLUS", "PLUS_ASSIGN", "OR", "ROOT", "DOLLAR", "DOT", "RANGE", "ETC", "RARROW", "TREE_BEGIN", "AT", "NOT", "RBRACE", "TOKEN_REF", "RULE_REF", "INT", "WSCHARS", "ESC_SEQ", "STRING_LITERAL", "HEX_DIGIT", "UNICODE_ESC", "WS", "ERRCHAR", "RULE", "RULES", "RULEMODIFIERS", "RULEACTIONS", "BLOCK", "REWRITE_BLOCK", "OPTIONAL", "CLOSURE", "POSITIVE_CLOSURE", "SYNPRED", "CHAR_RANGE", "EPSILON", "ALT", "ALTLIST", "ID", "ARG", "ARGLIST", "RET", "COMBINED", "INITACTION", "LABEL", "GATED_SEMPRED", "SYN_SEMPRED", "BACKTRACK_SEMPRED", "WILDCARD", "LIST", "ELEMENT_OPTIONS", "ST_RESULT", "RESULT", "ALT_REWRITE" + }; + public static final int EOF=-1; + public static final int SEMPRED=4; + public static final int FORCED_ACTION=5; + public static final int DOC_COMMENT=6; + public static final int SRC=7; + public static final int NLCHARS=8; + public static final int COMMENT=9; + public static final int DOUBLE_QUOTE_STRING_LITERAL=10; + public static final int DOUBLE_ANGLE_STRING_LITERAL=11; + public static final int ACTION_STRING_LITERAL=12; + public static final int ACTION_CHAR_LITERAL=13; + public static final int ARG_ACTION=14; + public static final int NESTED_ACTION=15; + public static final int ACTION=16; + public static final int ACTION_ESC=17; + public static final int WSNLCHARS=18; + public static final int OPTIONS=19; + public static final int TOKENS=20; + public static final int SCOPE=21; + public static final int IMPORT=22; + public static final int FRAGMENT=23; + public static final int LEXER=24; + public static final int PARSER=25; + public static final int TREE=26; + public static final int GRAMMAR=27; + public static final int PROTECTED=28; + public static final int PUBLIC=29; + public static final int PRIVATE=30; + public static final int RETURNS=31; + public static final int THROWS=32; + public static final int CATCH=33; + public static final int FINALLY=34; + public static final int TEMPLATE=35; + public static final int MODE=36; + public static final int COLON=37; + public static final int COLONCOLON=38; + public static final int COMMA=39; + public static final int SEMI=40; + public static final int LPAREN=41; + public static final int RPAREN=42; + public static final int IMPLIES=43; + public static final int LT=44; + public static final int GT=45; + public static final int ASSIGN=46; + public static final int QUESTION=47; + public static final int BANG=48; + public static final int STAR=49; + public static final int PLUS=50; + public static final int PLUS_ASSIGN=51; + public static final int OR=52; + public static final int ROOT=53; + public static final int DOLLAR=54; + public static final int DOT=55; + public static final int RANGE=56; + public static final int ETC=57; + 
public static final int RARROW=58; + public static final int TREE_BEGIN=59; + public static final int AT=60; + public static final int NOT=61; + public static final int RBRACE=62; + public static final int TOKEN_REF=63; + public static final int RULE_REF=64; + public static final int INT=65; + public static final int WSCHARS=66; + public static final int ESC_SEQ=67; + public static final int STRING_LITERAL=68; + public static final int HEX_DIGIT=69; + public static final int UNICODE_ESC=70; + public static final int WS=71; + public static final int ERRCHAR=72; + public static final int RULE=73; + public static final int RULES=74; + public static final int RULEMODIFIERS=75; + public static final int RULEACTIONS=76; + public static final int BLOCK=77; + public static final int REWRITE_BLOCK=78; + public static final int OPTIONAL=79; + public static final int CLOSURE=80; + public static final int POSITIVE_CLOSURE=81; + public static final int SYNPRED=82; + public static final int CHAR_RANGE=83; + public static final int EPSILON=84; + public static final int ALT=85; + public static final int ALTLIST=86; + public static final int ID=87; + public static final int ARG=88; + public static final int ARGLIST=89; + public static final int RET=90; + public static final int COMBINED=91; + public static final int INITACTION=92; + public static final int LABEL=93; + public static final int GATED_SEMPRED=94; + public static final int SYN_SEMPRED=95; + public static final int BACKTRACK_SEMPRED=96; + public static final int WILDCARD=97; + public static final int LIST=98; + public static final int ELEMENT_OPTIONS=99; + public static final int ST_RESULT=100; + public static final int RESULT=101; + public static final int ALT_REWRITE=102; + + // delegates + // delegators + + + public BasicSemanticTriggers(TreeNodeStream input) { + this(input, new RecognizerSharedState()); + } + public BasicSemanticTriggers(TreeNodeStream input, RecognizerSharedState state) { + super(input, state); + + } + + + public String[] getTokenNames() { return BasicSemanticTriggers.tokenNames; } + public String getGrammarFileName() { return "BasicSemanticTriggers.g"; } + + + // TODO: SHOULD we fix up grammar AST to remove errors? Like kill refs to bad rules? + // that is, rewrite tree? maybe all passes are filters until code gen, which needs + // tree grammar. 'course we won't try codegen if errors. 
+ public String name; + GrammarASTWithOptions root; + Grammar g; // which grammar are we checking + BasicSemanticChecks checker; + public BasicSemanticTriggers(TreeNodeStream input, Grammar g) { + this(input); + this.g = g; + checker = new BasicSemanticChecks(g); + } + + + + // $ANTLR start "topdown" + // BasicSemanticTriggers.g:86:1: topdown : ( grammarSpec | rules | mode | option | rule | tokenAlias | rewrite ); + public final void topdown() throws RecognitionException { + try { + // BasicSemanticTriggers.g:87:2: ( grammarSpec | rules | mode | option | rule | tokenAlias | rewrite ) + int alt1=7; + alt1 = dfa1.predict(input); + switch (alt1) { + case 1 : + // BasicSemanticTriggers.g:87:4: grammarSpec + { + pushFollow(FOLLOW_grammarSpec_in_topdown87); + grammarSpec(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 2 : + // BasicSemanticTriggers.g:88:4: rules + { + pushFollow(FOLLOW_rules_in_topdown92); + rules(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 3 : + // BasicSemanticTriggers.g:89:4: mode + { + pushFollow(FOLLOW_mode_in_topdown97); + mode(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 4 : + // BasicSemanticTriggers.g:90:4: option + { + pushFollow(FOLLOW_option_in_topdown102); + option(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 5 : + // BasicSemanticTriggers.g:91:4: rule + { + pushFollow(FOLLOW_rule_in_topdown107); + rule(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 6 : + // BasicSemanticTriggers.g:92:4: tokenAlias + { + pushFollow(FOLLOW_tokenAlias_in_topdown112); + tokenAlias(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 7 : + // BasicSemanticTriggers.g:93:4: rewrite + { + pushFollow(FOLLOW_rewrite_in_topdown117); + rewrite(); + + state._fsp--; + if (state.failed) return ; + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "topdown" + + + // $ANTLR start "bottomup" + // BasicSemanticTriggers.g:96:1: bottomup : ( multiElementAltInTreeGrammar | astOps | ruleref | tokenRefWithArgs | elementOption | checkGrammarOptions | wildcardRoot ); + public final void bottomup() throws RecognitionException { + try { + // BasicSemanticTriggers.g:97:2: ( multiElementAltInTreeGrammar | astOps | ruleref | tokenRefWithArgs | elementOption | checkGrammarOptions | wildcardRoot ) + int alt2=7; + switch ( input.LA(1) ) { + case ALT: + { + alt2=1; + } + break; + case BANG: + case ROOT: + { + alt2=2; + } + break; + case RULE_REF: + { + alt2=3; + } + break; + case TOKEN_REF: + { + alt2=4; + } + break; + case ELEMENT_OPTIONS: + { + alt2=5; + } + break; + case GRAMMAR: + { + alt2=6; + } + break; + case TREE_BEGIN: + { + alt2=7; + } + break; + default: + if (state.backtracking>0) {state.failed=true; return ;} + NoViableAltException nvae = + new NoViableAltException("", 2, 0, input); + + throw nvae; + } + + switch (alt2) { + case 1 : + // BasicSemanticTriggers.g:97:4: multiElementAltInTreeGrammar + { + pushFollow(FOLLOW_multiElementAltInTreeGrammar_in_bottomup129); + multiElementAltInTreeGrammar(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 2 : + // BasicSemanticTriggers.g:98:4: astOps + { + pushFollow(FOLLOW_astOps_in_bottomup134); + astOps(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 3 : + // BasicSemanticTriggers.g:99:4: ruleref + { + pushFollow(FOLLOW_ruleref_in_bottomup139); + ruleref(); + + 
state._fsp--; + if (state.failed) return ; + + } + break; + case 4 : + // BasicSemanticTriggers.g:100:4: tokenRefWithArgs + { + pushFollow(FOLLOW_tokenRefWithArgs_in_bottomup144); + tokenRefWithArgs(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 5 : + // BasicSemanticTriggers.g:101:4: elementOption + { + pushFollow(FOLLOW_elementOption_in_bottomup149); + elementOption(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 6 : + // BasicSemanticTriggers.g:102:4: checkGrammarOptions + { + pushFollow(FOLLOW_checkGrammarOptions_in_bottomup154); + checkGrammarOptions(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 7 : + // BasicSemanticTriggers.g:103:4: wildcardRoot + { + pushFollow(FOLLOW_wildcardRoot_in_bottomup160); + wildcardRoot(); + + state._fsp--; + if (state.failed) return ; + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "bottomup" + + public static class grammarSpec_return extends TreeRuleReturnScope { + }; + + // $ANTLR start "grammarSpec" + // BasicSemanticTriggers.g:106:1: grammarSpec : ^( GRAMMAR ID ( DOC_COMMENT )? prequelConstructs ^( RULES ( . )* ) ) ; + public final BasicSemanticTriggers.grammarSpec_return grammarSpec() throws RecognitionException { + BasicSemanticTriggers.grammarSpec_return retval = new BasicSemanticTriggers.grammarSpec_return(); + retval.start = input.LT(1); + + GrammarAST ID1=null; + + try { + // BasicSemanticTriggers.g:107:5: ( ^( GRAMMAR ID ( DOC_COMMENT )? prequelConstructs ^( RULES ( . )* ) ) ) + // BasicSemanticTriggers.g:107:9: ^( GRAMMAR ID ( DOC_COMMENT )? prequelConstructs ^( RULES ( . )* ) ) + { + match(input,GRAMMAR,FOLLOW_GRAMMAR_in_grammarSpec178); if (state.failed) return retval; + + match(input, Token.DOWN, null); if (state.failed) return retval; + ID1=(GrammarAST)match(input,ID,FOLLOW_ID_in_grammarSpec180); if (state.failed) return retval; + // BasicSemanticTriggers.g:107:23: ( DOC_COMMENT )? + int alt3=2; + int LA3_0 = input.LA(1); + + if ( (LA3_0==DOC_COMMENT) ) { + alt3=1; + } + switch (alt3) { + case 1 : + // BasicSemanticTriggers.g:107:23: DOC_COMMENT + { + match(input,DOC_COMMENT,FOLLOW_DOC_COMMENT_in_grammarSpec182); if (state.failed) return retval; + + } + break; + + } + + if ( state.backtracking==1 ) { + + name = (ID1!=null?ID1.getText():null); + checker.checkGrammarName(ID1.token); + root = (GrammarRootAST)((GrammarAST)retval.start); + + } + pushFollow(FOLLOW_prequelConstructs_in_grammarSpec199); + prequelConstructs(); + + state._fsp--; + if (state.failed) return retval; + match(input,RULES,FOLLOW_RULES_in_grammarSpec202); if (state.failed) return retval; + + if ( input.LA(1)==Token.DOWN ) { + match(input, Token.DOWN, null); if (state.failed) return retval; + // BasicSemanticTriggers.g:113:33: ( . )* + loop4: + do { + int alt4=2; + int LA4_0 = input.LA(1); + + if ( ((LA4_0>=SEMPRED && LA4_0<=ALT_REWRITE)) ) { + alt4=1; + } + else if ( (LA4_0==UP) ) { + alt4=2; + } + + + switch (alt4) { + case 1 : + // BasicSemanticTriggers.g:113:33: . 
+ { + matchAny(input); if (state.failed) return retval; + + } + break; + + default : + break loop4; + } + } while (true); + + + match(input, Token.UP, null); if (state.failed) return retval; + } + + match(input, Token.UP, null); if (state.failed) return retval; + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "grammarSpec" + + + // $ANTLR start "checkGrammarOptions" + // BasicSemanticTriggers.g:117:1: checkGrammarOptions : GRAMMAR ; + public final void checkGrammarOptions() throws RecognitionException { + GrammarAST GRAMMAR2=null; + + try { + // BasicSemanticTriggers.g:118:2: ( GRAMMAR ) + // BasicSemanticTriggers.g:118:4: GRAMMAR + { + GRAMMAR2=(GrammarAST)match(input,GRAMMAR,FOLLOW_GRAMMAR_in_checkGrammarOptions225); if (state.failed) return ; + if ( state.backtracking==1 ) { + checker.checkTreeFilterOptions((GrammarRootAST)GRAMMAR2, + root.getOptions()); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "checkGrammarOptions" + + + // $ANTLR start "prequelConstructs" + // BasicSemanticTriggers.g:130:1: prequelConstructs : ( ^(o+= OPTIONS ( . )+ ) | ^(i+= IMPORT ( delegateGrammar )+ ) | ^(t+= TOKENS ( . )+ ) )* ; + public final void prequelConstructs() throws RecognitionException { + GrammarAST o=null; + GrammarAST i=null; + GrammarAST t=null; + List list_o=null; + List list_i=null; + List list_t=null; + + try { + // BasicSemanticTriggers.g:131:2: ( ( ^(o+= OPTIONS ( . )+ ) | ^(i+= IMPORT ( delegateGrammar )+ ) | ^(t+= TOKENS ( . )+ ) )* ) + // BasicSemanticTriggers.g:131:4: ( ^(o+= OPTIONS ( . )+ ) | ^(i+= IMPORT ( delegateGrammar )+ ) | ^(t+= TOKENS ( . )+ ) )* + { + // BasicSemanticTriggers.g:131:4: ( ^(o+= OPTIONS ( . )+ ) | ^(i+= IMPORT ( delegateGrammar )+ ) | ^(t+= TOKENS ( . )+ ) )* + loop8: + do { + int alt8=4; + switch ( input.LA(1) ) { + case OPTIONS: + { + alt8=1; + } + break; + case IMPORT: + { + alt8=2; + } + break; + case TOKENS: + { + alt8=3; + } + break; + + } + + switch (alt8) { + case 1 : + // BasicSemanticTriggers.g:131:6: ^(o+= OPTIONS ( . )+ ) + { + o=(GrammarAST)match(input,OPTIONS,FOLLOW_OPTIONS_in_prequelConstructs248); if (state.failed) return ; + if (list_o==null) list_o=new ArrayList(); + list_o.add(o); + + + match(input, Token.DOWN, null); if (state.failed) return ; + // BasicSemanticTriggers.g:131:19: ( . )+ + int cnt5=0; + loop5: + do { + int alt5=2; + int LA5_0 = input.LA(1); + + if ( ((LA5_0>=SEMPRED && LA5_0<=ALT_REWRITE)) ) { + alt5=1; + } + else if ( (LA5_0==UP) ) { + alt5=2; + } + + + switch (alt5) { + case 1 : + // BasicSemanticTriggers.g:131:19: . 
+ { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + if ( cnt5 >= 1 ) break loop5; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(5, input); + throw eee; + } + cnt5++; + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + + } + break; + case 2 : + // BasicSemanticTriggers.g:132:5: ^(i+= IMPORT ( delegateGrammar )+ ) + { + i=(GrammarAST)match(input,IMPORT,FOLLOW_IMPORT_in_prequelConstructs261); if (state.failed) return ; + if (list_i==null) list_i=new ArrayList(); + list_i.add(i); + + + match(input, Token.DOWN, null); if (state.failed) return ; + // BasicSemanticTriggers.g:132:17: ( delegateGrammar )+ + int cnt6=0; + loop6: + do { + int alt6=2; + int LA6_0 = input.LA(1); + + if ( (LA6_0==ASSIGN||LA6_0==ID) ) { + alt6=1; + } + + + switch (alt6) { + case 1 : + // BasicSemanticTriggers.g:132:17: delegateGrammar + { + pushFollow(FOLLOW_delegateGrammar_in_prequelConstructs263); + delegateGrammar(); + + state._fsp--; + if (state.failed) return ; + + } + break; + + default : + if ( cnt6 >= 1 ) break loop6; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(6, input); + throw eee; + } + cnt6++; + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + + } + break; + case 3 : + // BasicSemanticTriggers.g:133:5: ^(t+= TOKENS ( . )+ ) + { + t=(GrammarAST)match(input,TOKENS,FOLLOW_TOKENS_in_prequelConstructs274); if (state.failed) return ; + if (list_t==null) list_t=new ArrayList(); + list_t.add(t); + + + match(input, Token.DOWN, null); if (state.failed) return ; + // BasicSemanticTriggers.g:133:17: ( . )+ + int cnt7=0; + loop7: + do { + int alt7=2; + int LA7_0 = input.LA(1); + + if ( ((LA7_0>=SEMPRED && LA7_0<=ALT_REWRITE)) ) { + alt7=1; + } + else if ( (LA7_0==UP) ) { + alt7=2; + } + + + switch (alt7) { + case 1 : + // BasicSemanticTriggers.g:133:17: . 
+ { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + if ( cnt7 >= 1 ) break loop7; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(7, input); + throw eee; + } + cnt7++; + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + + } + break; + + default : + break loop8; + } + } while (true); + + if ( state.backtracking==1 ) { + checker.checkNumPrequels(list_o, list_i, list_t); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "prequelConstructs" + + + // $ANTLR start "delegateGrammar" + // BasicSemanticTriggers.g:138:1: delegateGrammar : ( ^( ASSIGN ID id= ID ) | id= ID ) ; + public final void delegateGrammar() throws RecognitionException { + GrammarAST id=null; + + try { + // BasicSemanticTriggers.g:139:5: ( ( ^( ASSIGN ID id= ID ) | id= ID ) ) + // BasicSemanticTriggers.g:139:9: ( ^( ASSIGN ID id= ID ) | id= ID ) + { + // BasicSemanticTriggers.g:139:9: ( ^( ASSIGN ID id= ID ) | id= ID ) + int alt9=2; + int LA9_0 = input.LA(1); + + if ( (LA9_0==ASSIGN) ) { + alt9=1; + } + else if ( (LA9_0==ID) ) { + alt9=2; + } + else { + if (state.backtracking>0) {state.failed=true; return ;} + NoViableAltException nvae = + new NoViableAltException("", 9, 0, input); + + throw nvae; + } + switch (alt9) { + case 1 : + // BasicSemanticTriggers.g:139:11: ^( ASSIGN ID id= ID ) + { + match(input,ASSIGN,FOLLOW_ASSIGN_in_delegateGrammar306); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + match(input,ID,FOLLOW_ID_in_delegateGrammar308); if (state.failed) return ; + id=(GrammarAST)match(input,ID,FOLLOW_ID_in_delegateGrammar312); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + + } + break; + case 2 : + // BasicSemanticTriggers.g:140:10: id= ID + { + id=(GrammarAST)match(input,ID,FOLLOW_ID_in_delegateGrammar326); if (state.failed) return ; + + } + break; + + } + + if ( state.backtracking==1 ) { + checker.checkImport(id.token); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "delegateGrammar" + + + // $ANTLR start "rules" + // BasicSemanticTriggers.g:145:1: rules : RULES ; + public final void rules() throws RecognitionException { + GrammarAST RULES3=null; + + try { + // BasicSemanticTriggers.g:145:7: ( RULES ) + // BasicSemanticTriggers.g:145:9: RULES + { + RULES3=(GrammarAST)match(input,RULES,FOLLOW_RULES_in_rules353); if (state.failed) return ; + if ( state.backtracking==1 ) { + checker.checkNumRules(RULES3); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "rules" + + + // $ANTLR start "mode" + // BasicSemanticTriggers.g:147:1: mode : ^( MODE ID ( . )* ) ; + public final void mode() throws RecognitionException { + GrammarAST ID4=null; + + try { + // BasicSemanticTriggers.g:147:6: ( ^( MODE ID ( . )* ) ) + // BasicSemanticTriggers.g:147:8: ^( MODE ID ( . )* ) + { + match(input,MODE,FOLLOW_MODE_in_mode365); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + ID4=(GrammarAST)match(input,ID,FOLLOW_ID_in_mode367); if (state.failed) return ; + // BasicSemanticTriggers.g:147:18: ( . 
)* + loop10: + do { + int alt10=2; + int LA10_0 = input.LA(1); + + if ( ((LA10_0>=SEMPRED && LA10_0<=ALT_REWRITE)) ) { + alt10=1; + } + else if ( (LA10_0==UP) ) { + alt10=2; + } + + + switch (alt10) { + case 1 : + // BasicSemanticTriggers.g:147:18: . + { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + break loop10; + } + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + checker.checkMode(ID4.token); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "mode" + + public static class option_return extends TreeRuleReturnScope { + }; + + // $ANTLR start "option" + // BasicSemanticTriggers.g:149:1: option : {...}? ^( ASSIGN o= ID optionValue ) ; + public final BasicSemanticTriggers.option_return option() throws RecognitionException { + BasicSemanticTriggers.option_return retval = new BasicSemanticTriggers.option_return(); + retval.start = input.LT(1); + + GrammarAST o=null; + BasicSemanticTriggers.optionValue_return optionValue5 = null; + + + try { + // BasicSemanticTriggers.g:150:5: ({...}? ^( ASSIGN o= ID optionValue ) ) + // BasicSemanticTriggers.g:150:9: {...}? ^( ASSIGN o= ID optionValue ) + { + if ( !((inContext("OPTIONS"))) ) { + if (state.backtracking>0) {state.failed=true; return retval;} + throw new FailedPredicateException(input, "option", "inContext(\"OPTIONS\")"); + } + match(input,ASSIGN,FOLLOW_ASSIGN_in_option392); if (state.failed) return retval; + + match(input, Token.DOWN, null); if (state.failed) return retval; + o=(GrammarAST)match(input,ID,FOLLOW_ID_in_option396); if (state.failed) return retval; + pushFollow(FOLLOW_optionValue_in_option398); + optionValue5=optionValue(); + + state._fsp--; + if (state.failed) return retval; + + match(input, Token.UP, null); if (state.failed) return retval; + if ( state.backtracking==1 ) { + + GrammarAST parent = (GrammarAST)((GrammarAST)retval.start).getParent(); // OPTION + GrammarAST parentWithOptionKind = (GrammarAST)parent.getParent(); + boolean ok = checker.checkOptions(parentWithOptionKind, + o.token, (optionValue5!=null?optionValue5.v:null)); + // store options into XXX_GRAMMAR, RULE, BLOCK nodes + if ( ok ) { + ((GrammarASTWithOptions)parentWithOptionKind).setOption((o!=null?o.getText():null), (optionValue5!=null?optionValue5.v:null)); + } + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "option" + + public static class optionValue_return extends TreeRuleReturnScope { + public String v; + }; + + // $ANTLR start "optionValue" + // BasicSemanticTriggers.g:163:1: optionValue returns [String v] : ( ID | STRING_LITERAL | INT | STAR ); + public final BasicSemanticTriggers.optionValue_return optionValue() throws RecognitionException { + BasicSemanticTriggers.optionValue_return retval = new BasicSemanticTriggers.optionValue_return(); + retval.start = input.LT(1); + + retval.v = ((GrammarAST)retval.start).token.getText(); + try { + // BasicSemanticTriggers.g:165:5: ( ID | STRING_LITERAL | INT | STAR ) + // BasicSemanticTriggers.g: + { + if ( input.LA(1)==STAR||input.LA(1)==INT||input.LA(1)==STRING_LITERAL||input.LA(1)==ID ) { + input.consume(); + state.errorRecovery=false;state.failed=false; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + MismatchedSetException mse = new MismatchedSetException(null,input); + throw mse; + } + + + } 
+ + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "optionValue" + + + // $ANTLR start "rule" + // BasicSemanticTriggers.g:171:1: rule : ^( RULE r= ID ( . )* ) ; + public final void rule() throws RecognitionException { + GrammarAST r=null; + + try { + // BasicSemanticTriggers.g:171:5: ( ^( RULE r= ID ( . )* ) ) + // BasicSemanticTriggers.g:171:9: ^( RULE r= ID ( . )* ) + { + match(input,RULE,FOLLOW_RULE_in_rule480); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + r=(GrammarAST)match(input,ID,FOLLOW_ID_in_rule484); if (state.failed) return ; + // BasicSemanticTriggers.g:171:22: ( . )* + loop11: + do { + int alt11=2; + int LA11_0 = input.LA(1); + + if ( ((LA11_0>=SEMPRED && LA11_0<=ALT_REWRITE)) ) { + alt11=1; + } + else if ( (LA11_0==UP) ) { + alt11=2; + } + + + switch (alt11) { + case 1 : + // BasicSemanticTriggers.g:171:22: . + { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + break loop11; + } + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + checker.checkInvalidRuleDef(r.token); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "rule" + + + // $ANTLR start "ruleref" + // BasicSemanticTriggers.g:174:1: ruleref : RULE_REF ; + public final void ruleref() throws RecognitionException { + GrammarAST RULE_REF6=null; + + try { + // BasicSemanticTriggers.g:175:5: ( RULE_REF ) + // BasicSemanticTriggers.g:175:7: RULE_REF + { + RULE_REF6=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref507); if (state.failed) return ; + if ( state.backtracking==1 ) { + checker.checkInvalidRuleRef(RULE_REF6.token); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "ruleref" + + + // $ANTLR start "tokenAlias" + // BasicSemanticTriggers.g:178:1: tokenAlias : {...}? ^( ASSIGN ID STRING_LITERAL ) ; + public final void tokenAlias() throws RecognitionException { + GrammarAST ID7=null; + + try { + // BasicSemanticTriggers.g:179:2: ({...}? ^( ASSIGN ID STRING_LITERAL ) ) + // BasicSemanticTriggers.g:179:4: {...}? ^( ASSIGN ID STRING_LITERAL ) + { + if ( !((inContext("TOKENS"))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "tokenAlias", "inContext(\"TOKENS\")"); + } + match(input,ASSIGN,FOLLOW_ASSIGN_in_tokenAlias526); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + ID7=(GrammarAST)match(input,ID,FOLLOW_ID_in_tokenAlias528); if (state.failed) return ; + match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_tokenAlias530); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + checker.checkTokenAlias(ID7.token); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "tokenAlias" + + + // $ANTLR start "tokenRefWithArgs" + // BasicSemanticTriggers.g:183:1: tokenRefWithArgs : {...}? ^( TOKEN_REF ARG_ACTION ) ; + public final void tokenRefWithArgs() throws RecognitionException { + GrammarAST TOKEN_REF8=null; + + try { + // BasicSemanticTriggers.g:184:2: ({...}? ^( TOKEN_REF ARG_ACTION ) ) + // BasicSemanticTriggers.g:184:4: {...}? 
^( TOKEN_REF ARG_ACTION ) + { + if ( !((!inContext("RESULT ..."))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "tokenRefWithArgs", "!inContext(\"RESULT ...\")"); + } + TOKEN_REF8=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_tokenRefWithArgs555); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_tokenRefWithArgs557); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + checker.checkTokenArgs(TOKEN_REF8.token); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "tokenRefWithArgs" + + public static class elementOption_return extends TreeRuleReturnScope { + }; + + // $ANTLR start "elementOption" + // BasicSemanticTriggers.g:189:1: elementOption : {...}? ^( ELEMENT_OPTIONS ( ^( ASSIGN o= ID value= ID ) | ^( ASSIGN o= ID value= STRING_LITERAL ) | o= ID ) ) ; + public final BasicSemanticTriggers.elementOption_return elementOption() throws RecognitionException { + BasicSemanticTriggers.elementOption_return retval = new BasicSemanticTriggers.elementOption_return(); + retval.start = input.LT(1); + + GrammarAST o=null; + GrammarAST value=null; + + try { + // BasicSemanticTriggers.g:190:5: ({...}? ^( ELEMENT_OPTIONS ( ^( ASSIGN o= ID value= ID ) | ^( ASSIGN o= ID value= STRING_LITERAL ) | o= ID ) ) ) + // BasicSemanticTriggers.g:190:7: {...}? ^( ELEMENT_OPTIONS ( ^( ASSIGN o= ID value= ID ) | ^( ASSIGN o= ID value= STRING_LITERAL ) | o= ID ) ) + { + if ( !((!inContext("RESULT ..."))) ) { + if (state.backtracking>0) {state.failed=true; return retval;} + throw new FailedPredicateException(input, "elementOption", "!inContext(\"RESULT ...\")"); + } + match(input,ELEMENT_OPTIONS,FOLLOW_ELEMENT_OPTIONS_in_elementOption586); if (state.failed) return retval; + + match(input, Token.DOWN, null); if (state.failed) return retval; + // BasicSemanticTriggers.g:192:7: ( ^( ASSIGN o= ID value= ID ) | ^( ASSIGN o= ID value= STRING_LITERAL ) | o= ID ) + int alt12=3; + int LA12_0 = input.LA(1); + + if ( (LA12_0==ASSIGN) ) { + int LA12_1 = input.LA(2); + + if ( (LA12_1==DOWN) ) { + int LA12_3 = input.LA(3); + + if ( (LA12_3==ID) ) { + int LA12_4 = input.LA(4); + + if ( (LA12_4==ID) ) { + alt12=1; + } + else if ( (LA12_4==STRING_LITERAL) ) { + alt12=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 12, 4, input); + + throw nvae; + } + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 12, 3, input); + + throw nvae; + } + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 12, 1, input); + + throw nvae; + } + } + else if ( (LA12_0==ID) ) { + alt12=3; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 12, 0, input); + + throw nvae; + } + switch (alt12) { + case 1 : + // BasicSemanticTriggers.g:192:9: ^( ASSIGN o= ID value= ID ) + { + match(input,ASSIGN,FOLLOW_ASSIGN_in_elementOption597); if (state.failed) return retval; + + match(input, Token.DOWN, null); if (state.failed) return retval; + o=(GrammarAST)match(input,ID,FOLLOW_ID_in_elementOption601); if (state.failed) 
return retval; + value=(GrammarAST)match(input,ID,FOLLOW_ID_in_elementOption605); if (state.failed) return retval; + + match(input, Token.UP, null); if (state.failed) return retval; + + } + break; + case 2 : + // BasicSemanticTriggers.g:193:11: ^( ASSIGN o= ID value= STRING_LITERAL ) + { + match(input,ASSIGN,FOLLOW_ASSIGN_in_elementOption619); if (state.failed) return retval; + + match(input, Token.DOWN, null); if (state.failed) return retval; + o=(GrammarAST)match(input,ID,FOLLOW_ID_in_elementOption623); if (state.failed) return retval; + value=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_elementOption627); if (state.failed) return retval; + + match(input, Token.UP, null); if (state.failed) return retval; + + } + break; + case 3 : + // BasicSemanticTriggers.g:194:10: o= ID + { + o=(GrammarAST)match(input,ID,FOLLOW_ID_in_elementOption641); if (state.failed) return retval; + + } + break; + + } + + + match(input, Token.UP, null); if (state.failed) return retval; + if ( state.backtracking==1 ) { + + boolean ok = checker.checkTokenOptions((GrammarAST)o.getParent(), + o.token, (value!=null?value.getText():null)); + if ( ok ) { + if ( value!=null ) { + TerminalAST terminal = (TerminalAST)((GrammarAST)retval.start).getParent(); + terminal.setOption((o!=null?o.getText():null), (value!=null?value.getText():null)); + } + else { + TerminalAST terminal = (TerminalAST)((GrammarAST)retval.start).getParent(); + terminal.setOption(TerminalAST.defaultTokenOption, (o!=null?o.getText():null)); + } + } + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "elementOption" + + public static class multiElementAltInTreeGrammar_return extends TreeRuleReturnScope { + }; + + // $ANTLR start "multiElementAltInTreeGrammar" + // BasicSemanticTriggers.g:214:1: multiElementAltInTreeGrammar : {...}? ^( ALT ~ ( SEMPRED | ACTION ) (~ ( SEMPRED | ACTION ) )+ ) ; + public final BasicSemanticTriggers.multiElementAltInTreeGrammar_return multiElementAltInTreeGrammar() throws RecognitionException { + BasicSemanticTriggers.multiElementAltInTreeGrammar_return retval = new BasicSemanticTriggers.multiElementAltInTreeGrammar_return(); + retval.start = input.LT(1); + + try { + // BasicSemanticTriggers.g:215:2: ({...}? ^( ALT ~ ( SEMPRED | ACTION ) (~ ( SEMPRED | ACTION ) )+ ) ) + // BasicSemanticTriggers.g:215:4: {...}? 
^( ALT ~ ( SEMPRED | ACTION ) (~ ( SEMPRED | ACTION ) )+ ) + { + if ( !((inContext("ALT_REWRITE"))) ) { + if (state.backtracking>0) {state.failed=true; return retval;} + throw new FailedPredicateException(input, "multiElementAltInTreeGrammar", "inContext(\"ALT_REWRITE\")"); + } + match(input,ALT,FOLLOW_ALT_in_multiElementAltInTreeGrammar681); if (state.failed) return retval; + + match(input, Token.DOWN, null); if (state.failed) return retval; + if ( (input.LA(1)>=FORCED_ACTION && input.LA(1)<=NESTED_ACTION)||(input.LA(1)>=ACTION_ESC && input.LA(1)<=ALT_REWRITE) ) { + input.consume(); + state.errorRecovery=false;state.failed=false; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + MismatchedSetException mse = new MismatchedSetException(null,input); + throw mse; + } + + // BasicSemanticTriggers.g:216:28: (~ ( SEMPRED | ACTION ) )+ + int cnt13=0; + loop13: + do { + int alt13=2; + int LA13_0 = input.LA(1); + + if ( ((LA13_0>=FORCED_ACTION && LA13_0<=NESTED_ACTION)||(LA13_0>=ACTION_ESC && LA13_0<=ALT_REWRITE)) ) { + alt13=1; + } + + + switch (alt13) { + case 1 : + // BasicSemanticTriggers.g:216:28: ~ ( SEMPRED | ACTION ) + { + if ( (input.LA(1)>=FORCED_ACTION && input.LA(1)<=NESTED_ACTION)||(input.LA(1)>=ACTION_ESC && input.LA(1)<=ALT_REWRITE) ) { + input.consume(); + state.errorRecovery=false;state.failed=false; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + MismatchedSetException mse = new MismatchedSetException(null,input); + throw mse; + } + + + } + break; + + default : + if ( cnt13 >= 1 ) break loop13; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(13, input); + throw eee; + } + cnt13++; + } while (true); + + + match(input, Token.UP, null); if (state.failed) return retval; + if ( state.backtracking==1 ) { + + int altNum = ((GrammarAST)retval.start).getParent().getChildIndex() + 1; // alts are 1..n + GrammarAST firstNode = (GrammarAST)((GrammarAST)retval.start).getChild(0); + checker.checkRewriteForMultiRootAltInTreeGrammar(root.getOptions(), + firstNode.token, + altNum); + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "multiElementAltInTreeGrammar" + + public static class astOps_return extends TreeRuleReturnScope { + }; + + // $ANTLR start "astOps" + // BasicSemanticTriggers.g:227:1: astOps : ( ^( ROOT el= . ) | ^( BANG el= . ) ); + public final BasicSemanticTriggers.astOps_return astOps() throws RecognitionException { + BasicSemanticTriggers.astOps_return retval = new BasicSemanticTriggers.astOps_return(); + retval.start = input.LT(1); + + GrammarAST el=null; + + try { + // BasicSemanticTriggers.g:228:2: ( ^( ROOT el= . ) | ^( BANG el= . ) ) + int alt14=2; + int LA14_0 = input.LA(1); + + if ( (LA14_0==ROOT) ) { + alt14=1; + } + else if ( (LA14_0==BANG) ) { + alt14=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 14, 0, input); + + throw nvae; + } + switch (alt14) { + case 1 : + // BasicSemanticTriggers.g:228:4: ^( ROOT el= . 
) + { + match(input,ROOT,FOLLOW_ROOT_in_astOps716); if (state.failed) return retval; + + match(input, Token.DOWN, null); if (state.failed) return retval; + el=(GrammarAST)input.LT(1); + matchAny(input); if (state.failed) return retval; + + match(input, Token.UP, null); if (state.failed) return retval; + if ( state.backtracking==1 ) { + checker.checkASTOps(root.getOptions(), ((GrammarAST)retval.start), el); + } + + } + break; + case 2 : + // BasicSemanticTriggers.g:229:4: ^( BANG el= . ) + { + match(input,BANG,FOLLOW_BANG_in_astOps729); if (state.failed) return retval; + + match(input, Token.DOWN, null); if (state.failed) return retval; + el=(GrammarAST)input.LT(1); + matchAny(input); if (state.failed) return retval; + + match(input, Token.UP, null); if (state.failed) return retval; + if ( state.backtracking==1 ) { + checker.checkASTOps(root.getOptions(), ((GrammarAST)retval.start), el); + } + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "astOps" + + public static class rewrite_return extends TreeRuleReturnScope { + }; + + // $ANTLR start "rewrite" + // BasicSemanticTriggers.g:232:1: rewrite : ( RESULT | ST_RESULT ) ; + public final BasicSemanticTriggers.rewrite_return rewrite() throws RecognitionException { + BasicSemanticTriggers.rewrite_return retval = new BasicSemanticTriggers.rewrite_return(); + retval.start = input.LT(1); + + try { + // BasicSemanticTriggers.g:233:2: ( ( RESULT | ST_RESULT ) ) + // BasicSemanticTriggers.g:233:4: ( RESULT | ST_RESULT ) + { + if ( (input.LA(1)>=ST_RESULT && input.LA(1)<=RESULT) ) { + input.consume(); + state.errorRecovery=false;state.failed=false; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + MismatchedSetException mse = new MismatchedSetException(null,input); + throw mse; + } + + if ( state.backtracking==1 ) { + checker.checkRewriteOk(root.getOptions(),((GrammarAST)retval.start)); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "rewrite" + + + // $ANTLR start "wildcardRoot" + // BasicSemanticTriggers.g:237:1: wildcardRoot : ^( TREE_BEGIN WILDCARD ( . )* ) ; + public final void wildcardRoot() throws RecognitionException { + GrammarAST WILDCARD9=null; + + try { + // BasicSemanticTriggers.g:238:5: ( ^( TREE_BEGIN WILDCARD ( . )* ) ) + // BasicSemanticTriggers.g:238:7: ^( TREE_BEGIN WILDCARD ( . )* ) + { + match(input,TREE_BEGIN,FOLLOW_TREE_BEGIN_in_wildcardRoot770); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + WILDCARD9=(GrammarAST)match(input,WILDCARD,FOLLOW_WILDCARD_in_wildcardRoot772); if (state.failed) return ; + // BasicSemanticTriggers.g:238:29: ( . )* + loop15: + do { + int alt15=2; + int LA15_0 = input.LA(1); + + if ( ((LA15_0>=SEMPRED && LA15_0<=ALT_REWRITE)) ) { + alt15=1; + } + else if ( (LA15_0==UP) ) { + alt15=2; + } + + + switch (alt15) { + case 1 : + // BasicSemanticTriggers.g:238:29: . 
+ { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + break loop15; + } + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + checker.checkWildcardRoot(WILDCARD9.token); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "wildcardRoot" + + // Delegated rules + + + protected DFA1 dfa1 = new DFA1(this); + static final String DFA1_eotS = + "\15\uffff"; + static final String DFA1_eofS = + "\15\uffff"; + static final String DFA1_minS = + "\1\33\3\uffff\1\2\2\uffff\1\127\1\61\1\3\1\uffff\1\0\1\uffff"; + static final String DFA1_maxS = + "\1\145\3\uffff\1\2\2\uffff\2\127\1\3\1\uffff\1\0\1\uffff"; + static final String DFA1_acceptS = + "\1\uffff\1\1\1\2\1\3\1\uffff\1\5\1\7\3\uffff\1\4\1\uffff\1\6"; + static final String DFA1_specialS = + "\13\uffff\1\0\1\uffff}>"; + static final String[] DFA1_transitionS = { + "\1\1\10\uffff\1\3\11\uffff\1\4\32\uffff\1\5\1\2\31\uffff\2\6", + "", + "", + "", + "\1\7", + "", + "", + "\1\10", + "\1\12\17\uffff\1\12\2\uffff\1\11\22\uffff\1\12", + "\1\13", + "", + "\1\uffff", + "" + }; + + static final short[] DFA1_eot = DFA.unpackEncodedString(DFA1_eotS); + static final short[] DFA1_eof = DFA.unpackEncodedString(DFA1_eofS); + static final char[] DFA1_min = DFA.unpackEncodedStringToUnsignedChars(DFA1_minS); + static final char[] DFA1_max = DFA.unpackEncodedStringToUnsignedChars(DFA1_maxS); + static final short[] DFA1_accept = DFA.unpackEncodedString(DFA1_acceptS); + static final short[] DFA1_special = DFA.unpackEncodedString(DFA1_specialS); + static final short[][] DFA1_transition; + + static { + int numStates = DFA1_transitionS.length; + DFA1_transition = new short[numStates][]; + for (int i=0; i=0 ) return s; + break; + } + if (state.backtracking>0) {state.failed=true; return -1;} + NoViableAltException nvae = + new NoViableAltException(getDescription(), 1, _s, input); + error(nvae); + throw nvae; + } + } + + + public static final BitSet FOLLOW_grammarSpec_in_topdown87 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_rules_in_topdown92 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_mode_in_topdown97 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_option_in_topdown102 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_rule_in_topdown107 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_tokenAlias_in_topdown112 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_rewrite_in_topdown117 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_multiElementAltInTreeGrammar_in_bottomup129 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_astOps_in_bottomup134 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ruleref_in_bottomup139 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_tokenRefWithArgs_in_bottomup144 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_elementOption_in_bottomup149 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_checkGrammarOptions_in_bottomup154 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_wildcardRoot_in_bottomup160 = new BitSet(new 
long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_GRAMMAR_in_grammarSpec178 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_grammarSpec180 = new BitSet(new long[]{0x0000000000580040L,0x0000000000000400L}); + public static final BitSet FOLLOW_DOC_COMMENT_in_grammarSpec182 = new BitSet(new long[]{0x0000000000580000L,0x0000000000000400L}); + public static final BitSet FOLLOW_prequelConstructs_in_grammarSpec199 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000400L}); + public static final BitSet FOLLOW_RULES_in_grammarSpec202 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_GRAMMAR_in_checkGrammarOptions225 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_OPTIONS_in_prequelConstructs248 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_IMPORT_in_prequelConstructs261 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_delegateGrammar_in_prequelConstructs263 = new BitSet(new long[]{0x0000400000000008L,0x0000000000800000L}); + public static final BitSet FOLLOW_TOKENS_in_prequelConstructs274 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ASSIGN_in_delegateGrammar306 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_delegateGrammar308 = new BitSet(new long[]{0x0000000000000000L,0x0000000000800000L}); + public static final BitSet FOLLOW_ID_in_delegateGrammar312 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_ID_in_delegateGrammar326 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_RULES_in_rules353 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_MODE_in_mode365 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_mode367 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF8L,0x0000007FFFFFFFFFL}); + public static final BitSet FOLLOW_ASSIGN_in_option392 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_option396 = new BitSet(new long[]{0x0002000000000000L,0x0000000000800012L}); + public static final BitSet FOLLOW_optionValue_in_option398 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_set_in_optionValue0 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_RULE_in_rule480 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_rule484 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF8L,0x0000007FFFFFFFFFL}); + public static final BitSet FOLLOW_RULE_REF_in_ruleref507 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ASSIGN_in_tokenAlias526 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_tokenAlias528 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000010L}); + public static final BitSet FOLLOW_STRING_LITERAL_in_tokenAlias530 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_TOKEN_REF_in_tokenRefWithArgs555 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ARG_ACTION_in_tokenRefWithArgs557 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_ELEMENT_OPTIONS_in_elementOption586 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ASSIGN_in_elementOption597 = new BitSet(new 
long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_elementOption601 = new BitSet(new long[]{0x0000000000000000L,0x0000000000800000L}); + public static final BitSet FOLLOW_ID_in_elementOption605 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_ASSIGN_in_elementOption619 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_ID_in_elementOption623 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000010L}); + public static final BitSet FOLLOW_STRING_LITERAL_in_elementOption627 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_ID_in_elementOption641 = new BitSet(new long[]{0x0000000000000008L}); + public static final BitSet FOLLOW_ALT_in_multiElementAltInTreeGrammar681 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_set_in_multiElementAltInTreeGrammar683 = new BitSet(new long[]{0xFFFFFFFFFFFEFFE0L,0x0000007FFFFFFFFFL}); + public static final BitSet FOLLOW_set_in_multiElementAltInTreeGrammar690 = new BitSet(new long[]{0xFFFFFFFFFFFEFFE8L,0x0000007FFFFFFFFFL}); + public static final BitSet FOLLOW_ROOT_in_astOps716 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_BANG_in_astOps729 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_set_in_rewrite747 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_TREE_BEGIN_in_wildcardRoot770 = new BitSet(new long[]{0x0000000000000004L}); + public static final BitSet FOLLOW_WILDCARD_in_wildcardRoot772 = new BitSet(new long[]{0xFFFFFFFFFFFFFFF8L,0x0000007FFFFFFFFFL}); + +} \ No newline at end of file diff --git a/tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.tokens b/tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.tokens new file mode 100644 index 000000000..45caa06d7 --- /dev/null +++ b/tool/src/org/antlr/v4/semantics/BasicSemanticTriggers.tokens @@ -0,0 +1,99 @@ +SEMPRED=4 +FORCED_ACTION=5 +DOC_COMMENT=6 +SRC=7 +NLCHARS=8 +COMMENT=9 +DOUBLE_QUOTE_STRING_LITERAL=10 +DOUBLE_ANGLE_STRING_LITERAL=11 +ACTION_STRING_LITERAL=12 +ACTION_CHAR_LITERAL=13 +ARG_ACTION=14 +NESTED_ACTION=15 +ACTION=16 +ACTION_ESC=17 +WSNLCHARS=18 +OPTIONS=19 +TOKENS=20 +SCOPE=21 +IMPORT=22 +FRAGMENT=23 +LEXER=24 +PARSER=25 +TREE=26 +GRAMMAR=27 +PROTECTED=28 +PUBLIC=29 +PRIVATE=30 +RETURNS=31 +THROWS=32 +CATCH=33 +FINALLY=34 +TEMPLATE=35 +MODE=36 +COLON=37 +COLONCOLON=38 +COMMA=39 +SEMI=40 +LPAREN=41 +RPAREN=42 +IMPLIES=43 +LT=44 +GT=45 +ASSIGN=46 +QUESTION=47 +BANG=48 +STAR=49 +PLUS=50 +PLUS_ASSIGN=51 +OR=52 +ROOT=53 +DOLLAR=54 +DOT=55 +RANGE=56 +ETC=57 +RARROW=58 +TREE_BEGIN=59 +AT=60 +NOT=61 +RBRACE=62 +TOKEN_REF=63 +RULE_REF=64 +INT=65 +WSCHARS=66 +ESC_SEQ=67 +STRING_LITERAL=68 +HEX_DIGIT=69 +UNICODE_ESC=70 +WS=71 +ERRCHAR=72 +RULE=73 +RULES=74 +RULEMODIFIERS=75 +RULEACTIONS=76 +BLOCK=77 +REWRITE_BLOCK=78 +OPTIONAL=79 +CLOSURE=80 +POSITIVE_CLOSURE=81 +SYNPRED=82 +CHAR_RANGE=83 +EPSILON=84 +ALT=85 +ALTLIST=86 +ID=87 +ARG=88 +ARGLIST=89 +RET=90 +COMBINED=91 +INITACTION=92 +LABEL=93 +GATED_SEMPRED=94 +SYN_SEMPRED=95 +BACKTRACK_SEMPRED=96 +WILDCARD=97 +LIST=98 +ELEMENT_OPTIONS=99 +ST_RESULT=100 +RESULT=101 +ALT_REWRITE=102 diff --git a/tool/src/org/antlr/v4/semantics/BlankActionSplitterListener.java b/tool/src/org/antlr/v4/semantics/BlankActionSplitterListener.java new file mode 100644 index 000000000..a3845e972 --- /dev/null +++ b/tool/src/org/antlr/v4/semantics/BlankActionSplitterListener.java @@ -0,0 +1,57 @@ +package 
org.antlr.v4.semantics; + +import org.antlr.runtime.Token; +import org.antlr.v4.parse.ActionSplitterListener; + +public class BlankActionSplitterListener implements ActionSplitterListener { + public void setQualifiedAttr(String expr, Token x, Token y, Token rhs) { + } + + public void qualifiedAttr(String expr, Token x, Token y) { + } + + public void setAttr(String expr, Token x, Token rhs) { + } + + public void attr(String expr, Token x) { + } + + public void setDynamicScopeAttr(String expr, Token x, Token y, Token rhs) { + } + + public void dynamicScopeAttr(String expr, Token x, Token y) { + } + + public void setDynamicNegativeIndexedScopeAttr(String expr, Token x, Token y, Token index, Token rhs) { + } + + public void dynamicNegativeIndexedScopeAttr(String expr, Token x, Token y, Token index) { + } + + public void setDynamicAbsoluteIndexedScopeAttr(String expr, Token x, Token y, Token index, Token rhs) { + } + + public void dynamicAbsoluteIndexedScopeAttr(String expr, Token x, Token y, Token index) { + } + + public void templateInstance(String expr) { + } + + public void indirectTemplateInstance(String expr) { + } + + public void setExprAttribute(String expr) { + } + + public void setSTAttribute(String expr) { + } + + public void templateExpr(String expr) { + } + + public void unknownSyntax(Token t) { + } + + public void text(String text) { + } +} diff --git a/tool/src/org/antlr/v4/semantics/CollectSymbols.g b/tool/src/org/antlr/v4/semantics/CollectSymbols.g new file mode 100644 index 000000000..b7eb24565 --- /dev/null +++ b/tool/src/org/antlr/v4/semantics/CollectSymbols.g @@ -0,0 +1,310 @@ +/* + [The "BSD license"] + Copyright (c) 2010 Terence Parr + All rights reserved. + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +/** Collects rules, terminals, strings, actions, scopes etc... from AST + * No side-effects + */ +tree grammar CollectSymbols; +options { + language = Java; + tokenVocab = ANTLRParser; + ASTLabelType = GrammarAST; + filter = true; + //superClass = 'org.antlr.v4.runtime.tree.TreeFilter'; +} + +// Include the copyright in this source and also the generated source +@header { +/* + [The "BSD license"] + Copyright (c) 2010 Terence Parr + All rights reserved. 
+ Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ +package org.antlr.v4.semantics; +import org.antlr.v4.tool.*; +import org.antlr.v4.parse.*; +import java.util.Set; +import java.util.HashSet; +import org.stringtemplate.v4.misc.MultiMap; +} + +@members { +Rule currentRule; +String currentMode = LexerGrammar.DEFAULT_MODE_NAME; +int currentAlt = 1; // 1..n +public List rules = new ArrayList(); +public List rulerefs = new ArrayList(); +public List qualifiedRulerefs = new ArrayList(); +public List terminals = new ArrayList(); +public List tokenIDRefs = new ArrayList(); +public Set strings = new HashSet(); +public List tokensDefs = new ArrayList(); +public List scopes = new ArrayList(); +public List actions = new ArrayList(); +Grammar g; // which grammar are we checking +public CollectSymbols(TreeNodeStream input, Grammar g) { + this(input); + this.g = g; +} +} + +topdown +//@init {System.out.println("topdown: "+((Tree)input.LT(1)).getText());} + : globalScope + | globalNamedAction + | tokensSection + | mode + | rule + | ruleArg + | ruleReturns + | ruleNamedAction + | ruleScopeSpec + | ruleref + | rewriteElement // make sure we check this before terminal etc... + // want to match rewrite stuff all here + | terminal + | labeledElement + | setAlt + | ruleAction + | sempred + | finallyClause + | exceptionHandler + ; + +bottomup + : finishRule + ; + +globalScope + : {inContext("GRAMMAR")}? ^(SCOPE ID ACTION) + { + AttributeDict s = ScopeParser.parseDynamicScope($ACTION.text); + s.type = AttributeDict.DictType.GLOBAL_SCOPE; + s.name = $ID.text; + s.ast = $ACTION; + scopes.add(s); + } + ; + +globalNamedAction + : {inContext("GRAMMAR")}? ^(AT ID? ID ACTION) + {actions.add($AT); ((ActionAST)$ACTION).resolver = g;} + ; + +tokensSection + : {inContext("TOKENS")}? + ( ^(ASSIGN t=ID STRING_LITERAL) + {terminals.add($t); tokenIDRefs.add($t); + tokensDefs.add($ASSIGN); strings.add($STRING_LITERAL.text);} + | t=ID + {terminals.add($t); tokenIDRefs.add($t); tokensDefs.add($t);} + ) + ; + +mode: ^(MODE ID .+) {currentMode = $ID.text;} ; + +rule +@init {List modifiers = new ArrayList();} + : ^( RULE + name=ID (options {greedy=false;}:.)* + (^(RULEMODIFIERS (m=. {modifiers.add($m);})+))? 
+ (^(AT ID ACTION))* + ^(BLOCK .+) + .* + ) + { + int numAlts = $RULE.getFirstChildWithType(BLOCK).getChildCount(); + Rule r = new Rule(g, $name.text, (RuleAST)$RULE, numAlts); + if ( g.isLexer() ) r.mode = currentMode; + if ( modifiers.size()>0 ) r.modifiers = modifiers; + rules.add(r); + currentRule = r; + currentAlt = 1; + } + ; + +setAlt + : {inContext("RULE BLOCK")}? ( ALT | ALT_REWRITE ) + { + currentAlt = $start.getChildIndex()+1; + currentRule.alt[currentAlt].ast = (AltAST)$start; + } + ; + +finishRule + : RULE {currentRule = null;} + ; + +ruleNamedAction + : {inContext("RULE")}? ^(AT ID ACTION) + { + currentRule.namedActions.put($ID.text,(ActionAST)$ACTION); + ((ActionAST)$ACTION).resolver = currentRule; + } + ; + +ruleAction + : {inContext("RULE ...")&&!inContext("SCOPE")&& + !inContext("CATCH")&&!inContext("FINALLY")&&!inContext("AT")}? + ACTION + { + currentRule.defineActionInAlt(currentAlt, (ActionAST)$ACTION); + ((ActionAST)$ACTION).resolver = currentRule.alt[currentAlt]; + } + | FORCED_ACTION + { + currentRule.defineActionInAlt(currentAlt, (ActionAST)$FORCED_ACTION); + ((ActionAST)$FORCED_ACTION).resolver = currentRule.alt[currentAlt]; + } + ; + +sempred + : {inContext("RULE ...")}? + SEMPRED + { + currentRule.definePredicateInAlt(currentAlt, (PredAST)$SEMPRED); + ((PredAST)$SEMPRED).resolver = currentRule.alt[currentAlt]; + } + ; + +exceptionHandler + : ^(CATCH ARG_ACTION ACTION) + { + currentRule.exceptionActions.add((ActionAST)$ACTION); + ((ActionAST)$ACTION).resolver = currentRule; + } + ; + +finallyClause + : ^(FINALLY ACTION) + { + currentRule.finallyAction = (ActionAST)$ACTION; + ((ActionAST)$ACTION).resolver = currentRule; + } + ; + +ruleArg + : {inContext("RULE")}? ARG_ACTION + { + currentRule.args = ScopeParser.parseTypeList($ARG_ACTION.text); + currentRule.args.type = AttributeDict.DictType.ARG; + currentRule.args.ast = $ARG_ACTION; + } + ; + +ruleReturns + : ^(RETURNS ARG_ACTION) + { + currentRule.retvals = ScopeParser.parseTypeList($ARG_ACTION.text); + currentRule.retvals.type = AttributeDict.DictType.RET; + currentRule.retvals.ast = $ARG_ACTION; + } + ; + +ruleScopeSpec + : {inContext("RULE")}? + ( ^(SCOPE ACTION) + { + currentRule.scope = ScopeParser.parseDynamicScope($ACTION.text); + currentRule.scope.type = AttributeDict.DictType.RULE_SCOPE; + currentRule.scope.name = currentRule.name; + currentRule.scope.ast = $ACTION; + } + | ^(SCOPE ids+=ID+) {currentRule.useScopes = $ids;} + ) + ; + +rewriteElement +//@init {System.out.println("rewriteElement: "+((Tree)input.LT(1)).getText());} + : + {inContext("RESULT ...")}? (TOKEN_REF|RULE_REF|STRING_LITERAL|LABEL) + {currentRule.alt[currentAlt].rewriteElements.add($start);} + ; + +labeledElement +@after { +LabelElementPair lp = new LabelElementPair(g, $id, $e, $start.getType()); +//currentRule.labelDefs.map($id.text, lp); +currentRule.alt[currentAlt].labelDefs.map($id.text, lp); +} + : {inContext("RULE ...")}? + ( ^(ASSIGN id=ID e=.) + | ^(PLUS_ASSIGN id=ID e=.) + ) + ; + +terminal + : {!inContext("TOKENS ASSIGN")}? 
STRING_LITERAL + { + terminals.add($start); + strings.add($STRING_LITERAL.text); + if ( currentRule!=null ) { + currentRule.alt[currentAlt].tokenRefs.map($STRING_LITERAL.text, (TerminalAST)$STRING_LITERAL); + } + } + | TOKEN_REF + { + terminals.add($TOKEN_REF); + tokenIDRefs.add($TOKEN_REF); + if ( currentRule!=null ) { + currentRule.alt[currentAlt].tokenRefs.map($TOKEN_REF.text, (TerminalAST)$TOKEN_REF); + } + } + ; + +ruleref +//@init {System.out.println("ruleref: "+((Tree)input.LT(1)).getText());} + : ( {inContext("DOT ...")}? + r=RULE_REF {qualifiedRulerefs.add((GrammarAST)$r.getParent());} + | r=RULE_REF + ) + { + rulerefs.add($r); + if ( currentRule!=null ) { + currentRule.alt[currentAlt].ruleRefs.map($r.text, $r); + } + } + ; diff --git a/tool/src/org/antlr/v4/semantics/CollectSymbols.java b/tool/src/org/antlr/v4/semantics/CollectSymbols.java new file mode 100644 index 000000000..e134896b1 --- /dev/null +++ b/tool/src/org/antlr/v4/semantics/CollectSymbols.java @@ -0,0 +1,2037 @@ +// $ANTLR 3.3 Nov 30, 2010 12:50:56 CollectSymbols.g 2011-06-11 10:28:59 + +/* + [The "BSD license"] + Copyright (c) 2010 Terence Parr + All rights reserved. + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ +package org.antlr.v4.semantics; + +import org.antlr.runtime.*; +import org.antlr.runtime.BitSet; +import org.antlr.runtime.tree.*; +import org.antlr.v4.parse.ScopeParser; +import org.antlr.v4.tool.*; + +import java.util.*; +/** Collects rules, terminals, strings, actions, scopes etc... 
from AST + * No side-effects + */ +public class CollectSymbols extends TreeFilter { + public static final String[] tokenNames = new String[] { + "", "", "", "", "SEMPRED", "FORCED_ACTION", "DOC_COMMENT", "SRC", "NLCHARS", "COMMENT", "DOUBLE_QUOTE_STRING_LITERAL", "DOUBLE_ANGLE_STRING_LITERAL", "ACTION_STRING_LITERAL", "ACTION_CHAR_LITERAL", "ARG_ACTION", "NESTED_ACTION", "ACTION", "ACTION_ESC", "WSNLCHARS", "OPTIONS", "TOKENS", "SCOPE", "IMPORT", "FRAGMENT", "LEXER", "PARSER", "TREE", "GRAMMAR", "PROTECTED", "PUBLIC", "PRIVATE", "RETURNS", "THROWS", "CATCH", "FINALLY", "TEMPLATE", "MODE", "COLON", "COLONCOLON", "COMMA", "SEMI", "LPAREN", "RPAREN", "IMPLIES", "LT", "GT", "ASSIGN", "QUESTION", "BANG", "STAR", "PLUS", "PLUS_ASSIGN", "OR", "ROOT", "DOLLAR", "DOT", "RANGE", "ETC", "RARROW", "TREE_BEGIN", "AT", "NOT", "RBRACE", "TOKEN_REF", "RULE_REF", "INT", "WSCHARS", "ESC_SEQ", "STRING_LITERAL", "HEX_DIGIT", "UNICODE_ESC", "WS", "ERRCHAR", "RULE", "RULES", "RULEMODIFIERS", "RULEACTIONS", "BLOCK", "REWRITE_BLOCK", "OPTIONAL", "CLOSURE", "POSITIVE_CLOSURE", "SYNPRED", "CHAR_RANGE", "EPSILON", "ALT", "ALTLIST", "ID", "ARG", "ARGLIST", "RET", "COMBINED", "INITACTION", "LABEL", "GATED_SEMPRED", "SYN_SEMPRED", "BACKTRACK_SEMPRED", "WILDCARD", "LIST", "ELEMENT_OPTIONS", "ST_RESULT", "RESULT", "ALT_REWRITE" + }; + public static final int EOF=-1; + public static final int SEMPRED=4; + public static final int FORCED_ACTION=5; + public static final int DOC_COMMENT=6; + public static final int SRC=7; + public static final int NLCHARS=8; + public static final int COMMENT=9; + public static final int DOUBLE_QUOTE_STRING_LITERAL=10; + public static final int DOUBLE_ANGLE_STRING_LITERAL=11; + public static final int ACTION_STRING_LITERAL=12; + public static final int ACTION_CHAR_LITERAL=13; + public static final int ARG_ACTION=14; + public static final int NESTED_ACTION=15; + public static final int ACTION=16; + public static final int ACTION_ESC=17; + public static final int WSNLCHARS=18; + public static final int OPTIONS=19; + public static final int TOKENS=20; + public static final int SCOPE=21; + public static final int IMPORT=22; + public static final int FRAGMENT=23; + public static final int LEXER=24; + public static final int PARSER=25; + public static final int TREE=26; + public static final int GRAMMAR=27; + public static final int PROTECTED=28; + public static final int PUBLIC=29; + public static final int PRIVATE=30; + public static final int RETURNS=31; + public static final int THROWS=32; + public static final int CATCH=33; + public static final int FINALLY=34; + public static final int TEMPLATE=35; + public static final int MODE=36; + public static final int COLON=37; + public static final int COLONCOLON=38; + public static final int COMMA=39; + public static final int SEMI=40; + public static final int LPAREN=41; + public static final int RPAREN=42; + public static final int IMPLIES=43; + public static final int LT=44; + public static final int GT=45; + public static final int ASSIGN=46; + public static final int QUESTION=47; + public static final int BANG=48; + public static final int STAR=49; + public static final int PLUS=50; + public static final int PLUS_ASSIGN=51; + public static final int OR=52; + public static final int ROOT=53; + public static final int DOLLAR=54; + public static final int DOT=55; + public static final int RANGE=56; + public static final int ETC=57; + public static final int RARROW=58; + public static final int TREE_BEGIN=59; + public static final int AT=60; + public 
static final int NOT=61; + public static final int RBRACE=62; + public static final int TOKEN_REF=63; + public static final int RULE_REF=64; + public static final int INT=65; + public static final int WSCHARS=66; + public static final int ESC_SEQ=67; + public static final int STRING_LITERAL=68; + public static final int HEX_DIGIT=69; + public static final int UNICODE_ESC=70; + public static final int WS=71; + public static final int ERRCHAR=72; + public static final int RULE=73; + public static final int RULES=74; + public static final int RULEMODIFIERS=75; + public static final int RULEACTIONS=76; + public static final int BLOCK=77; + public static final int REWRITE_BLOCK=78; + public static final int OPTIONAL=79; + public static final int CLOSURE=80; + public static final int POSITIVE_CLOSURE=81; + public static final int SYNPRED=82; + public static final int CHAR_RANGE=83; + public static final int EPSILON=84; + public static final int ALT=85; + public static final int ALTLIST=86; + public static final int ID=87; + public static final int ARG=88; + public static final int ARGLIST=89; + public static final int RET=90; + public static final int COMBINED=91; + public static final int INITACTION=92; + public static final int LABEL=93; + public static final int GATED_SEMPRED=94; + public static final int SYN_SEMPRED=95; + public static final int BACKTRACK_SEMPRED=96; + public static final int WILDCARD=97; + public static final int LIST=98; + public static final int ELEMENT_OPTIONS=99; + public static final int ST_RESULT=100; + public static final int RESULT=101; + public static final int ALT_REWRITE=102; + + // delegates + // delegators + + + public CollectSymbols(TreeNodeStream input) { + this(input, new RecognizerSharedState()); + } + public CollectSymbols(TreeNodeStream input, RecognizerSharedState state) { + super(input, state); + + } + + + public String[] getTokenNames() { return CollectSymbols.tokenNames; } + public String getGrammarFileName() { return "CollectSymbols.g"; } + + + Rule currentRule; + String currentMode = LexerGrammar.DEFAULT_MODE_NAME; + int currentAlt = 1; // 1..n + public List rules = new ArrayList(); + public List rulerefs = new ArrayList(); + public List qualifiedRulerefs = new ArrayList(); + public List terminals = new ArrayList(); + public List tokenIDRefs = new ArrayList(); + public Set strings = new HashSet(); + public List tokensDefs = new ArrayList(); + public List scopes = new ArrayList(); + public List actions = new ArrayList(); + Grammar g; // which grammar are we checking + public CollectSymbols(TreeNodeStream input, Grammar g) { + this(input); + this.g = g; + } + + + + // $ANTLR start "topdown" + // CollectSymbols.g:94:1: topdown : ( globalScope | globalNamedAction | tokensSection | mode | rule | ruleArg | ruleReturns | ruleNamedAction | ruleScopeSpec | ruleref | rewriteElement | terminal | labeledElement | setAlt | ruleAction | sempred | finallyClause | exceptionHandler ); + public final void topdown() throws RecognitionException { + try { + // CollectSymbols.g:96:5: ( globalScope | globalNamedAction | tokensSection | mode | rule | ruleArg | ruleReturns | ruleNamedAction | ruleScopeSpec | ruleref | rewriteElement | terminal | labeledElement | setAlt | ruleAction | sempred | finallyClause | exceptionHandler ) + int alt1=18; + alt1 = dfa1.predict(input); + switch (alt1) { + case 1 : + // CollectSymbols.g:96:7: globalScope + { + pushFollow(FOLLOW_globalScope_in_topdown89); + globalScope(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 
2 : + // CollectSymbols.g:97:7: globalNamedAction + { + pushFollow(FOLLOW_globalNamedAction_in_topdown97); + globalNamedAction(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 3 : + // CollectSymbols.g:98:7: tokensSection + { + pushFollow(FOLLOW_tokensSection_in_topdown105); + tokensSection(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 4 : + // CollectSymbols.g:99:7: mode + { + pushFollow(FOLLOW_mode_in_topdown113); + mode(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 5 : + // CollectSymbols.g:100:7: rule + { + pushFollow(FOLLOW_rule_in_topdown121); + rule(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 6 : + // CollectSymbols.g:101:7: ruleArg + { + pushFollow(FOLLOW_ruleArg_in_topdown129); + ruleArg(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 7 : + // CollectSymbols.g:102:7: ruleReturns + { + pushFollow(FOLLOW_ruleReturns_in_topdown137); + ruleReturns(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 8 : + // CollectSymbols.g:103:7: ruleNamedAction + { + pushFollow(FOLLOW_ruleNamedAction_in_topdown145); + ruleNamedAction(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 9 : + // CollectSymbols.g:104:7: ruleScopeSpec + { + pushFollow(FOLLOW_ruleScopeSpec_in_topdown153); + ruleScopeSpec(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 10 : + // CollectSymbols.g:105:7: ruleref + { + pushFollow(FOLLOW_ruleref_in_topdown161); + ruleref(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 11 : + // CollectSymbols.g:106:7: rewriteElement + { + pushFollow(FOLLOW_rewriteElement_in_topdown169); + rewriteElement(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 12 : + // CollectSymbols.g:108:7: terminal + { + pushFollow(FOLLOW_terminal_in_topdown190); + terminal(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 13 : + // CollectSymbols.g:109:7: labeledElement + { + pushFollow(FOLLOW_labeledElement_in_topdown198); + labeledElement(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 14 : + // CollectSymbols.g:110:7: setAlt + { + pushFollow(FOLLOW_setAlt_in_topdown206); + setAlt(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 15 : + // CollectSymbols.g:111:7: ruleAction + { + pushFollow(FOLLOW_ruleAction_in_topdown214); + ruleAction(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 16 : + // CollectSymbols.g:112:9: sempred + { + pushFollow(FOLLOW_sempred_in_topdown224); + sempred(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 17 : + // CollectSymbols.g:113:7: finallyClause + { + pushFollow(FOLLOW_finallyClause_in_topdown232); + finallyClause(); + + state._fsp--; + if (state.failed) return ; + + } + break; + case 18 : + // CollectSymbols.g:114:7: exceptionHandler + { + pushFollow(FOLLOW_exceptionHandler_in_topdown240); + exceptionHandler(); + + state._fsp--; + if (state.failed) return ; + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "topdown" + + + // $ANTLR start "bottomup" + // CollectSymbols.g:117:1: bottomup : finishRule ; + public final void bottomup() throws RecognitionException { + try { + // CollectSymbols.g:118:2: ( finishRule ) + // CollectSymbols.g:118:4: finishRule + { + pushFollow(FOLLOW_finishRule_in_bottomup251); + finishRule(); + + 
state._fsp--; + if (state.failed) return ; + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "bottomup" + + + // $ANTLR start "globalScope" + // CollectSymbols.g:121:1: globalScope : {...}? ^( SCOPE ID ACTION ) ; + public final void globalScope() throws RecognitionException { + GrammarAST ACTION1=null; + GrammarAST ID2=null; + + try { + // CollectSymbols.g:122:2: ({...}? ^( SCOPE ID ACTION ) ) + // CollectSymbols.g:122:4: {...}? ^( SCOPE ID ACTION ) + { + if ( !((inContext("GRAMMAR"))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "globalScope", "inContext(\"GRAMMAR\")"); + } + match(input,SCOPE,FOLLOW_SCOPE_in_globalScope265); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + ID2=(GrammarAST)match(input,ID,FOLLOW_ID_in_globalScope267); if (state.failed) return ; + ACTION1=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_globalScope269); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + + AttributeDict s = ScopeParser.parseDynamicScope((ACTION1!=null?ACTION1.getText():null)); + s.type = AttributeDict.DictType.GLOBAL_SCOPE; + s.name = (ID2!=null?ID2.getText():null); + s.ast = ACTION1; + scopes.add(s); + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "globalScope" + + + // $ANTLR start "globalNamedAction" + // CollectSymbols.g:132:1: globalNamedAction : {...}? ^( AT ( ID )? ID ACTION ) ; + public final void globalNamedAction() throws RecognitionException { + GrammarAST AT3=null; + GrammarAST ACTION4=null; + + try { + // CollectSymbols.g:133:2: ({...}? ^( AT ( ID )? ID ACTION ) ) + // CollectSymbols.g:133:4: {...}? ^( AT ( ID )? ID ACTION ) + { + if ( !((inContext("GRAMMAR"))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "globalNamedAction", "inContext(\"GRAMMAR\")"); + } + AT3=(GrammarAST)match(input,AT,FOLLOW_AT_in_globalNamedAction288); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + // CollectSymbols.g:133:33: ( ID )? + int alt2=2; + int LA2_0 = input.LA(1); + + if ( (LA2_0==ID) ) { + int LA2_1 = input.LA(2); + + if ( (LA2_1==ID) ) { + alt2=1; + } + } + switch (alt2) { + case 1 : + // CollectSymbols.g:133:33: ID + { + match(input,ID,FOLLOW_ID_in_globalNamedAction290); if (state.failed) return ; + + } + break; + + } + + match(input,ID,FOLLOW_ID_in_globalNamedAction293); if (state.failed) return ; + ACTION4=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_globalNamedAction295); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + actions.add(AT3); ((ActionAST)ACTION4).resolver = g; + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "globalNamedAction" + + + // $ANTLR start "tokensSection" + // CollectSymbols.g:137:1: tokensSection : {...}? ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) ; + public final void tokensSection() throws RecognitionException { + GrammarAST t=null; + GrammarAST ASSIGN5=null; + GrammarAST STRING_LITERAL6=null; + + try { + // CollectSymbols.g:138:2: ({...}? ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) ) + // CollectSymbols.g:138:4: {...}? 
( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) + { + if ( !((inContext("TOKENS"))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "tokensSection", "inContext(\"TOKENS\")"); + } + // CollectSymbols.g:139:3: ( ^( ASSIGN t= ID STRING_LITERAL ) | t= ID ) + int alt3=2; + int LA3_0 = input.LA(1); + + if ( (LA3_0==ASSIGN) ) { + alt3=1; + } + else if ( (LA3_0==ID) ) { + alt3=2; + } + else { + if (state.backtracking>0) {state.failed=true; return ;} + NoViableAltException nvae = + new NoViableAltException("", 3, 0, input); + + throw nvae; + } + switch (alt3) { + case 1 : + // CollectSymbols.g:139:5: ^( ASSIGN t= ID STRING_LITERAL ) + { + ASSIGN5=(GrammarAST)match(input,ASSIGN,FOLLOW_ASSIGN_in_tokensSection318); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + t=(GrammarAST)match(input,ID,FOLLOW_ID_in_tokensSection322); if (state.failed) return ; + STRING_LITERAL6=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_tokensSection324); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + terminals.add(t); tokenIDRefs.add(t); + tokensDefs.add(ASSIGN5); strings.add((STRING_LITERAL6!=null?STRING_LITERAL6.getText():null)); + } + + } + break; + case 2 : + // CollectSymbols.g:142:5: t= ID + { + t=(GrammarAST)match(input,ID,FOLLOW_ID_in_tokensSection338); if (state.failed) return ; + if ( state.backtracking==1 ) { + terminals.add(t); tokenIDRefs.add(t); tokensDefs.add(t); + } + + } + break; + + } + + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "tokensSection" + + + // $ANTLR start "mode" + // CollectSymbols.g:147:1: mode : ^( MODE ID ( . )+ ) ; + public final void mode() throws RecognitionException { + GrammarAST ID7=null; + + try { + // CollectSymbols.g:147:5: ( ^( MODE ID ( . )+ ) ) + // CollectSymbols.g:147:7: ^( MODE ID ( . )+ ) + { + match(input,MODE,FOLLOW_MODE_in_mode357); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + ID7=(GrammarAST)match(input,ID,FOLLOW_ID_in_mode359); if (state.failed) return ; + // CollectSymbols.g:147:17: ( . )+ + int cnt4=0; + loop4: + do { + int alt4=2; + int LA4_0 = input.LA(1); + + if ( ((LA4_0>=SEMPRED && LA4_0<=ALT_REWRITE)) ) { + alt4=1; + } + else if ( (LA4_0==UP) ) { + alt4=2; + } + + + switch (alt4) { + case 1 : + // CollectSymbols.g:147:17: . + { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + if ( cnt4 >= 1 ) break loop4; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(4, input); + throw eee; + } + cnt4++; + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + currentMode = (ID7!=null?ID7.getText():null); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "mode" + + + // $ANTLR start "rule" + // CollectSymbols.g:149:1: rule : ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ( ^( AT ID ACTION ) )* ^( BLOCK ( . )+ ) ( . )* ) ; + public final void rule() throws RecognitionException { + GrammarAST name=null; + GrammarAST RULE8=null; + GrammarAST m=null; + + List modifiers = new ArrayList(); + try { + // CollectSymbols.g:151:2: ( ^( RULE name= ID ( options {greedy=false; } : . 
)* ( ^( RULEMODIFIERS (m= . )+ ) )? ( ^( AT ID ACTION ) )* ^( BLOCK ( . )+ ) ( . )* ) ) + // CollectSymbols.g:151:6: ^( RULE name= ID ( options {greedy=false; } : . )* ( ^( RULEMODIFIERS (m= . )+ ) )? ( ^( AT ID ACTION ) )* ^( BLOCK ( . )+ ) ( . )* ) + { + RULE8=(GrammarAST)match(input,RULE,FOLLOW_RULE_in_rule384); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + name=(GrammarAST)match(input,ID,FOLLOW_ID_in_rule396); if (state.failed) return ; + // CollectSymbols.g:152:17: ( options {greedy=false; } : . )* + loop5: + do { + int alt5=2; + alt5 = dfa5.predict(input); + switch (alt5) { + case 1 : + // CollectSymbols.g:152:42: . + { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + break loop5; + } + } while (true); + + // CollectSymbols.g:153:9: ( ^( RULEMODIFIERS (m= . )+ ) )? + int alt7=2; + int LA7_0 = input.LA(1); + + if ( (LA7_0==RULEMODIFIERS) ) { + alt7=1; + } + switch (alt7) { + case 1 : + // CollectSymbols.g:153:10: ^( RULEMODIFIERS (m= . )+ ) + { + match(input,RULEMODIFIERS,FOLLOW_RULEMODIFIERS_in_rule420); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + // CollectSymbols.g:153:26: (m= . )+ + int cnt6=0; + loop6: + do { + int alt6=2; + int LA6_0 = input.LA(1); + + if ( ((LA6_0>=SEMPRED && LA6_0<=ALT_REWRITE)) ) { + alt6=1; + } + + + switch (alt6) { + case 1 : + // CollectSymbols.g:153:27: m= . + { + m=(GrammarAST)input.LT(1); + matchAny(input); if (state.failed) return ; + if ( state.backtracking==1 ) { + modifiers.add(m); + } + + } + break; + + default : + if ( cnt6 >= 1 ) break loop6; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(6, input); + throw eee; + } + cnt6++; + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + + } + break; + + } + + // CollectSymbols.g:154:9: ( ^( AT ID ACTION ) )* + loop8: + do { + int alt8=2; + int LA8_0 = input.LA(1); + + if ( (LA8_0==AT) ) { + alt8=1; + } + + + switch (alt8) { + case 1 : + // CollectSymbols.g:154:10: ^( AT ID ACTION ) + { + match(input,AT,FOLLOW_AT_in_rule444); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + match(input,ID,FOLLOW_ID_in_rule446); if (state.failed) return ; + match(input,ACTION,FOLLOW_ACTION_in_rule448); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + + } + break; + + default : + break loop8; + } + } while (true); + + match(input,BLOCK,FOLLOW_BLOCK_in_rule462); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + // CollectSymbols.g:155:17: ( . )+ + int cnt9=0; + loop9: + do { + int alt9=2; + int LA9_0 = input.LA(1); + + if ( ((LA9_0>=SEMPRED && LA9_0<=ALT_REWRITE)) ) { + alt9=1; + } + else if ( (LA9_0==UP) ) { + alt9=2; + } + + + switch (alt9) { + case 1 : + // CollectSymbols.g:155:17: . + { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + if ( cnt9 >= 1 ) break loop9; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(9, input); + throw eee; + } + cnt9++; + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + // CollectSymbols.g:156:9: ( . )* + loop10: + do { + int alt10=2; + int LA10_0 = input.LA(1); + + if ( ((LA10_0>=SEMPRED && LA10_0<=ALT_REWRITE)) ) { + alt10=1; + } + else if ( (LA10_0==UP) ) { + alt10=2; + } + + + switch (alt10) { + case 1 : + // CollectSymbols.g:156:9: . 
+ { + matchAny(input); if (state.failed) return ; + + } + break; + + default : + break loop10; + } + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + + int numAlts = RULE8.getFirstChildWithType(BLOCK).getChildCount(); + Rule r = new Rule(g, (name!=null?name.getText():null), (RuleAST)RULE8, numAlts); + if ( g.isLexer() ) r.mode = currentMode; + if ( modifiers.size()>0 ) r.modifiers = modifiers; + rules.add(r); + currentRule = r; + currentAlt = 1; + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "rule" + + public static class setAlt_return extends TreeRuleReturnScope { + }; + + // $ANTLR start "setAlt" + // CollectSymbols.g:169:1: setAlt : {...}? ( ALT | ALT_REWRITE ) ; + public final CollectSymbols.setAlt_return setAlt() throws RecognitionException { + CollectSymbols.setAlt_return retval = new CollectSymbols.setAlt_return(); + retval.start = input.LT(1); + + try { + // CollectSymbols.g:170:2: ({...}? ( ALT | ALT_REWRITE ) ) + // CollectSymbols.g:170:4: {...}? ( ALT | ALT_REWRITE ) + { + if ( !((inContext("RULE BLOCK"))) ) { + if (state.backtracking>0) {state.failed=true; return retval;} + throw new FailedPredicateException(input, "setAlt", "inContext(\"RULE BLOCK\")"); + } + if ( input.LA(1)==ALT||input.LA(1)==ALT_REWRITE ) { + input.consume(); + state.errorRecovery=false;state.failed=false; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + MismatchedSetException mse = new MismatchedSetException(null,input); + throw mse; + } + + if ( state.backtracking==1 ) { + + currentAlt = ((GrammarAST)retval.start).getChildIndex()+1; + currentRule.alt[currentAlt].ast = (AltAST)((GrammarAST)retval.start); + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "setAlt" + + + // $ANTLR start "finishRule" + // CollectSymbols.g:177:1: finishRule : RULE ; + public final void finishRule() throws RecognitionException { + try { + // CollectSymbols.g:178:2: ( RULE ) + // CollectSymbols.g:178:4: RULE + { + match(input,RULE,FOLLOW_RULE_in_finishRule528); if (state.failed) return ; + if ( state.backtracking==1 ) { + currentRule = null; + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "finishRule" + + + // $ANTLR start "ruleNamedAction" + // CollectSymbols.g:181:1: ruleNamedAction : {...}? ^( AT ID ACTION ) ; + public final void ruleNamedAction() throws RecognitionException { + GrammarAST ID9=null; + GrammarAST ACTION10=null; + + try { + // CollectSymbols.g:182:2: ({...}? ^( AT ID ACTION ) ) + // CollectSymbols.g:182:4: {...}? 
^( AT ID ACTION ) + { + if ( !((inContext("RULE"))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "ruleNamedAction", "inContext(\"RULE\")"); + } + match(input,AT,FOLLOW_AT_in_ruleNamedAction544); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + ID9=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleNamedAction546); if (state.failed) return ; + ACTION10=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleNamedAction548); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + + currentRule.namedActions.put((ID9!=null?ID9.getText():null),(ActionAST)ACTION10); + ((ActionAST)ACTION10).resolver = currentRule; + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "ruleNamedAction" + + + // $ANTLR start "ruleAction" + // CollectSymbols.g:189:1: ruleAction : ({...}? ACTION | FORCED_ACTION ); + public final void ruleAction() throws RecognitionException { + GrammarAST ACTION11=null; + GrammarAST FORCED_ACTION12=null; + + try { + // CollectSymbols.g:190:2: ({...}? ACTION | FORCED_ACTION ) + int alt11=2; + int LA11_0 = input.LA(1); + + if ( (LA11_0==ACTION) ) { + alt11=1; + } + else if ( (LA11_0==FORCED_ACTION) ) { + alt11=2; + } + else { + if (state.backtracking>0) {state.failed=true; return ;} + NoViableAltException nvae = + new NoViableAltException("", 11, 0, input); + + throw nvae; + } + switch (alt11) { + case 1 : + // CollectSymbols.g:190:4: {...}? ACTION + { + if ( !((inContext("RULE ...")&&!inContext("SCOPE")&& + !inContext("CATCH")&&!inContext("FINALLY")&&!inContext("AT"))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "ruleAction", "inContext(\"RULE ...\")&&!inContext(\"SCOPE\")&&\n\t\t !inContext(\"CATCH\")&&!inContext(\"FINALLY\")&&!inContext(\"AT\")"); + } + ACTION11=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleAction568); if (state.failed) return ; + if ( state.backtracking==1 ) { + + currentRule.defineActionInAlt(currentAlt, (ActionAST)ACTION11); + ((ActionAST)ACTION11).resolver = currentRule.alt[currentAlt]; + + } + + } + break; + case 2 : + // CollectSymbols.g:197:9: FORCED_ACTION + { + FORCED_ACTION12=(GrammarAST)match(input,FORCED_ACTION,FOLLOW_FORCED_ACTION_in_ruleAction582); if (state.failed) return ; + if ( state.backtracking==1 ) { + + currentRule.defineActionInAlt(currentAlt, (ActionAST)FORCED_ACTION12); + ((ActionAST)FORCED_ACTION12).resolver = currentRule.alt[currentAlt]; + + } + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "ruleAction" + + + // $ANTLR start "sempred" + // CollectSymbols.g:204:1: sempred : {...}? SEMPRED ; + public final void sempred() throws RecognitionException { + GrammarAST SEMPRED13=null; + + try { + // CollectSymbols.g:205:2: ({...}? SEMPRED ) + // CollectSymbols.g:205:4: {...}? 
SEMPRED + { + if ( !((inContext("RULE ..."))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "sempred", "inContext(\"RULE ...\")"); + } + SEMPRED13=(GrammarAST)match(input,SEMPRED,FOLLOW_SEMPRED_in_sempred601); if (state.failed) return ; + if ( state.backtracking==1 ) { + + currentRule.definePredicateInAlt(currentAlt, (PredAST)SEMPRED13); + ((PredAST)SEMPRED13).resolver = currentRule.alt[currentAlt]; + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "sempred" + + + // $ANTLR start "exceptionHandler" + // CollectSymbols.g:213:1: exceptionHandler : ^( CATCH ARG_ACTION ACTION ) ; + public final void exceptionHandler() throws RecognitionException { + GrammarAST ACTION14=null; + + try { + // CollectSymbols.g:214:2: ( ^( CATCH ARG_ACTION ACTION ) ) + // CollectSymbols.g:214:4: ^( CATCH ARG_ACTION ACTION ) + { + match(input,CATCH,FOLLOW_CATCH_in_exceptionHandler617); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_exceptionHandler619); if (state.failed) return ; + ACTION14=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_exceptionHandler621); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + + currentRule.exceptionActions.add((ActionAST)ACTION14); + ((ActionAST)ACTION14).resolver = currentRule; + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "exceptionHandler" + + + // $ANTLR start "finallyClause" + // CollectSymbols.g:221:1: finallyClause : ^( FINALLY ACTION ) ; + public final void finallyClause() throws RecognitionException { + GrammarAST ACTION15=null; + + try { + // CollectSymbols.g:222:2: ( ^( FINALLY ACTION ) ) + // CollectSymbols.g:222:4: ^( FINALLY ACTION ) + { + match(input,FINALLY,FOLLOW_FINALLY_in_finallyClause638); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + ACTION15=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_finallyClause640); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + + currentRule.finallyAction = (ActionAST)ACTION15; + ((ActionAST)ACTION15).resolver = currentRule; + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "finallyClause" + + + // $ANTLR start "ruleArg" + // CollectSymbols.g:229:1: ruleArg : {...}? ARG_ACTION ; + public final void ruleArg() throws RecognitionException { + GrammarAST ARG_ACTION16=null; + + try { + // CollectSymbols.g:230:2: ({...}? ARG_ACTION ) + // CollectSymbols.g:230:4: {...}? 
ARG_ACTION + { + if ( !((inContext("RULE"))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "ruleArg", "inContext(\"RULE\")"); + } + ARG_ACTION16=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleArg658); if (state.failed) return ; + if ( state.backtracking==1 ) { + + currentRule.args = ScopeParser.parseTypeList((ARG_ACTION16!=null?ARG_ACTION16.getText():null)); + currentRule.args.type = AttributeDict.DictType.ARG; + currentRule.args.ast = ARG_ACTION16; + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "ruleArg" + + + // $ANTLR start "ruleReturns" + // CollectSymbols.g:238:1: ruleReturns : ^( RETURNS ARG_ACTION ) ; + public final void ruleReturns() throws RecognitionException { + GrammarAST ARG_ACTION17=null; + + try { + // CollectSymbols.g:239:2: ( ^( RETURNS ARG_ACTION ) ) + // CollectSymbols.g:239:4: ^( RETURNS ARG_ACTION ) + { + match(input,RETURNS,FOLLOW_RETURNS_in_ruleReturns674); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + ARG_ACTION17=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_ruleReturns676); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + + currentRule.retvals = ScopeParser.parseTypeList((ARG_ACTION17!=null?ARG_ACTION17.getText():null)); + currentRule.retvals.type = AttributeDict.DictType.RET; + currentRule.retvals.ast = ARG_ACTION17; + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "ruleReturns" + + + // $ANTLR start "ruleScopeSpec" + // CollectSymbols.g:247:1: ruleScopeSpec : {...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) ; + public final void ruleScopeSpec() throws RecognitionException { + GrammarAST ACTION18=null; + GrammarAST ids=null; + List list_ids=null; + + try { + // CollectSymbols.g:248:2: ({...}? ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) ) + // CollectSymbols.g:248:4: {...}? 
( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) + { + if ( !((inContext("RULE"))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "ruleScopeSpec", "inContext(\"RULE\")"); + } + // CollectSymbols.g:249:3: ( ^( SCOPE ACTION ) | ^( SCOPE (ids+= ID )+ ) ) + int alt13=2; + int LA13_0 = input.LA(1); + + if ( (LA13_0==SCOPE) ) { + int LA13_1 = input.LA(2); + + if ( (LA13_1==DOWN) ) { + int LA13_2 = input.LA(3); + + if ( (LA13_2==ACTION) ) { + alt13=1; + } + else if ( (LA13_2==ID) ) { + alt13=2; + } + else { + if (state.backtracking>0) {state.failed=true; return ;} + NoViableAltException nvae = + new NoViableAltException("", 13, 2, input); + + throw nvae; + } + } + else { + if (state.backtracking>0) {state.failed=true; return ;} + NoViableAltException nvae = + new NoViableAltException("", 13, 1, input); + + throw nvae; + } + } + else { + if (state.backtracking>0) {state.failed=true; return ;} + NoViableAltException nvae = + new NoViableAltException("", 13, 0, input); + + throw nvae; + } + switch (alt13) { + case 1 : + // CollectSymbols.g:249:5: ^( SCOPE ACTION ) + { + match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec699); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + ACTION18=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_ruleScopeSpec701); if (state.failed) return ; + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + + currentRule.scope = ScopeParser.parseDynamicScope((ACTION18!=null?ACTION18.getText():null)); + currentRule.scope.type = AttributeDict.DictType.RULE_SCOPE; + currentRule.scope.name = currentRule.name; + currentRule.scope.ast = ACTION18; + + } + + } + break; + case 2 : + // CollectSymbols.g:256:5: ^( SCOPE (ids+= ID )+ ) + { + match(input,SCOPE,FOLLOW_SCOPE_in_ruleScopeSpec714); if (state.failed) return ; + + match(input, Token.DOWN, null); if (state.failed) return ; + // CollectSymbols.g:256:16: (ids+= ID )+ + int cnt12=0; + loop12: + do { + int alt12=2; + int LA12_0 = input.LA(1); + + if ( (LA12_0==ID) ) { + alt12=1; + } + + + switch (alt12) { + case 1 : + // CollectSymbols.g:256:16: ids+= ID + { + ids=(GrammarAST)match(input,ID,FOLLOW_ID_in_ruleScopeSpec718); if (state.failed) return ; + if (list_ids==null) list_ids=new ArrayList(); + list_ids.add(ids); + + + } + break; + + default : + if ( cnt12 >= 1 ) break loop12; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(12, input); + throw eee; + } + cnt12++; + } while (true); + + + match(input, Token.UP, null); if (state.failed) return ; + if ( state.backtracking==1 ) { + currentRule.useScopes = list_ids; + } + + } + break; + + } + + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "ruleScopeSpec" + + public static class rewriteElement_return extends TreeRuleReturnScope { + }; + + // $ANTLR start "rewriteElement" + // CollectSymbols.g:260:1: rewriteElement : {...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) ; + public final CollectSymbols.rewriteElement_return rewriteElement() throws RecognitionException { + CollectSymbols.rewriteElement_return retval = new CollectSymbols.rewriteElement_return(); + retval.start = input.LT(1); + + try { + // CollectSymbols.g:262:2: ({...}? ( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) ) + // CollectSymbols.g:263:6: {...}? 
( TOKEN_REF | RULE_REF | STRING_LITERAL | LABEL ) + { + if ( !((inContext("RESULT ..."))) ) { + if (state.backtracking>0) {state.failed=true; return retval;} + throw new FailedPredicateException(input, "rewriteElement", "inContext(\"RESULT ...\")"); + } + if ( (input.LA(1)>=TOKEN_REF && input.LA(1)<=RULE_REF)||input.LA(1)==STRING_LITERAL||input.LA(1)==LABEL ) { + input.consume(); + state.errorRecovery=false;state.failed=false; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + MismatchedSetException mse = new MismatchedSetException(null,input); + throw mse; + } + + if ( state.backtracking==1 ) { + currentRule.alt[currentAlt].rewriteElements.add(((GrammarAST)retval.start)); + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "rewriteElement" + + public static class labeledElement_return extends TreeRuleReturnScope { + }; + + // $ANTLR start "labeledElement" + // CollectSymbols.g:267:1: labeledElement : {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) ; + public final CollectSymbols.labeledElement_return labeledElement() throws RecognitionException { + CollectSymbols.labeledElement_return retval = new CollectSymbols.labeledElement_return(); + retval.start = input.LT(1); + + GrammarAST id=null; + GrammarAST e=null; + + try { + // CollectSymbols.g:273:2: ({...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) ) + // CollectSymbols.g:273:4: {...}? ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) + { + if ( !((inContext("RULE ..."))) ) { + if (state.backtracking>0) {state.failed=true; return retval;} + throw new FailedPredicateException(input, "labeledElement", "inContext(\"RULE ...\")"); + } + // CollectSymbols.g:274:3: ( ^( ASSIGN id= ID e= . ) | ^( PLUS_ASSIGN id= ID e= . ) ) + int alt14=2; + int LA14_0 = input.LA(1); + + if ( (LA14_0==ASSIGN) ) { + alt14=1; + } + else if ( (LA14_0==PLUS_ASSIGN) ) { + alt14=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 14, 0, input); + + throw nvae; + } + switch (alt14) { + case 1 : + // CollectSymbols.g:274:5: ^( ASSIGN id= ID e= . ) + { + match(input,ASSIGN,FOLLOW_ASSIGN_in_labeledElement780); if (state.failed) return retval; + + match(input, Token.DOWN, null); if (state.failed) return retval; + id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement784); if (state.failed) return retval; + e=(GrammarAST)input.LT(1); + matchAny(input); if (state.failed) return retval; + + match(input, Token.UP, null); if (state.failed) return retval; + + } + break; + case 2 : + // CollectSymbols.g:275:5: ^( PLUS_ASSIGN id= ID e= . 
) + { + match(input,PLUS_ASSIGN,FOLLOW_PLUS_ASSIGN_in_labeledElement796); if (state.failed) return retval; + + match(input, Token.DOWN, null); if (state.failed) return retval; + id=(GrammarAST)match(input,ID,FOLLOW_ID_in_labeledElement800); if (state.failed) return retval; + e=(GrammarAST)input.LT(1); + matchAny(input); if (state.failed) return retval; + + match(input, Token.UP, null); if (state.failed) return retval; + + } + break; + + } + + + } + + if ( state.backtracking==1 ) { + + LabelElementPair lp = new LabelElementPair(g, id, e, ((GrammarAST)retval.start).getType()); + //currentRule.labelDefs.map((id!=null?id.getText():null), lp); + currentRule.alt[currentAlt].labelDefs.map((id!=null?id.getText():null), lp); + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "labeledElement" + + public static class terminal_return extends TreeRuleReturnScope { + }; + + // $ANTLR start "terminal" + // CollectSymbols.g:279:1: terminal : ({...}? STRING_LITERAL | TOKEN_REF ); + public final CollectSymbols.terminal_return terminal() throws RecognitionException { + CollectSymbols.terminal_return retval = new CollectSymbols.terminal_return(); + retval.start = input.LT(1); + + GrammarAST STRING_LITERAL19=null; + GrammarAST TOKEN_REF20=null; + + try { + // CollectSymbols.g:280:5: ({...}? STRING_LITERAL | TOKEN_REF ) + int alt15=2; + int LA15_0 = input.LA(1); + + if ( (LA15_0==STRING_LITERAL) ) { + alt15=1; + } + else if ( (LA15_0==TOKEN_REF) ) { + alt15=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 15, 0, input); + + throw nvae; + } + switch (alt15) { + case 1 : + // CollectSymbols.g:280:7: {...}? 
STRING_LITERAL + { + if ( !((!inContext("TOKENS ASSIGN"))) ) { + if (state.backtracking>0) {state.failed=true; return retval;} + throw new FailedPredicateException(input, "terminal", "!inContext(\"TOKENS ASSIGN\")"); + } + STRING_LITERAL19=(GrammarAST)match(input,STRING_LITERAL,FOLLOW_STRING_LITERAL_in_terminal825); if (state.failed) return retval; + if ( state.backtracking==1 ) { + + terminals.add(((GrammarAST)retval.start)); + strings.add((STRING_LITERAL19!=null?STRING_LITERAL19.getText():null)); + if ( currentRule!=null ) { + currentRule.alt[currentAlt].tokenRefs.map((STRING_LITERAL19!=null?STRING_LITERAL19.getText():null), (TerminalAST)STRING_LITERAL19); + } + + } + + } + break; + case 2 : + // CollectSymbols.g:288:7: TOKEN_REF + { + TOKEN_REF20=(GrammarAST)match(input,TOKEN_REF,FOLLOW_TOKEN_REF_in_terminal840); if (state.failed) return retval; + if ( state.backtracking==1 ) { + + terminals.add(TOKEN_REF20); + tokenIDRefs.add(TOKEN_REF20); + if ( currentRule!=null ) { + currentRule.alt[currentAlt].tokenRefs.map((TOKEN_REF20!=null?TOKEN_REF20.getText():null), (TerminalAST)TOKEN_REF20); + } + + } + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return retval; + } + // $ANTLR end "terminal" + + + // $ANTLR start "ruleref" + // CollectSymbols.g:298:1: ruleref : ({...}?r= RULE_REF | r= RULE_REF ) ; + public final void ruleref() throws RecognitionException { + GrammarAST r=null; + + try { + // CollectSymbols.g:300:5: ( ({...}?r= RULE_REF | r= RULE_REF ) ) + // CollectSymbols.g:300:7: ({...}?r= RULE_REF | r= RULE_REF ) + { + // CollectSymbols.g:300:7: ({...}?r= RULE_REF | r= RULE_REF ) + int alt16=2; + int LA16_0 = input.LA(1); + + if ( (LA16_0==RULE_REF) ) { + int LA16_1 = input.LA(2); + + if ( ((inContext("DOT ..."))) ) { + alt16=1; + } + else if ( (true) ) { + alt16=2; + } + else { + if (state.backtracking>0) {state.failed=true; return ;} + NoViableAltException nvae = + new NoViableAltException("", 16, 1, input); + + throw nvae; + } + } + else { + if (state.backtracking>0) {state.failed=true; return ;} + NoViableAltException nvae = + new NoViableAltException("", 16, 0, input); + + throw nvae; + } + switch (alt16) { + case 1 : + // CollectSymbols.g:300:9: {...}?r= RULE_REF + { + if ( !((inContext("DOT ..."))) ) { + if (state.backtracking>0) {state.failed=true; return ;} + throw new FailedPredicateException(input, "ruleref", "inContext(\"DOT ...\")"); + } + r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref877); if (state.failed) return ; + if ( state.backtracking==1 ) { + qualifiedRulerefs.add((GrammarAST)r.getParent()); + } + + } + break; + case 2 : + // CollectSymbols.g:302:8: r= RULE_REF + { + r=(GrammarAST)match(input,RULE_REF,FOLLOW_RULE_REF_in_ruleref890); if (state.failed) return ; + + } + break; + + } + + if ( state.backtracking==1 ) { + + rulerefs.add(r); + if ( currentRule!=null ) { + currentRule.alt[currentAlt].ruleRefs.map((r!=null?r.getText():null), r); + } + + } + + } + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return ; + } + // $ANTLR end "ruleref" + + // Delegated rules + + + protected DFA1 dfa1 = new DFA1(this); + protected DFA5 dfa5 = new DFA5(this); + static final String DFA1_eotS = + "\43\uffff"; + static final String DFA1_eofS = + "\43\uffff"; + static final String DFA1_minS = + "\1\4\3\2\5\uffff\3\0\7\uffff\1\20\2\127\2\uffff\1\3\1\uffff\1\20"+ + "\1\4\1\uffff\1\3\1\uffff\1\2\2\0\1\uffff"; + static final String DFA1_maxS = + 
"\1\146\3\2\5\uffff\3\0\7\uffff\3\127\2\uffff\1\127\1\uffff\1\127"+ + "\1\146\1\uffff\1\3\1\uffff\1\3\2\0\1\uffff"; + static final String DFA1_acceptS = + "\4\uffff\1\3\1\4\1\5\1\6\1\7\3\uffff\1\13\1\15\1\16\1\17\1\20\1"+ + "\21\1\22\3\uffff\1\12\1\14\1\uffff\1\11\2\uffff\1\1\1\uffff\1\2"+ + "\3\uffff\1\10"; + static final String DFA1_specialS = + "\11\uffff\1\4\1\0\1\2\24\uffff\1\3\1\1\1\uffff}>"; + static final String[] DFA1_transitionS = { + "\1\20\1\17\10\uffff\1\7\1\uffff\1\17\4\uffff\1\1\11\uffff\1"+ + "\10\1\uffff\1\22\1\21\1\uffff\1\5\11\uffff\1\3\4\uffff\1\15"+ + "\10\uffff\1\2\2\uffff\1\13\1\11\3\uffff\1\12\4\uffff\1\6\13"+ + "\uffff\1\16\1\uffff\1\4\5\uffff\1\14\10\uffff\1\16", + "\1\23", + "\1\24", + "\1\25", + "", + "", + "", + "", + "", + "\1\uffff", + "\1\uffff", + "\1\uffff", + "", + "", + "", + "", + "", + "", + "", + "\1\31\106\uffff\1\30", + "\1\32", + "\1\33", + "", + "", + "\1\31\14\uffff\1\34\106\uffff\1\31", + "", + "\1\35\106\uffff\1\36", + "\100\15\1\37\42\15", + "", + "\1\40", + "", + "\1\15\1\41", + "\1\uffff", + "\1\uffff", + "" + }; + + static final short[] DFA1_eot = DFA.unpackEncodedString(DFA1_eotS); + static final short[] DFA1_eof = DFA.unpackEncodedString(DFA1_eofS); + static final char[] DFA1_min = DFA.unpackEncodedStringToUnsignedChars(DFA1_minS); + static final char[] DFA1_max = DFA.unpackEncodedStringToUnsignedChars(DFA1_maxS); + static final short[] DFA1_accept = DFA.unpackEncodedString(DFA1_acceptS); + static final short[] DFA1_special = DFA.unpackEncodedString(DFA1_specialS); + static final short[][] DFA1_transition; + + static { + int numStates = DFA1_transitionS.length; + DFA1_transition = new short[numStates][]; + for (int i=0; i=0 ) return s; + break; + case 1 : + int LA1_33 = input.LA(1); + + + int index1_33 = input.index(); + input.rewind(); + s = -1; + if ( ((inContext("TOKENS"))) ) {s = 4;} + + else if ( ((inContext("RULE ..."))) ) {s = 13;} + + + input.seek(index1_33); + if ( s>=0 ) return s; + break; + case 2 : + int LA1_11 = input.LA(1); + + + int index1_11 = input.index(); + input.rewind(); + s = -1; + if ( ((inContext("RESULT ..."))) ) {s = 12;} + + else if ( (true) ) {s = 23;} + + + input.seek(index1_11); + if ( s>=0 ) return s; + break; + case 3 : + int LA1_32 = input.LA(1); + + + int index1_32 = input.index(); + input.rewind(); + s = -1; + if ( ((inContext("GRAMMAR"))) ) {s = 30;} + + else if ( ((inContext("RULE"))) ) {s = 34;} + + + input.seek(index1_32); + if ( s>=0 ) return s; + break; + case 4 : + int LA1_9 = input.LA(1); + + + int index1_9 = input.index(); + input.rewind(); + s = -1; + if ( (!(((inContext("RESULT ..."))))) ) {s = 22;} + + else if ( ((inContext("RESULT ..."))) ) {s = 12;} + + + input.seek(index1_9); + if ( s>=0 ) return s; + break; + } + if (state.backtracking>0) {state.failed=true; return -1;} + NoViableAltException nvae = + new NoViableAltException(getDescription(), 1, _s, input); + error(nvae); + throw nvae; + } + } + static final String DFA5_eotS = + "\41\uffff"; + static final String DFA5_eofS = + "\41\uffff"; + static final String DFA5_minS = + "\1\4\3\2\1\uffff\3\4\1\2\1\3\1\2\2\uffff\2\3\1\uffff\4\2\4\4\1\2"+ + "\2\3\1\2\3\3\1\uffff\1\3"; + static final String DFA5_maxS = + "\4\146\1\uffff\6\146\2\uffff\2\146\1\uffff\17\146\1\uffff\1\146"; + static final String DFA5_acceptS = + "\4\uffff\1\1\6\uffff\2\2\2\uffff\1\2\17\uffff\1\2\1\uffff"; + static final String DFA5_specialS = + "\41\uffff}>"; + static final String[] DFA5_transitionS = { + "\70\4\1\2\16\4\1\1\1\4\1\3\31\4", + "\1\5\1\uffff\143\4", + 
"\1\6\1\uffff\143\4", + "\1\7\1\uffff\143\4", + "", + "\143\10", + "\123\4\1\11\17\4", + "\143\12", + "\1\13\1\14\143\10", + "\15\4\1\15\126\4", + "\1\14\1\16\143\12", + "", + "", + "\1\17\143\4", + "\1\17\70\23\1\21\16\23\1\20\1\23\1\22\31\23", + "", + "\1\24\1\17\70\23\1\21\16\23\1\20\1\23\1\22\31\23", + "\1\25\1\17\70\23\1\21\16\23\1\20\1\23\1\22\31\23", + "\1\26\1\17\70\23\1\21\16\23\1\20\1\23\1\22\31\23", + "\1\27\1\17\70\23\1\21\16\23\1\20\1\23\1\22\31\23", + "\143\30", + "\123\32\1\31\17\32", + "\143\33", + "\143\32", + "\1\4\1\34\143\30", + "\1\36\14\32\1\35\126\32", + "\1\36\143\32", + "\1\4\1\37\143\33", + "\1\37\70\23\1\21\16\23\1\20\1\23\1\22\31\23", + "\1\40\143\32", + "\1\37\70\23\1\21\16\23\1\20\1\23\1\22\31\23", + "", + "\1\37\70\23\1\21\16\23\1\20\1\23\1\22\31\23" + }; + + static final short[] DFA5_eot = DFA.unpackEncodedString(DFA5_eotS); + static final short[] DFA5_eof = DFA.unpackEncodedString(DFA5_eofS); + static final char[] DFA5_min = DFA.unpackEncodedStringToUnsignedChars(DFA5_minS); + static final char[] DFA5_max = DFA.unpackEncodedStringToUnsignedChars(DFA5_maxS); + static final short[] DFA5_accept = DFA.unpackEncodedString(DFA5_acceptS); + static final short[] DFA5_special = DFA.unpackEncodedString(DFA5_specialS); + static final short[][] DFA5_transition; + + static { + int numStates = DFA5_transitionS.length; + DFA5_transition = new short[numStates][]; + for (int i=0; i0 ) return; + // hmm...we don't get missing arg errors and such if we bail out here + + // STORE RULES/ACTIONS/SCOPES IN GRAMMAR + for (Rule r : collector.rules) g.defineRule(r); + for (AttributeDict s : collector.scopes) g.defineScope(s); + for (GrammarAST a : collector.actions) g.defineAction(a); + + // LINK ALT NODES WITH Alternatives + for (Rule r : g.rules.values()) { + for (int i=1; i<=r.numberOfAlts; i++) { + r.alt[i].ast.alt = r.alt[i]; + } + } + + // CHECK RULE REFS NOW (that we've defined rules in grammar) + symcheck.checkRuleArgs(g, collector.rulerefs); + identifyStartRules(collector); + symcheck.checkForQualifiedRuleIssues(g, collector.qualifiedRulerefs); + + // don't continue if we got symbol errors + if ( g.tool.getNumErrors()>0 ) return; + + // CHECK ATTRIBUTE EXPRESSIONS FOR SEMANTIC VALIDITY + AttributeChecks.checkAllAttributeExpressions(g); + + // ASSIGN TOKEN TYPES + String vocab = g.getOption("tokenVocab"); + if ( vocab!=null ) { + TokenVocabParser vparser = new TokenVocabParser(g.tool, vocab); + Map tokens = vparser.load(); + System.out.println("tokens="+tokens); + for (String t : tokens.keySet()) { + if ( t.charAt(0)=='\'' ) g.defineStringLiteral(t, tokens.get(t)); + else g.defineTokenName(t, tokens.get(t)); + } + } + if ( g.isLexer() ) assignLexerTokenTypes(g, collector); + else assignTokenTypes(g, collector, symcheck); + + UseDefAnalyzer.checkRewriteElementsPresentOnLeftSide(g); + UseDefAnalyzer.trackTokenRuleRefsInActions(g); + } + + void identifyStartRules(CollectSymbols collector) { + for (GrammarAST ref : collector.rulerefs) { + String ruleName = ref.getText(); + Rule r = g.getRule(ruleName); + if ( r!=null ) r.isStartRule = false; + } + } + + void assignLexerTokenTypes(Grammar g, CollectSymbols collector) { + Grammar G = g.getOutermostGrammar(); // put in root, even if imported + for (GrammarAST def : collector.tokensDefs) { + if ( def.getType()== ANTLRParser.ID ) G.defineTokenName(def.getText()); + } + + // DEFINE TOKEN TYPES FOR NONFRAGMENT RULES + for (Rule r : g.rules.values()) { + if ( !r.isFragment() ) G.defineTokenName(r.name); + } + + // FOR ALL X : 'xxx'; 
RULES, DEFINE 'xxx' AS TYPE X
+        Map<String,String> litAliases = Grammar.getStringLiteralAliasesFromLexerRules(g.ast);
+        if ( litAliases!=null ) {
+            for (String lit : litAliases.keySet()) {
+                G.defineTokenAlias(litAliases.get(lit), lit);
+            }
+        }
+
+    }
+
+    void assignTokenTypes(Grammar g, CollectSymbols collector, SymbolChecks symcheck) {
+        if ( g.implicitLexerOwner!=null ) {
+            // copy vocab from combined to implicit lexer
+            g.importVocab(g.implicitLexerOwner);
+            System.out.println("tokens="+g.tokenNameToTypeMap);
+            System.out.println("strings="+g.stringLiteralToTypeMap);
+        }
+        else {
+            Grammar G = g.getOutermostGrammar(); // put in root, even if imported
+
+            // DEFINE tokens { X='x'; } ALIASES
+            for (GrammarAST alias : collector.tokensDefs) {
+                if ( alias.getType()== ANTLRParser.ASSIGN ) {
+                    String name = alias.getChild(0).getText();
+                    String lit = alias.getChild(1).getText();
+                    G.defineTokenAlias(name, lit);
+                }
+            }
+
+            // DEFINE TOKEN TYPES FOR X : 'x' ; RULES
+            Map<String,String> litAliases = Grammar.getStringLiteralAliasesFromLexerRules(g.ast);
+            if ( litAliases!=null ) {
+                for (String lit : litAliases.keySet()) {
+                    G.defineTokenAlias(litAliases.get(lit), lit);
+                }
+            }
+
+            // DEFINE TOKEN TYPES FOR TOKEN REFS LIKE ID, INT
+            for (String id : symcheck.tokenIDs) { G.defineTokenName(id); }
+
+            // DEFINE TOKEN TYPES FOR STRING LITERAL REFS LIKE 'while', ';'
+            for (String s : collector.strings) { G.defineStringLiteral(s); }
+//            System.out.println("tokens="+G.tokenNameToTypeMap);
+//            System.out.println("strings="+G.stringLiteralToTypeMap);
+        }
+    }
+}
diff --git a/tool/src/org/antlr/v4/semantics/SymbolChecks.java b/tool/src/org/antlr/v4/semantics/SymbolChecks.java
new file mode 100644
index 000000000..742cfd53e
--- /dev/null
+++ b/tool/src/org/antlr/v4/semantics/SymbolChecks.java
@@ -0,0 +1,316 @@
+package org.antlr.v4.semantics;
+
+import org.antlr.runtime.Token;
+import org.antlr.v4.parse.ANTLRParser;
+import org.antlr.v4.tool.*;
+
+import java.util.*;
+
+/** Check for symbol problems; no side-effects. Inefficient to walk rules
+ * and such multiple times, but I like isolating all error checking outside
+ * of code that actually defines symbols etc...
+ *
+ * Side-effect: strip away redef'd rules.
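+ *
+ * A rough usage sketch, for orientation only. The real driver is SemanticPipeline
+ * (shown earlier in this patch); the node-stream setup and the downup() call are
+ * assumptions about the ANTLR v3 runtime wiring, not part of this class.
+ * <pre>
+ * CommonTreeNodeStream nodes = new CommonTreeNodeStream(g.ast);  // assumed stream setup
+ * CollectSymbols collector = new CollectSymbols(nodes, g);       // tree filter fills its public lists
+ * collector.downup(g.ast);                                       // assumed TreeFilter driver call
+ * SymbolChecks symcheck = new SymbolChecks(g, collector);
+ * symcheck.examine();                             // order-sensitive redefinition/conflict checks
+ * symcheck.checkRuleArgs(g, collector.rulerefs);  // later, once rules are defined in the grammar
+ * </pre>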
diff --git a/tool/src/org/antlr/v4/semantics/SymbolChecks.java b/tool/src/org/antlr/v4/semantics/SymbolChecks.java
new file mode 100644
index 000000000..742cfd53e
--- /dev/null
+++ b/tool/src/org/antlr/v4/semantics/SymbolChecks.java
@@ -0,0 +1,316 @@
+package org.antlr.v4.semantics;
+
+import org.antlr.runtime.Token;
+import org.antlr.v4.parse.ANTLRParser;
+import org.antlr.v4.tool.*;
+
+import java.util.*;
+
+/** Check for symbol problems; no side-effects.  Inefficient to walk rules
+ *  and such multiple times, but I like isolating all error checking outside
+ *  of code that actually defines symbols etc...
+ *
+ *  Side-effect: strip away redef'd rules.
+ */
+public class SymbolChecks {
+    Grammar g;
+    CollectSymbols collector;
+    Map nameToRuleMap = new HashMap();
+    Set tokenIDs = new HashSet();
+    Set globalScopeNames = new HashSet();
+    Map<String, Set<String>> actionScopeToActionNames = new HashMap<String, Set<String>>();
+    public ErrorManager errMgr;
+
+    public SymbolChecks(Grammar g, CollectSymbols collector) {
+        this.g = g;
+        this.collector = collector;
+        this.errMgr = g.tool.errMgr;
+        /*
+        System.out.println("rules="+collector.rules);
+        System.out.println("rulerefs="+collector.rulerefs);
+        System.out.println("tokenIDRefs="+collector.tokenIDRefs);
+        System.out.println("terminals="+collector.terminals);
+        System.out.println("strings="+collector.strings);
+        System.out.println("tokensDef="+collector.tokensDefs);
+        System.out.println("actions="+collector.actions);
+        System.out.println("scopes="+collector.scopes);
+        */
+    }
+
+    public void examine() {
+        // methods affect fields, but no side-effects outside this object
+        // So, call order sensitive
+        checkScopeRedefinitions(collector.scopes);          // sets globalScopeNames
+        //checkForImportedRuleIssues(collector.qualifiedRulerefs);
+        checkForRuleConflicts(collector.rules);             // sets nameToRuleMap
+        checkActionRedefinitions(collector.actions);        // sets actionScopeToActionNames
+        checkTokenAliasRedefinitions(collector.tokensDefs);
+        //checkRuleArgs(collector.rulerefs);
+        checkForTokenConflicts(collector.tokenIDRefs);      // sets tokenIDs
+        checkForLabelConflicts(collector.rules);
+        //checkRewriteElementsPresentOnLeftSide(collector.rules); // move to after token type assignment
+    }
+
+    public void checkForRuleConflicts(List rules) {
+        if ( rules==null ) return;
+        for (Rule r : collector.rules) {
+            if ( nameToRuleMap.get(r.name)==null ) {
+                nameToRuleMap.put(r.name, r);
+            }
+            else {
+                GrammarAST idNode = (GrammarAST)r.ast.getChild(0);
+                errMgr.grammarError(ErrorType.RULE_REDEFINITION,
+                                    g.fileName, idNode.token, r.name);
+            }
+            if ( globalScopeNames.contains(r.name) ) {
+                GrammarAST idNode = (GrammarAST)r.ast.getChild(0);
+                errMgr.grammarError(ErrorType.SYMBOL_CONFLICTS_WITH_GLOBAL_SCOPE,
+                                    g.fileName, idNode.token, r.name);
+            }
+        }
+    }
+
+    public void checkScopeRedefinitions(List dicts) {
+        if ( dicts ==null ) return;
+        for (int i=0; i< dicts.size(); i++) {
+            AttributeDict s = dicts.get(i);
+            //GrammarAST idNode = (GrammarAST)s.getChild(0);
+            if ( !globalScopeNames.contains(s.getName()) ) {
+                globalScopeNames.add(s.getName());
+            }
+            else {
+                Token idNode = ((GrammarAST) s.ast.getParent().getChild(0)).token;
+                errMgr.grammarError(ErrorType.SCOPE_REDEFINITION,
+                                    g.fileName, idNode, s.getName());
+            }
+        }
+    }
+
+    public void checkTokenAliasRedefinitions(List aliases) {
+        if ( aliases==null ) return;
+        Map aliasTokenNames = new HashMap();
+        for (int i=0; i< aliases.size(); i++) {
+            GrammarAST a = aliases.get(i);
+            GrammarAST idNode = a;
+            if ( a.getType()== ANTLRParser.ASSIGN ) {
+                idNode = (GrammarAST)a.getChild(0);
+                if ( g!=g.getOutermostGrammar() ) {
+                    errMgr.grammarError(ErrorType.TOKEN_ALIAS_IN_DELEGATE,
+                                        g.fileName, idNode.token, idNode.getText(), g.name);
+                }
+            }
+            GrammarAST prev = aliasTokenNames.get(idNode.getText());
+            if ( prev==null ) {
+                aliasTokenNames.put(idNode.getText(), a);
+            }
+            else {
+                GrammarAST value = (GrammarAST)prev.getChild(1);
+                String valueText = null;
+                if ( value!=null ) valueText = value.getText();
+                errMgr.grammarError(ErrorType.TOKEN_ALIAS_REASSIGNMENT,
+                                    g.fileName, idNode.token, idNode.getText(), valueText);
+            }
+        }
+    }
+
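// The three redefinition checks above share one pattern: remember the first definition of
// each name in a map and report every later one against it. Below is a minimal, runnable
// sketch of that pattern; DuplicateChecker and its message format are hypothetical and do
// not mirror the ErrorManager API.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class DuplicateChecker {
    /** Return "name redefined" messages, keeping the first definition of each name. */
    static List<String> findRedefinitions(List<String> definedNames) {
        Map<String, Integer> firstDef = new HashMap<String, Integer>();
        List<String> errors = new ArrayList<String>();
        for (int i = 0; i < definedNames.size(); i++) {
            String name = definedNames.get(i);
            Integer prev = firstDef.get(name);
            if (prev == null) firstDef.put(name, i);   // first definition wins
            else errors.add(name + " redefined (first seen at #" + prev + ")");
        }
        return errors;
    }

    public static void main(String[] args) {
        System.out.println(findRedefinitions(Arrays.asList("stat", "expr", "stat")));
        // [stat redefined (first seen at #0)]
    }
}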
+    public void checkForTokenConflicts(List tokenIDRefs) {
+        for (GrammarAST a : tokenIDRefs) {
+            Token t = a.token;
+            String ID = t.getText();
+            tokenIDs.add(ID);
+            if ( globalScopeNames.contains(t.getText()) ) {
+                errMgr.grammarError(ErrorType.SYMBOL_CONFLICTS_WITH_GLOBAL_SCOPE,
+                                    g.fileName, t, ID);
+            }
+        }
+    }
+
+    public void checkActionRedefinitions(List actions) {
+        if ( actions==null ) return;
+        String scope = g.getDefaultActionScope();
+        String name = null;
+        GrammarAST nameNode = null;
+        for (GrammarAST ampersandAST : actions) {
+            nameNode = (GrammarAST)ampersandAST.getChild(0);
+            if ( ampersandAST.getChildCount()==2 ) {
+                name = nameNode.getText();
+            }
+            else {
+                scope = nameNode.getText();
+                name = ampersandAST.getChild(1).getText();
+            }
+            Set<String> scopeActions = actionScopeToActionNames.get(scope);
+            if ( scopeActions==null ) { // init scope
+                scopeActions = new HashSet<String>();
+                actionScopeToActionNames.put(scope, scopeActions);
+            }
+            if ( !scopeActions.contains(name) ) {
+                scopeActions.add(name);
+            }
+            else {
+                errMgr.grammarError(ErrorType.ACTION_REDEFINITION,
+                                    g.fileName, nameNode.token, name);
+            }
+        }
+    }
+
+    /** Make sure a label doesn't conflict with another symbol.
+     *  Labels must not conflict with: rules, tokens, scope names,
+     *  return values, parameters, and rule-scope dynamic attributes
+     *  defined in surrounding rule.  Also they must have same type
+     *  for repeated defs.
+     */
+    public void checkForLabelConflicts(List rules) {
+        for (Rule r : rules) {
+            checkForRuleArgumentAndReturnValueConflicts(r);
+            checkForRuleScopeAttributeConflict(r);
+            Map<String, LabelElementPair> labelNameSpace =
+                new HashMap<String, LabelElementPair>();
+            for (int i=1; i<=r.numberOfAlts; i++) {
+                Alternative a = r.alt[i];
+                for (List pairs : a.labelDefs.values() ) {
+                    for (LabelElementPair p : pairs) {
+                        checkForLabelConflict(r, p.label);
+                        String name = p.label.getText();
+                        LabelElementPair prev = labelNameSpace.get(name);
+                        if ( prev==null ) labelNameSpace.put(name, p);
+                        else checkForTypeMismatch(prev, p);
+                    }
+                }
+            }
+        }
+    }
+
+    void checkForTypeMismatch(LabelElementPair prevLabelPair,
+                              LabelElementPair labelPair)
+    {
+        // label already defined; if same type, no problem
+        if ( prevLabelPair.type != labelPair.type ) {
+            String typeMismatchExpr = labelPair.type+"!="+prevLabelPair.type;
+            errMgr.grammarError(
+                ErrorType.LABEL_TYPE_CONFLICT,
+                g.fileName,
+                labelPair.label.token,
+                labelPair.label.getText(),
+                typeMismatchExpr);
+        }
+    }
+
+    public void checkForLabelConflict(Rule r, GrammarAST labelID) {
+        ErrorType etype = ErrorType.INVALID;
+        Object arg2 = null;
+        String name = labelID.getText();
+        if ( globalScopeNames.contains(name) ) {
+            etype = ErrorType.SYMBOL_CONFLICTS_WITH_GLOBAL_SCOPE;
+        }
+        else if ( nameToRuleMap.containsKey(name) ) {
+            etype = ErrorType.LABEL_CONFLICTS_WITH_RULE;
+        }
+        else if ( tokenIDs.contains(name) ) {
+            etype = ErrorType.LABEL_CONFLICTS_WITH_TOKEN;
+        }
+        else if ( r.scope !=null && r.scope.get(name)!=null ) {
+            etype = ErrorType.LABEL_CONFLICTS_WITH_RULE_SCOPE_ATTRIBUTE;
+            arg2 = r.name;
+        }
+        else if ( (r.retvals!=null&&r.retvals.get(name)!=null) ||
+                  (r.args!=null&&r.args.get(name)!=null) )
+        {
+            etype = ErrorType.LABEL_CONFLICTS_WITH_RULE_ARG_RETVAL;
+            arg2 = r.name;
+        }
+        if ( etype!=ErrorType.INVALID ) {
+            errMgr.grammarError(etype,g.fileName,labelID.token,name,arg2);
+        }
+    }
+
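// checkForTypeMismatch above only compares the type fields of two definitions of the same
// label name within one rule. A stripped-down, runnable sketch of that rule follows; the
// LabelKind enum and Label class here are hypothetical simplifications, not the tool's
// LabelElementPair.
import java.util.HashMap;
import java.util.Map;

class LabelTypeCheck {
    enum LabelKind { TOKEN_LABEL, RULE_LABEL, TOKEN_LIST_LABEL }

    static class Label {
        final String name; final LabelKind kind;
        Label(String name, LabelKind kind) { this.name = name; this.kind = kind; }
    }

    /** Report reuses of a label name where the later def changes the label's kind. */
    static void check(Label[] defsInOneRule) {
        Map<String, Label> firstDef = new HashMap<String, Label>();
        for (Label l : defsInOneRule) {
            Label prev = firstDef.get(l.name);
            if (prev == null) firstDef.put(l.name, l);
            else if (prev.kind != l.kind)
                System.out.println("label type conflict: " + l.name + " " + l.kind + "!=" + prev.kind);
        }
    }

    public static void main(String[] args) {
        check(new Label[] {
            new Label("x", LabelKind.TOKEN_LABEL),      // x=ID
            new Label("x", LabelKind.TOKEN_LIST_LABEL)  // x+=ID
        });
    }
}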
+    public void checkForRuleArgumentAndReturnValueConflicts(Rule r) {
+        if ( r.retvals!=null ) {
+            Set conflictingKeys = r.retvals.intersection(r.args);
+            if (conflictingKeys!=null) {
+                for (Iterator it = conflictingKeys.iterator(); it.hasNext();) {
+                    String key = (String) it.next();
+                    errMgr.grammarError(
+                        ErrorType.ARG_RETVAL_CONFLICT,
+                        g.fileName,
+                        ((GrammarAST)r.ast.getChild(0)).token,
+                        key,
+                        r.name);
+                }
+            }
+        }
+    }
+
+    /** Check for collision of a rule-scope dynamic attribute with:
+     *  arg, return value, rule name itself.  Labels are checked elsewhere.
+     */
+    public void checkForRuleScopeAttributeConflict(Rule r) {
+        if ( r.scope ==null ) return;
+        for (Attribute a : r.scope.attributes.values()) {
+            ErrorType msgID = ErrorType.INVALID;
+            Object arg2 = null;
+            String attrName = a.name;
+            if ( r.name.equals(attrName) ) {
+                msgID = ErrorType.ATTRIBUTE_CONFLICTS_WITH_RULE;
+                arg2 = r.name;
+            }
+            else if ( (r.retvals!=null&&r.retvals.get(attrName)!=null) ||
+                      (r.args!=null&&r.args.get(attrName)!=null) )
+            {
+                msgID = ErrorType.ATTRIBUTE_CONFLICTS_WITH_RULE_ARG_RETVAL;
+                arg2 = r.name;
+            }
+            if ( msgID!=ErrorType.INVALID ) {
+                errMgr.grammarError(msgID,g.fileName,
+                                    r.scope.ast.token,
+                                    attrName,arg2);
+            }
+        }
+    }
+
+    // CAN ONLY CALL THE TWO NEXT METHODS AFTER GRAMMAR HAS RULE DEFS (see semanticpipeline)
+
+    public void checkRuleArgs(Grammar g, List rulerefs) {
+        if ( rulerefs==null ) return;
+        for (GrammarAST ref : rulerefs) {
+            String ruleName = ref.getText();
+            Rule r = g.getRule(ruleName);
+            if ( r==null && !ref.hasAncestor(ANTLRParser.DOT)) {
+                // only give error for unqualified rule refs now
+                errMgr.grammarError(ErrorType.UNDEFINED_RULE_REF,
+                                    g.fileName, ref.token, ruleName);
+            }
+            GrammarAST arg = (GrammarAST)ref.getChild(0);
+            if ( r!=null && arg!=null && r.args==null ) {
+                errMgr.grammarError(ErrorType.RULE_HAS_NO_ARGS,
+                                    g.fileName, ref.token, ruleName);
+            }
+            else if ( arg==null && (r!=null&&r.args!=null) ) {
+                errMgr.grammarError(ErrorType.MISSING_RULE_ARGS,
+                                    g.fileName, ref.token, ruleName);
+            }
+        }
+    }
+
+    public void checkForQualifiedRuleIssues(Grammar g, List qualifiedRuleRefs) {
+        for (GrammarAST dot : qualifiedRuleRefs) {
+            GrammarAST grammar = (GrammarAST)dot.getChild(0);
+            GrammarAST rule = (GrammarAST)dot.getChild(1);
+            System.out.println(grammar.getText()+"."+rule.getText());
+            Grammar delegate = g.getImportedGrammar(grammar.getText());
+            if ( delegate==null ) {
+                errMgr.grammarError(ErrorType.NO_SUCH_GRAMMAR_SCOPE,
+                                    g.fileName, grammar.token, grammar.getText(),
+                                    rule.getText());
+            }
+            else {
+                if ( g.getRule(grammar.getText(), rule.getText())==null ) {
+                    errMgr.grammarError(ErrorType.NO_SUCH_RULE_IN_SCOPE,
+                                        g.fileName, rule.token, grammar.getText(),
+                                        rule.getText());
+                }
+            }
+        }
+    }
+}
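checkForQualifiedRuleIssues above resolves references of the form delegate.rule in two steps: the grammar part must name an imported grammar, and that grammar must define the rule. Here is a small self-contained sketch of the same two-step lookup using plain maps in place of the Grammar and Rule objects; the ImportedGrammars class and resolve method are hypothetical and expect the "G.r" form.

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class ImportedGrammars {
    // imported grammar name -> rules it defines
    Map<String, Set<String>> imports = new HashMap<String, Set<String>>();

    /** Mirror of the two checks: unknown grammar scope first, then unknown rule in that scope. */
    String resolve(String qualifiedRef) {
        String[] parts = qualifiedRef.split("\\.", 2);   // "G.r" -> ["G", "r"]
        String grammar = parts[0], rule = parts[1];
        Set<String> rules = imports.get(grammar);
        if (rules == null)          return "no such grammar scope: " + grammar;
        if (!rules.contains(rule))  return "no such rule " + rule + " in scope " + grammar;
        return "ok: " + qualifiedRef;
    }

    public static void main(String[] args) {
        ImportedGrammars g = new ImportedGrammars();
        g.imports.put("Java", new HashSet<String>(Arrays.asList("expr", "stat")));
        System.out.println(g.resolve("Java.expr")); // ok: Java.expr
        System.out.println(g.resolve("Java.decl")); // no such rule decl in scope Java
        System.out.println(g.resolve("C.expr"));    // no such grammar scope: C
    }
}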
diff --git a/tool/src/org/antlr/v4/semantics/UseDefAnalyzer.java b/tool/src/org/antlr/v4/semantics/UseDefAnalyzer.java
new file mode 100644
index 000000000..78fddfd4a
--- /dev/null
+++ b/tool/src/org/antlr/v4/semantics/UseDefAnalyzer.java
@@ -0,0 +1,69 @@
+package org.antlr.v4.semantics;
+
+import org.antlr.v4.parse.ANTLRParser;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.tool.*;
+
+import java.util.*;
+
+/** Look for errors and deadcode stuff */
+public class UseDefAnalyzer {
+    public static void checkRewriteElementsPresentOnLeftSide(Grammar g) {
+        for (Rule r : g.rules.values()) {
+            for (int a=1; a<=r.numberOfAlts; a++) {
+                Alternative alt = r.alt[a];
+                for (GrammarAST e : alt.rewriteElements) {
+                    if ( !(alt.ruleRefs.containsKey(e.getText()) ||
+                           g.getTokenType(e.getText())!= Token.INVALID_TYPE ||
+                           alt.labelDefs.containsKey(e.getText()) ||
+                           e.getText().equals(r.name)) ) // $r ok in rule r
+                    {
+                        g.tool.errMgr.grammarError(ErrorType.REWRITE_ELEMENT_NOT_PRESENT_ON_LHS,
+                                                   g.fileName, e.token, e.getText());
+                    }
+                }
+            }
+        }
+    }
+
+    // side-effect: updates Alternative with refs in actions
+    public static void trackTokenRuleRefsInActions(Grammar g) {
+        for (Rule r : g.rules.values()) {
+            for (int i=1; i<=r.numberOfAlts; i++) {
+                Alternative alt = r.alt[i];
+                for (ActionAST a : alt.actions) {
+                    ActionSniffer sniffer = new ActionSniffer(g, r, alt, a, a.token);
+                    sniffer.examineAction();
+                }
+            }
+        }
+    }
+
+    /** Find all rules reachable from r directly or indirectly for all r in g */
+    public static Map<Rule, Set<Rule>> getRuleDependencies(Grammar g) {
+        return getRuleDependencies(g, g.rules.values());
+    }
+
+    public static Map<Rule, Set<Rule>> getRuleDependencies(LexerGrammar g, String modeName) {
+        return getRuleDependencies(g, g.modes.get(modeName));
+    }
+
+    public static Map<Rule, Set<Rule>> getRuleDependencies(Grammar g, Collection<Rule> rules) {
+        Map<Rule, Set<Rule>> dependencies = new HashMap<Rule, Set<Rule>>();
+
+        for (Rule r : rules) {
+            List tokenRefs = r.ast.getNodesWithType(ANTLRParser.TOKEN_REF);
+            for (GrammarAST tref : tokenRefs) {
+                Set<Rule> calls = dependencies.get(r);
+                if ( calls==null ) {
+                    calls = new HashSet<Rule>();
+                    dependencies.put(r, calls);
+                }
+                calls.add(g.getRule(tref.getText()));
+            }
+        }
+
+        return dependencies;
+    }
+
+}
diff --git a/tool/src/org/antlr/v4/tool/LexerGrammar.java b/tool/src/org/antlr/v4/tool/LexerGrammar.java
index b6430e57d..cb4caec88 100644
--- a/tool/src/org/antlr/v4/tool/LexerGrammar.java
+++ b/tool/src/org/antlr/v4/tool/LexerGrammar.java
@@ -1,9 +1,8 @@
 package org.antlr.v4.tool;
 
-import org.antlr.misc.MultiMap;
 import org.antlr.runtime.RecognitionException;
-import org.antlr.tool.Rule;
 import org.antlr.v4.Tool;
+import org.stringtemplate.v4.misc.MultiMap;
 
 /** */
 public class LexerGrammar extends Grammar {