forked from jasder/antlr
Bug fixes + update C++ template.
- The ArrayPredictionContext also has parent references (like the SingletonPredictionContext) which need to be strong refs, or we may lose some of the parent contexts if they are not held somewhere else.
- Don't use WCHAR_MIN as the lower bound for char input checks; it is not 0 as you would expect but about -2G, which lets EOF pass a check it should fail (see the sketch below).
- Don't resize the parents array when merging parents and return states in PredictionContext, or we will try to access parents outside of the available range.
- Use an unordered set when merging parents in PredictionContext, so that the normal equality pattern kicks in when comparing contexts.
- Some parameters in AbstractParseTreeVisitor were wrongly commented out entirely, where only the parameter name should have been.

C++ template:
- No longer include the DEFAULT_MODE in the generated lexer (it's defined elsewhere).
- Corrected formatting and finished some reference rules that were not done yet.
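A minimal standalone sketch of the WCHAR_MIN pitfall described above (illustrative only, not code from this commit): on platforms where wchar_t is a signed 32-bit type, WCHAR_MIN is roughly -2G, so a lower bound of WCHAR_MIN never rejects negative values such as EOF (-1), while a lower bound of 0 does.

```cpp
// Illustrative only; mirrors the shape of the range check, not the runtime's actual code.
#include <cwchar>
#include <iostream>

int main() {
  long value = -1;  // e.g. EOF coming back from a char stream

  // With a signed 32-bit wchar_t, WCHAR_MIN is about -2G, so -1 is "in range".
  bool acceptedWithWcharMin = !((wchar_t)value < WCHAR_MIN || value > WCHAR_MAX);

  // Checking against 0 instead rejects EOF as intended.
  bool acceptedWithZero = !((wchar_t)value < 0 || value > WCHAR_MAX);

  std::cout << acceptedWithWcharMin << " " << acceptedWithZero << std::endl;
  // Prints "1 0" on such platforms: the WCHAR_MIN variant lets EOF through.
}
```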
This commit is contained in:
parent
23873b4a14
commit
313c971cae
@@ -841,7 +841,7 @@
 276E5C161CDB57AA003FF4B4 /* ActionTransition.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ActionTransition.h; sourceTree = "<group>"; };
 276E5C171CDB57AA003FF4B4 /* AmbiguityInfo.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = AmbiguityInfo.cpp; sourceTree = "<group>"; };
 276E5C181CDB57AA003FF4B4 /* AmbiguityInfo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AmbiguityInfo.h; sourceTree = "<group>"; };
-276E5C191CDB57AA003FF4B4 /* ArrayPredictionContext.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ArrayPredictionContext.cpp; sourceTree = "<group>"; };
+276E5C191CDB57AA003FF4B4 /* ArrayPredictionContext.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ArrayPredictionContext.cpp; sourceTree = "<group>"; wrapsLines = 0; };
 276E5C1A1CDB57AA003FF4B4 /* ArrayPredictionContext.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ArrayPredictionContext.h; sourceTree = "<group>"; };
 276E5C1B1CDB57AA003FF4B4 /* ATN.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ATN.cpp; sourceTree = "<group>"; };
 276E5C1C1CDB57AA003FF4B4 /* ATN.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ATN.h; sourceTree = "<group>"; };
@@ -890,7 +890,7 @@
 276E5C491CDB57AA003FF4B4 /* LexerActionType.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = LexerActionType.h; sourceTree = "<group>"; };
 276E5C4A1CDB57AA003FF4B4 /* LexerATNConfig.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = LexerATNConfig.cpp; sourceTree = "<group>"; wrapsLines = 0; };
 276E5C4B1CDB57AA003FF4B4 /* LexerATNConfig.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = LexerATNConfig.h; sourceTree = "<group>"; };
-276E5C4C1CDB57AA003FF4B4 /* LexerATNSimulator.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = LexerATNSimulator.cpp; sourceTree = "<group>"; };
+276E5C4C1CDB57AA003FF4B4 /* LexerATNSimulator.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = LexerATNSimulator.cpp; sourceTree = "<group>"; wrapsLines = 0; };
 276E5C4D1CDB57AA003FF4B4 /* LexerATNSimulator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = LexerATNSimulator.h; sourceTree = "<group>"; };
 276E5C4E1CDB57AA003FF4B4 /* LexerChannelAction.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = LexerChannelAction.cpp; sourceTree = "<group>"; };
 276E5C4F1CDB57AA003FF4B4 /* LexerChannelAction.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = LexerChannelAction.h; sourceTree = "<group>"; };
@@ -953,7 +953,7 @@
 276E5C891CDB57AA003FF4B4 /* SetTransition.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = SetTransition.cpp; sourceTree = "<group>"; };
 276E5C8A1CDB57AA003FF4B4 /* SetTransition.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SetTransition.h; sourceTree = "<group>"; };
 276E5C8B1CDB57AA003FF4B4 /* SingletonPredictionContext.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = SingletonPredictionContext.cpp; sourceTree = "<group>"; };
-276E5C8C1CDB57AA003FF4B4 /* SingletonPredictionContext.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SingletonPredictionContext.h; sourceTree = "<group>"; };
+276E5C8C1CDB57AA003FF4B4 /* SingletonPredictionContext.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SingletonPredictionContext.h; sourceTree = "<group>"; wrapsLines = 0; };
 276E5C8D1CDB57AA003FF4B4 /* StarBlockStartState.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = StarBlockStartState.cpp; sourceTree = "<group>"; };
 276E5C8E1CDB57AA003FF4B4 /* StarBlockStartState.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = StarBlockStartState.h; sourceTree = "<group>"; };
 276E5C8F1CDB57AA003FF4B4 /* StarLoopbackState.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = StarLoopbackState.cpp; sourceTree = "<group>"; };

@@ -55,8 +55,8 @@ void Lexer::reset() {
   type = Token::INVALID_TYPE;
   channel = Token::DEFAULT_CHANNEL;
   tokenStartCharIndex = -1;
-  tokenStartCharPositionInLine = -1;
-  tokenStartLine = -1;
+  tokenStartCharPositionInLine = 0;
+  tokenStartLine = 0;
   text = L"";
 
   hitEOF = false;

@@ -378,7 +378,7 @@ std::vector<size_t> ATNSerializer::serialize() {
 
   // don't adjust the first value since that's the version number
   for (size_t i = 1; i < data.size(); i++) {
-    if ((wchar_t)data.at(i) < WCHAR_MIN || data.at(i) > WCHAR_MAX) {
+    if ((wchar_t)data.at(i) < 0 || data.at(i) > WCHAR_MAX) {
       throw UnsupportedOperationException("Serialized ATN data element out of range.");
     }
 
@@ -569,7 +569,7 @@ std::wstring ATNSerializer::getTokenName(ssize_t t) {
     return L"EOF";
   }
 
-  if (atn->grammarType == ATNType::LEXER && t >= WCHAR_MIN &&
+  if (atn->grammarType == ATNType::LEXER && t >= 0 &&
       t <= WCHAR_MAX) {
     switch (t) {
       case L'\n':

@@ -40,9 +40,9 @@ ArrayPredictionContext::ArrayPredictionContext(Ref<SingletonPredictionContext> a
   : ArrayPredictionContext({ a->parent }, { a->returnState }) {
 }
 
-ArrayPredictionContext::ArrayPredictionContext(const std::vector<std::weak_ptr<PredictionContext> > &parents,
+ArrayPredictionContext::ArrayPredictionContext(const std::vector<std::weak_ptr<PredictionContext> > &parents_,
   const std::vector<int> &returnStates)
-  : PredictionContext(calculateHashCode(parents, returnStates)), parents(parents), returnStates(returnStates) {
+  : PredictionContext(calculateHashCode(parents_, returnStates)), parents(makeRef(parents_)), returnStates(returnStates) {
   assert(parents.size() > 0);
   assert(returnStates.size() > 0);
 }
@@ -74,7 +74,8 @@ bool ArrayPredictionContext::operator == (const PredictionContext &o) const {
     return false; // can't be same if hash is different
   }
 
-  return antlrcpp::Arrays::equals(returnStates, other->returnStates) && antlrcpp::Arrays::equals(parents, other->parents);
+  return antlrcpp::Arrays::equals(returnStates, other->returnStates) &&
+    antlrcpp::Arrays::equals(parents, other->parents);
 }
 
 std::wstring ArrayPredictionContext::toString() {
@@ -93,8 +94,8 @@ std::wstring ArrayPredictionContext::toString() {
       continue;
     }
     ss << returnStates[i];
-    if (!parents[i].expired()) {
-      ss << L" " << parents[i].lock()->toString();
+    if (parents[i] != nullptr) {
+      ss << L" " << parents[i]->toString();
     } else {
       ss << L"null";
     }
@@ -102,3 +103,11 @@ std::wstring ArrayPredictionContext::toString() {
   ss << L"]";
   return ss.str();
 }
+
+std::vector<Ref<PredictionContext>> ArrayPredictionContext::makeRef(const std::vector<std::weak_ptr<PredictionContext> > &input) {
+  std::vector<Ref<PredictionContext>> result;
+  for (auto element : input) {
+    result.push_back(element.lock());
+  }
+  return result;
+}

@@ -47,13 +47,15 @@ namespace atn {
     /// Parent can be empty only if full ctx mode and we make an array
     /// from EMPTY and non-empty. We merge EMPTY by using null parent and
     /// returnState == EMPTY_RETURN_STATE.
-    const std::vector<std::weak_ptr<PredictionContext>> parents;
+    // Also here: we use a strong reference to our parents to avoid having them freed prematurely.
+    // See also SinglePredictionContext.
+    const std::vector<Ref<PredictionContext>> parents;
 
     /// Sorted for merge, no duplicates; if present, EMPTY_RETURN_STATE is always last.
     const std::vector<int> returnStates;
 
     ArrayPredictionContext(Ref<SingletonPredictionContext> a);
-    ArrayPredictionContext(const std::vector<std::weak_ptr<PredictionContext>> &parents,
+    ArrayPredictionContext(const std::vector<std::weak_ptr<PredictionContext>> &parents_,
       const std::vector<int> &returnStates);
     virtual ~ArrayPredictionContext() {};
 
@@ -64,6 +66,8 @@ namespace atn {
     bool operator == (const PredictionContext &o) const override;
 
     virtual std::wstring toString();
+  private:
+    std::vector<Ref<PredictionContext>> makeRef(const std::vector<std::weak_ptr<PredictionContext> > &input);
   };
 
 } // namespace atn

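Side note on the strong-reference change to ArrayPredictionContext::parents above, a minimal sketch (illustrative only; Node is a made-up stand-in, not a runtime type) of why a weakly held parent can vanish while a strongly held one survives:

```cpp
#include <iostream>
#include <memory>

struct Node { int id; };  // stand-in for a parent prediction context

int main() {
  std::weak_ptr<Node> weakParent;
  std::shared_ptr<Node> strongParent;
  {
    auto parent = std::make_shared<Node>(Node{42});
    weakParent = parent;    // non-owning: does not extend the parent's lifetime
    strongParent = parent;  // owning: keeps the parent alive past this scope
  }
  std::cout << weakParent.expired() << " "              // 1: the weakly held parent is gone
            << (strongParent != nullptr) << std::endl;  // 1: the strongly held parent survived
}
```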
@@ -329,7 +329,7 @@ void LexerATNSimulator::accept(CharStream *input, Ref<LexerActionExecutor> lexer
 }
 
 atn::ATNState *LexerATNSimulator::getReachableTarget(Transition *trans, ssize_t t) {
-  if (trans->matches((int)t, WCHAR_MIN, WCHAR_MAX)) {
+  if (trans->matches((int)t, 0, 65535)) {
     return trans->target;
   }
 

@@ -298,8 +298,8 @@ Ref<PredictionContext> PredictionContext::mergeArrays(Ref<ArrayPredictionContext
 
   // walk and merge to yield mergedParents, mergedReturnStates
   while (i < a->returnStates.size() && j < b->returnStates.size()) {
-    Ref<PredictionContext> a_parent = a->parents[i].lock();
-    Ref<PredictionContext> b_parent = b->parents[j].lock();
+    Ref<PredictionContext> a_parent = a->parents[i];
+    Ref<PredictionContext> b_parent = b->parents[j];
     if (a->returnStates[i] == b->returnStates[j]) {
       // same payload (stack tops are equal), must yield merged singleton
       int payload = a->returnStates[i];
@@ -377,7 +377,8 @@ Ref<PredictionContext> PredictionContext::mergeArrays(Ref<ArrayPredictionContext
     return b;
   }
 
-  if (combineCommonParents(mergedParents)) // Need to recreate the context as the parents array is copied on creation.
+  // This part differs from Java code. We have to recreate the context as the parents array is copied on creation.
+  if (combineCommonParents(mergedParents))
     M = std::make_shared<ArrayPredictionContext>(mergedParents, mergedReturnStates);
 
   if (mergeCache != nullptr) {
@@ -387,9 +388,9 @@ Ref<PredictionContext> PredictionContext::mergeArrays(Ref<ArrayPredictionContext
 }
 
 bool PredictionContext::combineCommonParents(std::vector<std::weak_ptr<PredictionContext>> &parents) {
-  std::set<Ref<PredictionContext>> uniqueParents;
+  std::unordered_set<Ref<PredictionContext>, PredictionContextHasher, PredictionContextComparer> uniqueParents;
 
-  for (size_t p = 0; p < parents.size(); p++) {
+  for (size_t p = 0; p < parents.size(); ++p) {
     Ref<PredictionContext> parent = parents[p].lock();
     // ml: it's assumed that the == operator of PredictionContext kicks in here.
     if (uniqueParents.find(parent) == uniqueParents.end()) { // don't replace
@@ -400,8 +401,10 @@ bool PredictionContext::combineCommonParents(std::vector<std::weak_ptr<Predictio
   if (uniqueParents.size() == parents.size())
     return false;
 
-  parents.clear();
-  std::copy(uniqueParents.begin(), uniqueParents.end(), parents.begin());
+  // Don't resize the parents array, just update the content.
+  for (size_t p = 0; p < parents.size(); ++p) {
+    parents[p] = *uniqueParents.find(parents[p].lock());
+  }
   return true;
 }
 

@@ -42,13 +42,27 @@ namespace runtime {
 namespace atn {
 
   // Cannot use PredictionContext> here as this declared below first.
-  typedef std::set<Ref<PredictionContext>> PredictionContextCache;
+  typedef std::unordered_set<Ref<PredictionContext>> PredictionContextCache;
 
   // For the keys we use raw pointers, as we don't need to access them.
   typedef std::map<std::pair<PredictionContext *, PredictionContext *>, Ref<PredictionContext>> PredictionContextMergeCache;
 
   class ANTLR4CPP_PUBLIC PredictionContext {
   public:
+    struct PredictionContextHasher
+    {
+      size_t operator () (Ref<PredictionContext> k) const {
+        return k->hashCode();
+      }
+    };
+
+    struct PredictionContextComparer {
+      bool operator () (Ref<PredictionContext> lhs, Ref<PredictionContext> rhs) const
+      {
+        return *lhs == *rhs;
+      }
+    };
+
     /// Represents $ in local context prediction, which means wildcard.
     /// *+x = *.
     static const Ref<PredictionContext> EMPTY;
@@ -249,16 +263,3 @@
 } // namespace antlr
 } // namespace org
 
-// Hash function for PredictionContext, used in the MurmurHash::update function
-
-namespace std {
-  using org::antlr::v4::runtime::atn::PredictionContext;
-
-  template <> struct hash<PredictionContext>
-  {
-    size_t operator () (const PredictionContext &x) const
-    {
-      return x.hashCode();
-    }
-  };
-}

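For context on the PredictionContextHasher/PredictionContextComparer pair introduced above, a minimal sketch (illustrative only; Ctx is a made-up stand-in for PredictionContext) of how custom hash and equality functors make an unordered_set deduplicate shared_ptr elements by value rather than by pointer identity, which is what lets the normal equality pattern kick in when merging parents:

```cpp
#include <iostream>
#include <memory>
#include <string>
#include <unordered_set>

// Stand-in for PredictionContext; only the hash/equality surface matters here.
struct Ctx {
  std::string payload;
  size_t hashCode() const { return std::hash<std::string>()(payload); }
  bool operator == (const Ctx &o) const { return payload == o.payload; }
};

struct CtxHasher {
  size_t operator () (const std::shared_ptr<Ctx> &k) const { return k->hashCode(); }
};

struct CtxComparer {
  bool operator () (const std::shared_ptr<Ctx> &lhs, const std::shared_ptr<Ctx> &rhs) const {
    return *lhs == *rhs;  // compare the pointed-to values, not the pointers
  }
};

int main() {
  std::unordered_set<std::shared_ptr<Ctx>, CtxHasher, CtxComparer> uniqueParents;
  uniqueParents.insert(std::make_shared<Ctx>(Ctx{"a"}));
  uniqueParents.insert(std::make_shared<Ctx>(Ctx{"a"}));  // equal value, different pointer: rejected
  uniqueParents.insert(std::make_shared<Ctx>(Ctx{"b"}));
  std::cout << uniqueParents.size() << std::endl;         // prints 2
}
```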
@@ -88,7 +88,7 @@ namespace tree {
     /// The default implementation returns the result of
     /// <seealso cref="#defaultResult defaultResult"/>.
     /// </summary>
-    virtual T* visitTerminal(/* TerminalNode *node */) override {
+    virtual T* visitTerminal(TerminalNode * /*node*/) override {
       return defaultResult();
     }
 
@@ -98,7 +98,7 @@ namespace tree {
     /// The default implementation returns the result of
     /// <seealso cref="#defaultResult defaultResult"/>.
     /// </summary>
-    virtual T* visitErrorNode(/* ErrorNode *node */) override {
+    virtual T* visitErrorNode(ErrorNode * /*node*/) override {
       return defaultResult();
     }
 
@@ -135,7 +135,7 @@ namespace tree {
     /// a child node.
     /// </param>
     /// <returns> The updated aggregate result. </returns>
-    virtual T* aggregateResult(/* T* aggregate, */ T* nextResult) {
+    virtual T* aggregateResult(T* /*aggregate*/, T* nextResult) {
       return nextResult;
     }
 
@@ -162,7 +162,7 @@ namespace tree {
     /// <returns> {@code true} to continue visiting children. Otherwise return
     /// {@code false} to stop visiting children and immediately return the
    /// current aggregate result from <seealso cref="#visitChildren"/>. </returns>
-    virtual bool shouldVisitNextChild(/*RuleNode *node, T currentResult*/) {
+    virtual bool shouldVisitNextChild(RuleNode * /*node*/, T /*currentResult*/) {
      return true;
     }
 

@@ -51,9 +51,9 @@ public:
 };
 <endif>
 
-<if (lexer.modes)>
+<if (rest(lexer.modes))>
 enum {
-  <lexer.modes: {m | <m> = <i>,}; separator="\n", anchor>
+  <rest(lexer.modes): {m | <m> = <i>,}; separator="\n", anchor>
 };
 <endif>
 
@@ -149,8 +149,8 @@ void <lexer.name>::action(Ref\<RuleContext> context, int ruleIndex, int actionIn
   switch (ruleIndex) {
     <lexer.actionFuncs.values: {f | case <f.ruleIndex>: <f.name>Action(std::dynamic_pointer_cast\<<f.ctxType>\>(context), actionIndex); break;}; separator="\n">
 
-    default:
-      break;
+  default:
+    break;
   }
 }
 <endif>
@@ -160,8 +160,8 @@ bool <lexer.name>::sempred(Ref\<RuleContext> context, int ruleIndex, int predica
   switch (ruleIndex) {
     <lexer.sempredFuncs.values: {f | case <f.ruleIndex>: return <f.name>Sempred(std::dynamic_pointer_cast\<<f.ctxType>\>(context), predicateIndex);}; separator="\n">
 
-    default:
-      break;
+  default:
+    break;
   }
   return true;
 }
@@ -231,7 +231,7 @@ void <r.factory.grammar.name>::<r.name>Action(Ref\<<r.ctxType>\> context, int ac
 
   default:
     break;
-    }
+  }
 }
 
 >>
@@ -245,10 +245,10 @@ bool <r.factory.grammar.name>::<r.name>Sempred(Ref\<<r.ctxType>\> /*context*/, i
   switch (predicateIndex) {
     <actions: {index | case <index>: return <actions.(index)>;}; separator="\n">
 
-    default:
-      break;
-    }
-    return true;
+  default:
+    break;
+  }
+  return true;
 }
 
 >>
@@ -345,8 +345,8 @@ bool <parser.name>::sempred(Ref\<RuleContext> context, int ruleIndex, int predic
   <parser.sempredFuncs.values: {f |
     case <f.ruleIndex>: return <f.name>Sempred(std::dynamic_pointer_cast\<<f.ctxType>\>(context), predicateIndex);}; separator="\n">
 
-    default:
-      break;
+  default:
+    break;
   }
   return true;
 }
@@ -362,7 +362,7 @@ atn::ATN <parser.name>::_atn;
 std::wstring <parser.name>::_serializedATN;
 
 std::vector\<std::wstring> <parser.name>::_ruleNames = {
-    <parser.ruleNames: {r | L"<r>"}; separator = ", ", wrap, anchor>
+  <parser.ruleNames: {r | L"<r>"}; separator = ", ", wrap, anchor>
 };
 
 std::vector\<std::wstring> <parser.name>::_literalNames = {
@@ -529,8 +529,10 @@ public:
 
   <dispatchMethods; separator = "\n">
   <! TODO: untested !> <extensionMembers; separator = "\n">
+<if (attrs)>
 private:
-  <if (attrs)><attrs: {a | <if (a.isLocal)><a><endif>}; separator = "\n"><endif>
+  <attrs: {a | <if (a.isLocal)><a><endif>}; separator = "\n">
+<endif>
 };
 
 >>
@@ -721,9 +723,9 @@ _errHandler->sync(this);
 alt = getInterpreter\<atn::ParserATNSimulator>()->adaptivePredict(_input, <choice.decision>, _ctx);
 do {
   switch (alt) {
-  <alts: {alt | case <i><if (!choice.ast.greedy)> + 1<endif>:
-  <alt>
-  break;
+    <alts: {alt | case <i><if (!choice.ast.greedy)> + 1<endif>:
+    <alt>
+    break;
   }; separator="\n">
   default:
     <error>
@@ -794,10 +796,10 @@ MatchNotSet(m, expr, capture) ::= "<CommonSetStuff(m, expr, capture, true)>"
 
 CommonSetStuff(m, expr, capture, invert) ::= <<
 setState(<m.stateNumber>);
-<if(m.labels)><m.labels: {l | <labelref(l)> = }>_input->LT(1);<endif>
+<if (m.labels)><m.labels: {l | <labelref(l)> = }>_input->LT(1);<endif>
 <capture>
-if (<if(invert)><m.varName> \<= 0 || <else>!<endif>(<expr>)) {
-  <if(m.labels)><m.labels:{l | <labelref(l)> = (Token)}><endif>_errHandler->recoverInline(this);
+if (<if (invert)><m.varName> \<= 0 || <else>!<endif>(<expr>)) {
+  <if (m.labels)><m.labels:{l | <labelref(l)> = }><endif>_errHandler->recoverInline(this);
 }
 consume();
 >>
@@ -842,34 +844,60 @@ LexerChannelCommand(arg, grammar) ::= "channel = <arg>;"
 LexerModeCommand(arg, grammar) ::= "mode = <grammar.name>Mode::<arg>;"
 LexerPushModeCommand(arg, grammar) ::= "pushMode(<grammar.name>Mode::<arg>);"
 
 // TODO: untested
 ActionTextHeader(t) ::= "<t.text>"
 ActionText(t) ::= "<t.text>"
 
-ActionTemplateHeader(t) ::= "<! Unused but must be present. !>"
+ActionTemplateHeader(t) ::= "<! Required but unused. !>"
 ActionTemplate(t) ::= "<t.st>"
 
-ArgRef(a) ::= "ArgRef(a) _localctx.<a.name>"
-LocalRef(a) ::= "LocalRef(a) _localctx.<a.name>"
-RetValueRef(a) ::= "RetValueRef(a) _localctx.<a.name>"
-QRetValueRef(a) ::= "QRetValueRef(a) <ctx(a)>.<a.dict>.<a.name>"
+ArgRefHeader(t) ::= "<! Required but unused. !>"
+ArgRef(a) ::= "_localctx-><a.name>"
+
+LocalRefHeader(t) ::= "<! Required but unused. !>"
+LocalRef(a) ::= "_localctx-><a.name>"
+
+RetValueRefHeader(t) ::= "<! Required but unused. !>"
+RetValueRef(a) ::= "_localctx-><a.name>"
+
+QRetValueRefHeader(t) ::= "<! Required but unused. !>"
+QRetValueRef(a) ::= "<ctx(a)>-><a.dict>.<a.name>"
 /** How to translate $tokenLabel */
-TokenRef(t) ::= "TokenRef(t) <ctx(t)>.<t.name>"
-LabelRef(t) ::= "LabelRef(t) <ctx(t)>.<t.name>"
-ListLabelRef(t) ::= "ListLabelRef(t) <ctx(t)>.<ListLabelName(t.name)>"
-SetAttr(s,rhsChunks) ::= "SetAttr(s,rhsChunks) <ctx(s)>.<s.name> = <rhsChunks>;"
-
+TokenRefHeader(t) ::= "<! Required but unused. !>"
+TokenRef(t) ::= "<ctx(t)>-><t.name>"
+
+LabelRefHeader(t) ::= "<! Required but unused. !>"
+LabelRef(t) ::= "<ctx(t)>-><t.name>"
+
+ListLabelRefHeader(t) ::= "<! Required but unused. !>"
+ListLabelRef(t) ::= "<ctx(t)>-><ListLabelName(t.name)>"
+
+SetAttrHeader(t) ::= "<! Required but unused. !>"
+SetAttr(s,rhsChunks) ::= "<ctx(s)>-><s.name> = <rhsChunks>;"
 
 InputSymbolType() ::= "<file.InputSymbolType; null = {Token}> *"
 
-TokenPropertyRef_text(t) ::= "(<ctx(t)>-><t.label> != nullptr ? <ctx(t)>-><t.label>->getText() : nullptr)"
-TokenPropertyRef_type(t) ::= "TokenPropertyRef_type(t) (<ctx(t)>.<t.label> != nullptr ? <ctx(t)>.<t.label>.getType() : 0)"
-TokenPropertyRef_line(t) ::= "TokenPropertyRef_line(t) (<ctx(t)>.<t.label> != nullptr ? <ctx(t)>.<t.label>.getLine() : 0)"
-TokenPropertyRef_pos(t) ::= "TokenPropertyRef_pos(t) (<ctx(t)>.<t.label> != nullptr ? <ctx(t)>.<t.label>.getCharPositionInLine() : 0)"
-TokenPropertyRef_channel(t) ::= "TokenPropertyRef_channel(t) (<ctx(t)>.<t.label> != nullptr ? <ctx(t)>.<t.label>.getChannel() : 0)"
-TokenPropertyRef_index(t) ::= "TokenPropertyRef_index(t) (<ctx(t)>.<t.label> != nullptr ? <ctx(t)>.<t.label>.getTokenIndex() : 0)"
-TokenPropertyRef_int(t) ::= "TokenPropertyRef_int(t) (<ctx(t)>.<t.label> != nullptr ? Integer.valueOf(<ctx(t)>.<t.label>.getText()) : 0)"
+TokenPropertyRef_textHeader(t) ::= "<! Required but unused. !>"
+TokenPropertyRef_text(t) ::= <<(<ctx(t)>-><t.label> != nullptr ? <ctx(t)>-><t.label>->getText() : L"")>>
+
+TokenPropertyRef_typeHeader(t) ::= "<! Required but unused. !>"
+TokenPropertyRef_type(t) ::= "(<ctx(t)>-><t.label> != nullptr ? <ctx(t)>-><t.label>->getType() : 0)"
+
+TokenPropertyRef_lineHeader(t) ::= "<! Required but unused. !>"
+TokenPropertyRef_line(t) ::= "(<ctx(t)>-><t.label> != nullptr ? <ctx(t)>-><t.label>->getLine() : 0)"
+
+TokenPropertyRef_posHeader(t) ::= "<! Required but unused. !>"
+TokenPropertyRef_pos(t) ::= "(<ctx(t)>-><t.label> != nullptr ? <ctx(t)>-><t.label>->getCharPositionInLine() : 0)"
+
+TokenPropertyRef_channelHeader(t) ::= "<! Required but unused. !>"
+TokenPropertyRef_channel(t) ::= "(<ctx(t)>-><t.label> != nullptr ? <ctx(t)>-><t.label>->getChannel() : 0)"
+
+TokenPropertyRef_indexHeader(t) ::= "<! Required but unused. !>"
+TokenPropertyRef_index(t) ::= "(<ctx(t)>-><t.label> != nullptr ? <ctx(t)>-><t.label>->getTokenIndex() : 0)"
+
+TokenPropertyRef_intHeader(t) ::= "<! Required but unused. !>"
+TokenPropertyRef_int(t) ::= "(<ctx(t)>-><t.label> != nullptr ? std::stol(<ctx(t)>-><t.label>->getText()) : 0)"
 
 RulePropertyRef_start(r) ::= "RulePropertyRef_start(r) (<ctx(r)>.<r.label> != nullptr ? (<ctx(r)>.<r.label>.start) : null)"
 RulePropertyRef_stop(r) ::= "RulePropertyRef_stop(r) (<ctx(r)>.<r.label> != nullptr ? (<ctx(r)>.<r.label>.stop) : null)"