Merge pull request #2771 from carocad/atn-es6
Javascript: migrate prototypical ATN objects to ES6
This commit is contained in:
commit
1284814c21
|
@ -7,8 +7,8 @@ var Token = require('./Token').Token;
|
|||
var ParseTreeListener = require('./tree/Tree').ParseTreeListener;
|
||||
var Recognizer = require('./Recognizer').Recognizer;
|
||||
var DefaultErrorStrategy = require('./error/ErrorStrategy').DefaultErrorStrategy;
|
||||
var ATNDeserializer = require('./atn/ATNDeserializer').ATNDeserializer;
|
||||
var ATNDeserializationOptions = require('./atn/ATNDeserializationOptions').ATNDeserializationOptions;
|
||||
var ATNDeserializer = require('./atn/ATNDeserializer');
|
||||
var ATNDeserializationOptions = require('./atn/ATNDeserializationOptions');
|
||||
var TerminalNode = require('./tree/Tree').TerminalNode;
|
||||
var ErrorNode = require('./tree/Tree').ErrorNode;
|
||||
|
||||
|
|
|
@ -3,139 +3,153 @@
|
|||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
|
||||
var LL1Analyzer = require('./../LL1Analyzer').LL1Analyzer;
|
||||
var IntervalSet = require('./../IntervalSet').IntervalSet;
|
||||
const {LL1Analyzer} = require('./../LL1Analyzer');
|
||||
const {IntervalSet} = require('./../IntervalSet');
|
||||
const {Token} = require('./../Token');
|
||||
|
||||
function ATN(grammarType , maxTokenType) {
|
||||
class ATN {
|
||||
constructor(grammarType , maxTokenType) {
|
||||
|
||||
// Used for runtime deserialization of ATNs from strings///
|
||||
// The type of the ATN.
|
||||
this.grammarType = grammarType;
|
||||
// The maximum value for any symbol recognized by a transition in the ATN.
|
||||
this.maxTokenType = maxTokenType;
|
||||
this.states = [];
|
||||
// Each subrule/rule is a decision point and we must track them so we
|
||||
// can go back later and build DFA predictors for them. This includes
|
||||
// all the rules, subrules, optional blocks, ()+, ()* etc...
|
||||
this.decisionToState = [];
|
||||
// Maps from rule index to starting state number.
|
||||
this.ruleToStartState = [];
|
||||
// Maps from rule index to stop state number.
|
||||
this.ruleToStopState = null;
|
||||
this.modeNameToStartState = {};
|
||||
// For lexer ATNs, this maps the rule index to the resulting token type.
|
||||
// For parser ATNs, this maps the rule index to the generated bypass token
|
||||
// type if the
|
||||
// {@link ATNDeserializationOptions//isGenerateRuleBypassTransitions}
|
||||
// deserialization option was specified; otherwise, this is {@code null}.
|
||||
this.ruleToTokenType = null;
|
||||
// For lexer ATNs, this is an array of {@link LexerAction} objects which may
|
||||
// be referenced by action transitions in the ATN.
|
||||
this.lexerActions = null;
|
||||
this.modeToStartState = [];
|
||||
/**
|
||||
* Used for runtime deserialization of ATNs from strings
|
||||
* The type of the ATN.
|
||||
*/
|
||||
this.grammarType = grammarType;
|
||||
// The maximum value for any symbol recognized by a transition in the ATN.
|
||||
this.maxTokenType = maxTokenType;
|
||||
this.states = [];
|
||||
/**
|
||||
* Each subrule/rule is a decision point and we must track them so we
|
||||
* can go back later and build DFA predictors for them. This includes
|
||||
* all the rules, subrules, optional blocks, ()+, ()* etc...
|
||||
*/
|
||||
this.decisionToState = [];
|
||||
// Maps from rule index to starting state number.
|
||||
this.ruleToStartState = [];
|
||||
// Maps from rule index to stop state number.
|
||||
this.ruleToStopState = null;
|
||||
this.modeNameToStartState = {};
|
||||
/**
|
||||
* For lexer ATNs, this maps the rule index to the resulting token type.
|
||||
* For parser ATNs, this maps the rule index to the generated bypass token
|
||||
* type if the {@link ATNDeserializationOptions//isGenerateRuleBypassTransitions}
|
||||
* deserialization option was specified; otherwise, this is {@code null}
|
||||
*/
|
||||
this.ruleToTokenType = null;
|
||||
/**
|
||||
* For lexer ATNs, this is an array of {@link LexerAction} objects which may
|
||||
* be referenced by action transitions in the ATN
|
||||
*/
|
||||
this.lexerActions = null;
|
||||
this.modeToStartState = [];
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Compute the set of valid tokens that can occur starting in state {@code s}.
|
||||
* If {@code ctx} is null, the set of tokens will not include what can follow
|
||||
* the rule surrounding {@code s}. In other words, the set will be
|
||||
* restricted to tokens reachable staying within {@code s}'s rule
|
||||
*/
|
||||
nextTokensInContext(s, ctx) {
|
||||
const anal = new LL1Analyzer(this);
|
||||
return anal.LOOK(s, null, ctx);
|
||||
}
|
||||
|
||||
// Compute the set of valid tokens that can occur starting in state {@code s}.
|
||||
// If {@code ctx} is null, the set of tokens will not include what can follow
|
||||
// the rule surrounding {@code s}. In other words, the set will be
|
||||
// restricted to tokens reachable staying within {@code s}'s rule.
|
||||
ATN.prototype.nextTokensInContext = function(s, ctx) {
|
||||
var anal = new LL1Analyzer(this);
|
||||
return anal.LOOK(s, null, ctx);
|
||||
};
|
||||
|
||||
// Compute the set of valid tokens that can occur starting in {@code s} and
|
||||
// staying in same rule. {@link Token//EPSILON} is in set if we reach end of
|
||||
// rule.
|
||||
ATN.prototype.nextTokensNoContext = function(s) {
|
||||
if (s.nextTokenWithinRule !== null ) {
|
||||
/**
|
||||
* Compute the set of valid tokens that can occur starting in {@code s} and
|
||||
* staying in same rule. {@link Token//EPSILON} is in set if we reach end of
|
||||
* rule
|
||||
*/
|
||||
nextTokensNoContext(s) {
|
||||
if (s.nextTokenWithinRule !== null ) {
|
||||
return s.nextTokenWithinRule;
|
||||
}
|
||||
s.nextTokenWithinRule = this.nextTokensInContext(s, null);
|
||||
s.nextTokenWithinRule.readOnly = true;
|
||||
return s.nextTokenWithinRule;
|
||||
}
|
||||
s.nextTokenWithinRule = this.nextTokensInContext(s, null);
|
||||
s.nextTokenWithinRule.readOnly = true;
|
||||
return s.nextTokenWithinRule;
|
||||
};
|
||||
|
||||
ATN.prototype.nextTokens = function(s, ctx) {
|
||||
if ( ctx===undefined ) {
|
||||
return this.nextTokensNoContext(s);
|
||||
} else {
|
||||
return this.nextTokensInContext(s, ctx);
|
||||
nextTokens(s, ctx) {
|
||||
if ( ctx===undefined ) {
|
||||
return this.nextTokensNoContext(s);
|
||||
} else {
|
||||
return this.nextTokensInContext(s, ctx);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ATN.prototype.addState = function( state) {
|
||||
if ( state !== null ) {
|
||||
state.atn = this;
|
||||
state.stateNumber = this.states.length;
|
||||
addState(state) {
|
||||
if ( state !== null ) {
|
||||
state.atn = this;
|
||||
state.stateNumber = this.states.length;
|
||||
}
|
||||
this.states.push(state);
|
||||
}
|
||||
this.states.push(state);
|
||||
};
|
||||
|
||||
ATN.prototype.removeState = function( state) {
|
||||
this.states[state.stateNumber] = null; // just free mem, don't shift states in list
|
||||
};
|
||||
|
||||
ATN.prototype.defineDecisionState = function( s) {
|
||||
this.decisionToState.push(s);
|
||||
s.decision = this.decisionToState.length-1;
|
||||
return s.decision;
|
||||
};
|
||||
|
||||
ATN.prototype.getDecisionState = function( decision) {
|
||||
if (this.decisionToState.length===0) {
|
||||
return null;
|
||||
} else {
|
||||
return this.decisionToState[decision];
|
||||
removeState(state) {
|
||||
this.states[state.stateNumber] = null; // just free mem, don't shift states in list
|
||||
}
|
||||
};
|
||||
|
||||
// Computes the set of input symbols which could follow ATN state number
|
||||
// {@code stateNumber} in the specified full {@code context}. This method
|
||||
// considers the complete parser context, but does not evaluate semantic
|
||||
// predicates (i.e. all predicates encountered during the calculation are
|
||||
// assumed true). If a path in the ATN exists from the starting state to the
|
||||
// {@link RuleStopState} of the outermost context without matching any
|
||||
// symbols, {@link Token//EOF} is added to the returned set.
|
||||
//
|
||||
// <p>If {@code context} is {@code null}, it is treated as
|
||||
// {@link ParserRuleContext//EMPTY}.</p>
|
||||
//
|
||||
// @param stateNumber the ATN state number
|
||||
// @param context the full parse context
|
||||
// @return The set of potentially valid input symbols which could follow the
|
||||
// specified state in the specified context.
|
||||
// @throws IllegalArgumentException if the ATN does not contain a state with
|
||||
// number {@code stateNumber}
|
||||
var Token = require('./../Token').Token;
|
||||
defineDecisionState(s) {
|
||||
this.decisionToState.push(s);
|
||||
s.decision = this.decisionToState.length-1;
|
||||
return s.decision;
|
||||
}
|
||||
|
||||
ATN.prototype.getExpectedTokens = function( stateNumber, ctx ) {
|
||||
if ( stateNumber < 0 || stateNumber >= this.states.length ) {
|
||||
throw("Invalid state number.");
|
||||
getDecisionState(decision) {
|
||||
if (this.decisionToState.length===0) {
|
||||
return null;
|
||||
} else {
|
||||
return this.decisionToState[decision];
|
||||
}
|
||||
}
|
||||
var s = this.states[stateNumber];
|
||||
var following = this.nextTokens(s);
|
||||
if (!following.contains(Token.EPSILON)) {
|
||||
return following;
|
||||
}
|
||||
var expected = new IntervalSet();
|
||||
expected.addSet(following);
|
||||
expected.removeOne(Token.EPSILON);
|
||||
while (ctx !== null && ctx.invokingState >= 0 && following.contains(Token.EPSILON)) {
|
||||
var invokingState = this.states[ctx.invokingState];
|
||||
var rt = invokingState.transitions[0];
|
||||
following = this.nextTokens(rt.followState);
|
||||
|
||||
/**
|
||||
* Computes the set of input symbols which could follow ATN state number
|
||||
* {@code stateNumber} in the specified full {@code context}. This method
|
||||
* considers the complete parser context, but does not evaluate semantic
|
||||
* predicates (i.e. all predicates encountered during the calculation are
|
||||
* assumed true). If a path in the ATN exists from the starting state to the
|
||||
* {@link RuleStopState} of the outermost context without matching any
|
||||
* symbols, {@link Token//EOF} is added to the returned set.
|
||||
*
|
||||
* <p>If {@code context} is {@code null}, it is treated as
|
||||
* {@link ParserRuleContext//EMPTY}.</p>
|
||||
*
|
||||
* @param stateNumber the ATN state number
|
||||
* @param ctx the full parse context
|
||||
*
|
||||
* @return {IntervalSet} The set of potentially valid input symbols which could follow the
|
||||
* specified state in the specified context.
|
||||
*
|
||||
* @throws IllegalArgumentException if the ATN does not contain a state with
|
||||
* number {@code stateNumber}
|
||||
*/
|
||||
getExpectedTokens(stateNumber, ctx ) {
|
||||
if ( stateNumber < 0 || stateNumber >= this.states.length ) {
|
||||
throw("Invalid state number.");
|
||||
}
|
||||
const s = this.states[stateNumber];
|
||||
let following = this.nextTokens(s);
|
||||
if (!following.contains(Token.EPSILON)) {
|
||||
return following;
|
||||
}
|
||||
const expected = new IntervalSet();
|
||||
expected.addSet(following);
|
||||
expected.removeOne(Token.EPSILON);
|
||||
ctx = ctx.parentCtx;
|
||||
while (ctx !== null && ctx.invokingState >= 0 && following.contains(Token.EPSILON)) {
|
||||
const invokingState = this.states[ctx.invokingState];
|
||||
const rt = invokingState.transitions[0];
|
||||
following = this.nextTokens(rt.followState);
|
||||
expected.addSet(following);
|
||||
expected.removeOne(Token.EPSILON);
|
||||
ctx = ctx.parentCtx;
|
||||
}
|
||||
if (following.contains(Token.EPSILON)) {
|
||||
expected.addOne(Token.EOF);
|
||||
}
|
||||
return expected;
|
||||
}
|
||||
if (following.contains(Token.EPSILON)) {
|
||||
expected.addOne(Token.EOF);
|
||||
}
|
||||
return expected;
|
||||
};
|
||||
}
|
||||
|
||||
ATN.INVALID_ALT_NUMBER = 0;
|
||||
|
||||
|
|
|
@ -1,32 +1,22 @@
|
|||
//
|
||||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
///
|
||||
|
||||
// A tuple: (ATN state, predicted alt, syntactic, semantic context).
|
||||
// The syntactic context is a graph-structured stack node whose
|
||||
// path(s) to the root is the rule invocation(s)
|
||||
// chain used to arrive at the state. The semantic context is
|
||||
// the tree of semantic predicates encountered before reaching
|
||||
// an ATN state.
|
||||
///
|
||||
|
||||
var DecisionState = require('./ATNState').DecisionState;
|
||||
var SemanticContext = require('./SemanticContext').SemanticContext;
|
||||
var Hash = require("../Utils").Hash;
|
||||
const {DecisionState} = require('./ATNState');
|
||||
const {SemanticContext} = require('./SemanticContext');
|
||||
const {Hash} = require("../Utils");
|
||||
|
||||
|
||||
function checkParams(params, isCfg) {
|
||||
if(params===null) {
|
||||
var result = { state:null, alt:null, context:null, semanticContext:null };
|
||||
const result = { state:null, alt:null, context:null, semanticContext:null };
|
||||
if(isCfg) {
|
||||
result.reachesIntoOuterContext = 0;
|
||||
}
|
||||
return result;
|
||||
} else {
|
||||
var props = {};
|
||||
const props = {};
|
||||
props.state = params.state || null;
|
||||
props.alt = (params.alt === undefined) ? null : params.alt;
|
||||
props.context = params.context || null;
|
||||
|
@ -39,138 +29,144 @@ function checkParams(params, isCfg) {
|
|||
}
|
||||
}
|
||||
|
||||
function ATNConfig(params, config) {
|
||||
this.checkContext(params, config);
|
||||
params = checkParams(params);
|
||||
config = checkParams(config, true);
|
||||
// The ATN state associated with this configuration///
|
||||
this.state = params.state!==null ? params.state : config.state;
|
||||
// What alt (or lexer rule) is predicted by this configuration///
|
||||
this.alt = params.alt!==null ? params.alt : config.alt;
|
||||
// The stack of invoking states leading to the rule/states associated
|
||||
// with this config. We track only those contexts pushed during
|
||||
// execution of the ATN simulator.
|
||||
this.context = params.context!==null ? params.context : config.context;
|
||||
this.semanticContext = params.semanticContext!==null ? params.semanticContext :
|
||||
(config.semanticContext!==null ? config.semanticContext : SemanticContext.NONE);
|
||||
// We cannot execute predicates dependent upon local context unless
|
||||
// we know for sure we are in the correct context. Because there is
|
||||
// no way to do this efficiently, we simply cannot evaluate
|
||||
// dependent predicates unless we are in the rule that initially
|
||||
// invokes the ATN simulator.
|
||||
//
|
||||
// closure() tracks the depth of how far we dip into the
|
||||
// outer context: depth > 0. Note that it may not be totally
|
||||
// accurate depth since I don't ever decrement. TODO: make it a boolean then
|
||||
this.reachesIntoOuterContext = config.reachesIntoOuterContext;
|
||||
this.precedenceFilterSuppressed = config.precedenceFilterSuppressed;
|
||||
return this;
|
||||
class ATNConfig {
|
||||
/**
|
||||
* @param {Object} params A tuple: (ATN state, predicted alt, syntactic, semantic context).
|
||||
* The syntactic context is a graph-structured stack node whose
|
||||
* path(s) to the root is the rule invocation(s)
|
||||
* chain used to arrive at the state. The semantic context is
|
||||
* the tree of semantic predicates encountered before reaching
|
||||
* an ATN state
|
||||
*/
|
||||
constructor(params, config) {
|
||||
this.checkContext(params, config);
|
||||
params = checkParams(params);
|
||||
config = checkParams(config, true);
|
||||
// The ATN state associated with this configuration///
|
||||
this.state = params.state!==null ? params.state : config.state;
|
||||
// What alt (or lexer rule) is predicted by this configuration///
|
||||
this.alt = params.alt!==null ? params.alt : config.alt;
|
||||
/**
|
||||
* The stack of invoking states leading to the rule/states associated
|
||||
* with this config. We track only those contexts pushed during
|
||||
* execution of the ATN simulator
|
||||
*/
|
||||
this.context = params.context!==null ? params.context : config.context;
|
||||
this.semanticContext = params.semanticContext!==null ? params.semanticContext :
|
||||
(config.semanticContext!==null ? config.semanticContext : SemanticContext.NONE);
|
||||
// TODO: make it a boolean then
|
||||
/**
|
||||
* We cannot execute predicates dependent upon local context unless
|
||||
* we know for sure we are in the correct context. Because there is
|
||||
* no way to do this efficiently, we simply cannot evaluate
|
||||
* dependent predicates unless we are in the rule that initially
|
||||
* invokes the ATN simulator.
|
||||
* closure() tracks the depth of how far we dip into the
|
||||
* outer context: depth > 0. Note that it may not be totally
|
||||
* accurate depth since I don't ever decrement
|
||||
*/
|
||||
this.reachesIntoOuterContext = config.reachesIntoOuterContext;
|
||||
this.precedenceFilterSuppressed = config.precedenceFilterSuppressed;
|
||||
}
|
||||
|
||||
checkContext(params, config) {
|
||||
if((params.context===null || params.context===undefined) &&
|
||||
(config===null || config.context===null || config.context===undefined)) {
|
||||
this.context = null;
|
||||
}
|
||||
}
|
||||
|
||||
hashCode() {
|
||||
const hash = new Hash();
|
||||
this.updateHashCode(hash);
|
||||
return hash.finish();
|
||||
}
|
||||
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.state.stateNumber, this.alt, this.context, this.semanticContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* An ATN configuration is equal to another if both have
|
||||
* the same state, they predict the same alternative, and
|
||||
* syntactic/semantic contexts are the same
|
||||
*/
|
||||
equals(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof ATNConfig)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.state.stateNumber===other.state.stateNumber &&
|
||||
this.alt===other.alt &&
|
||||
(this.context===null ? other.context===null : this.context.equals(other.context)) &&
|
||||
this.semanticContext.equals(other.semanticContext) &&
|
||||
this.precedenceFilterSuppressed===other.precedenceFilterSuppressed;
|
||||
}
|
||||
}
|
||||
|
||||
hashCodeForConfigSet() {
|
||||
const hash = new Hash();
|
||||
hash.update(this.state.stateNumber, this.alt, this.semanticContext);
|
||||
return hash.finish();
|
||||
}
|
||||
|
||||
equalsForConfigSet(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof ATNConfig)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.state.stateNumber===other.state.stateNumber &&
|
||||
this.alt===other.alt &&
|
||||
this.semanticContext.equals(other.semanticContext);
|
||||
}
|
||||
}
|
||||
|
||||
toString() {
|
||||
return "(" + this.state + "," + this.alt +
|
||||
(this.context!==null ? ",[" + this.context.toString() + "]" : "") +
|
||||
(this.semanticContext !== SemanticContext.NONE ?
|
||||
("," + this.semanticContext.toString())
|
||||
: "") +
|
||||
(this.reachesIntoOuterContext>0 ?
|
||||
(",up=" + this.reachesIntoOuterContext)
|
||||
: "") + ")";
|
||||
}
|
||||
}
|
||||
|
||||
ATNConfig.prototype.checkContext = function(params, config) {
|
||||
if((params.context===null || params.context===undefined) &&
|
||||
(config===null || config.context===null || config.context===undefined)) {
|
||||
this.context = null;
|
||||
}
|
||||
};
|
||||
|
||||
class LexerATNConfig extends ATNConfig {
|
||||
constructor(params, config) {
|
||||
super(params, config);
|
||||
|
||||
ATNConfig.prototype.hashCode = function() {
|
||||
var hash = new Hash();
|
||||
this.updateHashCode(hash);
|
||||
return hash.finish();
|
||||
};
|
||||
|
||||
|
||||
ATNConfig.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.state.stateNumber, this.alt, this.context, this.semanticContext);
|
||||
};
|
||||
|
||||
// An ATN configuration is equal to another if both have
|
||||
// the same state, they predict the same alternative, and
|
||||
// syntactic/semantic contexts are the same.
|
||||
|
||||
ATNConfig.prototype.equals = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof ATNConfig)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.state.stateNumber===other.state.stateNumber &&
|
||||
this.alt===other.alt &&
|
||||
(this.context===null ? other.context===null : this.context.equals(other.context)) &&
|
||||
this.semanticContext.equals(other.semanticContext) &&
|
||||
this.precedenceFilterSuppressed===other.precedenceFilterSuppressed;
|
||||
// This is the backing field for {@link //getLexerActionExecutor}.
|
||||
const lexerActionExecutor = params.lexerActionExecutor || null;
|
||||
this.lexerActionExecutor = lexerActionExecutor || (config!==null ? config.lexerActionExecutor : null);
|
||||
this.passedThroughNonGreedyDecision = config!==null ? this.checkNonGreedyDecision(config, this.state) : false;
|
||||
this.hashCodeForConfigSet = LexerATNConfig.prototype.hashCode;
|
||||
this.equalsForConfigSet = LexerATNConfig.prototype.equals;
|
||||
return this;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
ATNConfig.prototype.hashCodeForConfigSet = function() {
|
||||
var hash = new Hash();
|
||||
hash.update(this.state.stateNumber, this.alt, this.semanticContext);
|
||||
return hash.finish();
|
||||
};
|
||||
|
||||
|
||||
ATNConfig.prototype.equalsForConfigSet = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof ATNConfig)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.state.stateNumber===other.state.stateNumber &&
|
||||
this.alt===other.alt &&
|
||||
this.semanticContext.equals(other.semanticContext);
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.state.stateNumber, this.alt, this.context, this.semanticContext, this.passedThroughNonGreedyDecision, this.lexerActionExecutor);
|
||||
}
|
||||
};
|
||||
|
||||
equals(other) {
|
||||
return this === other ||
|
||||
(other instanceof LexerATNConfig &&
|
||||
this.passedThroughNonGreedyDecision == other.passedThroughNonGreedyDecision &&
|
||||
(this.lexerActionExecutor ? this.lexerActionExecutor.equals(other.lexerActionExecutor) : !other.lexerActionExecutor) &&
|
||||
super.equals(other));
|
||||
}
|
||||
|
||||
ATNConfig.prototype.toString = function() {
|
||||
return "(" + this.state + "," + this.alt +
|
||||
(this.context!==null ? ",[" + this.context.toString() + "]" : "") +
|
||||
(this.semanticContext !== SemanticContext.NONE ?
|
||||
("," + this.semanticContext.toString())
|
||||
: "") +
|
||||
(this.reachesIntoOuterContext>0 ?
|
||||
(",up=" + this.reachesIntoOuterContext)
|
||||
: "") + ")";
|
||||
};
|
||||
|
||||
|
||||
function LexerATNConfig(params, config) {
|
||||
ATNConfig.call(this, params, config);
|
||||
|
||||
// This is the backing field for {@link //getLexerActionExecutor}.
|
||||
var lexerActionExecutor = params.lexerActionExecutor || null;
|
||||
this.lexerActionExecutor = lexerActionExecutor || (config!==null ? config.lexerActionExecutor : null);
|
||||
this.passedThroughNonGreedyDecision = config!==null ? this.checkNonGreedyDecision(config, this.state) : false;
|
||||
return this;
|
||||
checkNonGreedyDecision(source, target) {
|
||||
return source.passedThroughNonGreedyDecision ||
|
||||
(target instanceof DecisionState) && target.nonGreedy;
|
||||
}
|
||||
}
|
||||
|
||||
LexerATNConfig.prototype = Object.create(ATNConfig.prototype);
|
||||
LexerATNConfig.prototype.constructor = LexerATNConfig;
|
||||
|
||||
LexerATNConfig.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.state.stateNumber, this.alt, this.context, this.semanticContext, this.passedThroughNonGreedyDecision, this.lexerActionExecutor);
|
||||
};
|
||||
|
||||
LexerATNConfig.prototype.equals = function(other) {
|
||||
return this === other ||
|
||||
(other instanceof LexerATNConfig &&
|
||||
this.passedThroughNonGreedyDecision == other.passedThroughNonGreedyDecision &&
|
||||
(this.lexerActionExecutor ? this.lexerActionExecutor.equals(other.lexerActionExecutor) : !other.lexerActionExecutor) &&
|
||||
ATNConfig.prototype.equals.call(this, other));
|
||||
};
|
||||
|
||||
LexerATNConfig.prototype.hashCodeForConfigSet = LexerATNConfig.prototype.hashCode;
|
||||
|
||||
LexerATNConfig.prototype.equalsForConfigSet = LexerATNConfig.prototype.equals;
|
||||
|
||||
|
||||
LexerATNConfig.prototype.checkNonGreedyDecision = function(source, target) {
|
||||
return source.passedThroughNonGreedyDecision ||
|
||||
(target instanceof DecisionState) && target.nonGreedy;
|
||||
};
|
||||
|
||||
exports.ATNConfig = ATNConfig;
|
||||
exports.LexerATNConfig = LexerATNConfig;
|
||||
module.exports.ATNConfig = ATNConfig;
|
||||
module.exports.LexerATNConfig = LexerATNConfig;
|
||||
|
|
|
@ -1,21 +1,12 @@
|
|||
//
|
||||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
|
||||
//
|
||||
// Specialized {@link Set}{@code <}{@link ATNConfig}{@code >} that can track
|
||||
// info about the set, with support for combining similar configurations using a
|
||||
// graph-structured stack.
|
||||
///
|
||||
|
||||
var ATN = require('./ATN');
|
||||
var Utils = require('./../Utils');
|
||||
var Hash = Utils.Hash;
|
||||
var Set = Utils.Set;
|
||||
var SemanticContext = require('./SemanticContext').SemanticContext;
|
||||
var merge = require('./../PredictionContext').merge;
|
||||
const ATN = require('./ATN');
|
||||
const Utils = require('./../Utils');
|
||||
const {SemanticContext} = require('./SemanticContext');
|
||||
const {merge} = require('./../PredictionContext');
|
||||
|
||||
function hashATNConfig(c) {
|
||||
return c.hashCodeForConfigSet();
|
||||
|
@ -30,224 +21,233 @@ function equalATNConfigs(a, b) {
|
|||
return a.equalsForConfigSet(b);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specialized {@link Set}{@code <}{@link ATNConfig}{@code >} that can track
|
||||
* info about the set, with support for combining similar configurations using a
|
||||
* graph-structured stack
|
||||
*/
|
||||
class ATNConfigSet {
|
||||
constructor(fullCtx) {
|
||||
/**
|
||||
* The reason that we need this is because we don't want the hash map to use
|
||||
* the standard hash code and equals. We need all configurations with the
|
||||
* same
|
||||
* {@code (s,i,_,semctx)} to be equal. Unfortunately, this key effectively
|
||||
* doubles
|
||||
* the number of objects associated with ATNConfigs. The other solution is
|
||||
* to
|
||||
* use a hash table that lets us specify the equals/hashcode operation.
|
||||
* All configs but hashed by (s, i, _, pi) not including context. Wiped out
|
||||
* when we go readonly as this set becomes a DFA state
|
||||
*/
|
||||
this.configLookup = new Utils.Set(hashATNConfig, equalATNConfigs);
|
||||
/**
|
||||
* Indicates that this configuration set is part of a full context
|
||||
* LL prediction. It will be used to determine how to merge $. With SLL
|
||||
* it's a wildcard whereas it is not for LL context merge
|
||||
*/
|
||||
this.fullCtx = fullCtx === undefined ? true : fullCtx;
|
||||
/**
|
||||
* Indicates that the set of configurations is read-only. Do not
|
||||
* allow any code to manipulate the set; DFA states will point at
|
||||
* the sets and they must not change. This does not protect the other
|
||||
* fields; in particular, conflictingAlts is set after
|
||||
* we've made this readonly
|
||||
*/
|
||||
this.readOnly = false;
|
||||
// Track the elements as they are added to the set; supports get(i)///
|
||||
this.configs = [];
|
||||
|
||||
function ATNConfigSet(fullCtx) {
|
||||
//
|
||||
// The reason that we need this is because we don't want the hash map to use
|
||||
// the standard hash code and equals. We need all configurations with the
|
||||
// same
|
||||
// {@code (s,i,_,semctx)} to be equal. Unfortunately, this key effectively
|
||||
// doubles
|
||||
// the number of objects associated with ATNConfigs. The other solution is
|
||||
// to
|
||||
// use a hash table that lets us specify the equals/hashcode operation.
|
||||
// All configs but hashed by (s, i, _, pi) not including context. Wiped out
|
||||
// when we go readonly as this set becomes a DFA state.
|
||||
this.configLookup = new Set(hashATNConfig, equalATNConfigs);
|
||||
// Indicates that this configuration set is part of a full context
|
||||
// LL prediction. It will be used to determine how to merge $. With SLL
|
||||
// it's a wildcard whereas it is not for LL context merge.
|
||||
this.fullCtx = fullCtx === undefined ? true : fullCtx;
|
||||
// Indicates that the set of configurations is read-only. Do not
|
||||
// allow any code to manipulate the set; DFA states will point at
|
||||
// the sets and they must not change. This does not protect the other
|
||||
// fields; in particular, conflictingAlts is set after
|
||||
// we've made this readonly.
|
||||
this.readOnly = false;
|
||||
// Track the elements as they are added to the set; supports get(i)///
|
||||
this.configs = [];
|
||||
// TODO: these fields make me pretty uncomfortable but nice to pack up info
|
||||
// together, saves recomputation
|
||||
// TODO: can we track conflicts as they are added to save scanning configs
|
||||
// later?
|
||||
this.uniqueAlt = 0;
|
||||
this.conflictingAlts = null;
|
||||
|
||||
// TODO: these fields make me pretty uncomfortable but nice to pack up info
|
||||
// together, saves recomputation
|
||||
// TODO: can we track conflicts as they are added to save scanning configs
|
||||
// later?
|
||||
this.uniqueAlt = 0;
|
||||
this.conflictingAlts = null;
|
||||
/**
|
||||
* Used in parser and lexer. In lexer, it indicates we hit a pred
|
||||
* while computing a closure operation. Don't make a DFA state from this
|
||||
*/
|
||||
this.hasSemanticContext = false;
|
||||
this.dipsIntoOuterContext = false;
|
||||
|
||||
// Used in parser and lexer. In lexer, it indicates we hit a pred
|
||||
// while computing a closure operation. Don't make a DFA state from this.
|
||||
this.hasSemanticContext = false;
|
||||
this.dipsIntoOuterContext = false;
|
||||
|
||||
this.cachedHashCode = -1;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
// Adding a new config means merging contexts with existing configs for
|
||||
// {@code (s, i, pi, _)}, where {@code s} is the
|
||||
// {@link ATNConfig//state}, {@code i} is the {@link ATNConfig//alt}, and
|
||||
// {@code pi} is the {@link ATNConfig//semanticContext}. We use
|
||||
// {@code (s,i,pi)} as key.
|
||||
//
|
||||
// <p>This method updates {@link //dipsIntoOuterContext} and
|
||||
// {@link //hasSemanticContext} when necessary.</p>
|
||||
// /
|
||||
ATNConfigSet.prototype.add = function(config, mergeCache) {
|
||||
if (mergeCache === undefined) {
|
||||
mergeCache = null;
|
||||
}
|
||||
if (this.readOnly) {
|
||||
throw "This set is readonly";
|
||||
}
|
||||
if (config.semanticContext !== SemanticContext.NONE) {
|
||||
this.hasSemanticContext = true;
|
||||
}
|
||||
if (config.reachesIntoOuterContext > 0) {
|
||||
this.dipsIntoOuterContext = true;
|
||||
}
|
||||
var existing = this.configLookup.add(config);
|
||||
if (existing === config) {
|
||||
this.cachedHashCode = -1;
|
||||
this.configs.push(config); // track order here
|
||||
}
|
||||
|
||||
/**
|
||||
* Adding a new config means merging contexts with existing configs for
|
||||
* {@code (s, i, pi, _)}, where {@code s} is the
|
||||
* {@link ATNConfig//state}, {@code i} is the {@link ATNConfig//alt}, and
|
||||
* {@code pi} is the {@link ATNConfig//semanticContext}. We use
|
||||
* {@code (s,i,pi)} as key.
|
||||
*
|
||||
* <p>This method updates {@link //dipsIntoOuterContext} and
|
||||
* {@link //hasSemanticContext} when necessary.</p>
|
||||
*/
|
||||
add(config, mergeCache) {
|
||||
if (mergeCache === undefined) {
|
||||
mergeCache = null;
|
||||
}
|
||||
if (this.readOnly) {
|
||||
throw "This set is readonly";
|
||||
}
|
||||
if (config.semanticContext !== SemanticContext.NONE) {
|
||||
this.hasSemanticContext = true;
|
||||
}
|
||||
if (config.reachesIntoOuterContext > 0) {
|
||||
this.dipsIntoOuterContext = true;
|
||||
}
|
||||
const existing = this.configLookup.add(config);
|
||||
if (existing === config) {
|
||||
this.cachedHashCode = -1;
|
||||
this.configs.push(config); // track order here
|
||||
return true;
|
||||
}
|
||||
// a previous (s,i,pi,_), merge with it and save result
|
||||
const rootIsWildcard = !this.fullCtx;
|
||||
const merged = merge(existing.context, config.context, rootIsWildcard, mergeCache);
|
||||
/**
|
||||
* no need to check for existing.context, config.context in cache
|
||||
* since only way to create new graphs is "call rule" and here. We
|
||||
* cache at both places
|
||||
*/
|
||||
existing.reachesIntoOuterContext = Math.max( existing.reachesIntoOuterContext, config.reachesIntoOuterContext);
|
||||
// make sure to preserve the precedence filter suppression during the merge
|
||||
if (config.precedenceFilterSuppressed) {
|
||||
existing.precedenceFilterSuppressed = true;
|
||||
}
|
||||
existing.context = merged; // replace context; no need to alt mapping
|
||||
return true;
|
||||
}
|
||||
// a previous (s,i,pi,_), merge with it and save result
|
||||
var rootIsWildcard = !this.fullCtx;
|
||||
var merged = merge(existing.context, config.context, rootIsWildcard, mergeCache);
|
||||
// no need to check for existing.context, config.context in cache
|
||||
// since only way to create new graphs is "call rule" and here. We
|
||||
// cache at both places.
|
||||
existing.reachesIntoOuterContext = Math.max( existing.reachesIntoOuterContext, config.reachesIntoOuterContext);
|
||||
// make sure to preserve the precedence filter suppression during the merge
|
||||
if (config.precedenceFilterSuppressed) {
|
||||
existing.precedenceFilterSuppressed = true;
|
||||
}
|
||||
existing.context = merged; // replace context; no need to alt mapping
|
||||
return true;
|
||||
};
|
||||
|
||||
ATNConfigSet.prototype.getStates = function() {
|
||||
var states = new Set();
|
||||
for (var i = 0; i < this.configs.length; i++) {
|
||||
states.add(this.configs[i].state);
|
||||
getStates() {
|
||||
const states = new Utils.Set();
|
||||
for (let i = 0; i < this.configs.length; i++) {
|
||||
states.add(this.configs[i].state);
|
||||
}
|
||||
return states;
|
||||
}
|
||||
return states;
|
||||
};
|
||||
|
||||
ATNConfigSet.prototype.getPredicates = function() {
|
||||
var preds = [];
|
||||
for (var i = 0; i < this.configs.length; i++) {
|
||||
var c = this.configs[i].semanticContext;
|
||||
if (c !== SemanticContext.NONE) {
|
||||
preds.push(c.semanticContext);
|
||||
getPredicates() {
|
||||
const preds = [];
|
||||
for (let i = 0; i < this.configs.length; i++) {
|
||||
const c = this.configs[i].semanticContext;
|
||||
if (c !== SemanticContext.NONE) {
|
||||
preds.push(c.semanticContext);
|
||||
}
|
||||
}
|
||||
return preds;
|
||||
}
|
||||
|
||||
optimizeConfigs(interpreter) {
|
||||
if (this.readOnly) {
|
||||
throw "This set is readonly";
|
||||
}
|
||||
if (this.configLookup.length === 0) {
|
||||
return;
|
||||
}
|
||||
for (let i = 0; i < this.configs.length; i++) {
|
||||
const config = this.configs[i];
|
||||
config.context = interpreter.getCachedContext(config.context);
|
||||
}
|
||||
}
|
||||
return preds;
|
||||
};
|
||||
|
||||
Object.defineProperty(ATNConfigSet.prototype, "items", {
|
||||
get : function() {
|
||||
addAll(coll) {
|
||||
for (let i = 0; i < coll.length; i++) {
|
||||
this.add(coll[i]);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
equals(other) {
|
||||
return this === other ||
|
||||
(other instanceof ATNConfigSet &&
|
||||
Utils.equalArrays(this.configs, other.configs) &&
|
||||
this.fullCtx === other.fullCtx &&
|
||||
this.uniqueAlt === other.uniqueAlt &&
|
||||
this.conflictingAlts === other.conflictingAlts &&
|
||||
this.hasSemanticContext === other.hasSemanticContext &&
|
||||
this.dipsIntoOuterContext === other.dipsIntoOuterContext);
|
||||
}
|
||||
|
||||
hashCode() {
|
||||
const hash = new Utils.Hash();
|
||||
hash.update(this.configs);
|
||||
return hash.finish();
|
||||
}
|
||||
|
||||
updateHashCode(hash) {
|
||||
if (this.readOnly) {
|
||||
if (this.cachedHashCode === -1) {
|
||||
this.cachedHashCode = this.hashCode();
|
||||
}
|
||||
hash.update(this.cachedHashCode);
|
||||
} else {
|
||||
hash.update(this.hashCode());
|
||||
}
|
||||
}
|
||||
|
||||
isEmpty() {
|
||||
return this.configs.length === 0;
|
||||
}
|
||||
|
||||
contains(item) {
|
||||
if (this.configLookup === null) {
|
||||
throw "This method is not implemented for readonly sets.";
|
||||
}
|
||||
return this.configLookup.contains(item);
|
||||
}
|
||||
|
||||
containsFast(item) {
|
||||
if (this.configLookup === null) {
|
||||
throw "This method is not implemented for readonly sets.";
|
||||
}
|
||||
return this.configLookup.containsFast(item);
|
||||
}
|
||||
|
||||
clear() {
|
||||
if (this.readOnly) {
|
||||
throw "This set is readonly";
|
||||
}
|
||||
this.configs = [];
|
||||
this.cachedHashCode = -1;
|
||||
this.configLookup = new Utils.Set();
|
||||
}
|
||||
|
||||
setReadonly(readOnly) {
|
||||
this.readOnly = readOnly;
|
||||
if (readOnly) {
|
||||
this.configLookup = null; // can't mod, no need for lookup cache
|
||||
}
|
||||
}
|
||||
|
||||
toString() {
|
||||
return Utils.arrayToString(this.configs) +
|
||||
(this.hasSemanticContext ? ",hasSemanticContext=" + this.hasSemanticContext : "") +
|
||||
(this.uniqueAlt !== ATN.INVALID_ALT_NUMBER ? ",uniqueAlt=" + this.uniqueAlt : "") +
|
||||
(this.conflictingAlts !== null ? ",conflictingAlts=" + this.conflictingAlts : "") +
|
||||
(this.dipsIntoOuterContext ? ",dipsIntoOuterContext" : "");
|
||||
}
|
||||
|
||||
get items(){
|
||||
return this.configs;
|
||||
}
|
||||
});
|
||||
|
||||
ATNConfigSet.prototype.optimizeConfigs = function(interpreter) {
|
||||
if (this.readOnly) {
|
||||
throw "This set is readonly";
|
||||
}
|
||||
if (this.configLookup.length === 0) {
|
||||
return;
|
||||
}
|
||||
for (var i = 0; i < this.configs.length; i++) {
|
||||
var config = this.configs[i];
|
||||
config.context = interpreter.getCachedContext(config.context);
|
||||
}
|
||||
};
|
||||
|
||||
ATNConfigSet.prototype.addAll = function(coll) {
|
||||
for (var i = 0; i < coll.length; i++) {
|
||||
this.add(coll[i]);
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
ATNConfigSet.prototype.equals = function(other) {
|
||||
return this === other ||
|
||||
(other instanceof ATNConfigSet &&
|
||||
Utils.equalArrays(this.configs, other.configs) &&
|
||||
this.fullCtx === other.fullCtx &&
|
||||
this.uniqueAlt === other.uniqueAlt &&
|
||||
this.conflictingAlts === other.conflictingAlts &&
|
||||
this.hasSemanticContext === other.hasSemanticContext &&
|
||||
this.dipsIntoOuterContext === other.dipsIntoOuterContext);
|
||||
};
|
||||
|
||||
ATNConfigSet.prototype.hashCode = function() {
|
||||
var hash = new Hash();
|
||||
hash.update(this.configs);
|
||||
return hash.finish();
|
||||
};
|
||||
|
||||
|
||||
ATNConfigSet.prototype.updateHashCode = function(hash) {
|
||||
if (this.readOnly) {
|
||||
if (this.cachedHashCode === -1) {
|
||||
this.cachedHashCode = this.hashCode();
|
||||
}
|
||||
hash.update(this.cachedHashCode);
|
||||
} else {
|
||||
hash.update(this.hashCode());
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
Object.defineProperty(ATNConfigSet.prototype, "length", {
|
||||
get : function() {
|
||||
get length(){
|
||||
return this.configs.length;
|
||||
}
|
||||
});
|
||||
|
||||
ATNConfigSet.prototype.isEmpty = function() {
|
||||
return this.configs.length === 0;
|
||||
};
|
||||
|
||||
ATNConfigSet.prototype.contains = function(item) {
|
||||
if (this.configLookup === null) {
|
||||
throw "This method is not implemented for readonly sets.";
|
||||
}
|
||||
return this.configLookup.contains(item);
|
||||
};
|
||||
|
||||
ATNConfigSet.prototype.containsFast = function(item) {
|
||||
if (this.configLookup === null) {
|
||||
throw "This method is not implemented for readonly sets.";
|
||||
}
|
||||
return this.configLookup.containsFast(item);
|
||||
};
|
||||
|
||||
ATNConfigSet.prototype.clear = function() {
|
||||
if (this.readOnly) {
|
||||
throw "This set is readonly";
|
||||
}
|
||||
this.configs = [];
|
||||
this.cachedHashCode = -1;
|
||||
this.configLookup = new Set();
|
||||
};
|
||||
|
||||
ATNConfigSet.prototype.setReadonly = function(readOnly) {
|
||||
this.readOnly = readOnly;
|
||||
if (readOnly) {
|
||||
this.configLookup = null; // can't mod, no need for lookup cache
|
||||
}
|
||||
};
|
||||
|
||||
ATNConfigSet.prototype.toString = function() {
|
||||
return Utils.arrayToString(this.configs) +
|
||||
(this.hasSemanticContext ? ",hasSemanticContext=" + this.hasSemanticContext : "") +
|
||||
(this.uniqueAlt !== ATN.INVALID_ALT_NUMBER ? ",uniqueAlt=" + this.uniqueAlt : "") +
|
||||
(this.conflictingAlts !== null ? ",conflictingAlts=" + this.conflictingAlts : "") +
|
||||
(this.dipsIntoOuterContext ? ",dipsIntoOuterContext" : "");
|
||||
};
|
||||
|
||||
function OrderedATNConfigSet() {
|
||||
ATNConfigSet.call(this);
|
||||
this.configLookup = new Set();
|
||||
return this;
|
||||
}
|
||||
|
||||
OrderedATNConfigSet.prototype = Object.create(ATNConfigSet.prototype);
|
||||
OrderedATNConfigSet.prototype.constructor = OrderedATNConfigSet;
|
||||
|
||||
exports.ATNConfigSet = ATNConfigSet;
|
||||
exports.OrderedATNConfigSet = OrderedATNConfigSet;
|
||||
class OrderedATNConfigSet extends ATNConfigSet {
|
||||
constructor() {
|
||||
super();
|
||||
this.configLookup = new Utils.Set();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
ATNConfigSet,
|
||||
OrderedATNConfigSet
|
||||
}
|
||||
|
|
|
@ -3,15 +3,15 @@
|
|||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
|
||||
/**
 * Options controlling ATN deserialization.
 *
 * @param copyFrom optional ATNDeserializationOptions to copy flags from;
 *                 when omitted/null the defaults are used (verifyATN=true,
 *                 generateRuleBypassTransitions=false).
 */
class ATNDeserializationOptions {
    constructor(copyFrom) {
        if(copyFrom===undefined) {
            copyFrom = null;
        }
        // new instances start mutable; defaultOptions below is frozen via this flag
        this.readOnly = false;
        this.verifyATN = copyFrom===null ? true : copyFrom.verifyATN;
        this.generateRuleBypassTransitions = copyFrom===null ? false : copyFrom.generateRuleBypassTransitions;
    }
}

// Shared immutable default configuration.
ATNDeserializationOptions.defaultOptions = new ATNDeserializationOptions();
ATNDeserializationOptions.defaultOptions.readOnly = true;
||||
|
||||
// Export the class directly (single-export CommonJS module).
module.exports = ATNDeserializationOptions
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,52 +1,52 @@
|
|||
//
|
||||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
///
|
||||
|
||||
const {DFAState} = require('./../dfa/DFAState');
const {ATNConfigSet} = require('./ATNConfigSet');
const {getCachedPredictionContext} = require('./../PredictionContext');
const {Map} = require('./../Utils');
|
||||
|
||||
class ATNSimulator {
    constructor(atn, sharedContextCache) {
        /**
         * The context cache maps all PredictionContext objects that are ==
         * to a single cached copy. This cache is shared across all contexts
         * in all ATNConfigs in all DFA states. We rebuild each ATNConfigSet
         * to use only cached nodes/graphs in addDFAState(). We don't want to
         * fill this during closure() since there are lots of contexts that
         * pop up but are not used ever again. It also greatly slows down closure().
         *
         * <p>This cache makes a huge difference in memory and a little bit in speed.
         * For the Java grammar on java.*, it dropped the memory requirements
         * at the end from 25M to 16M. We don't store any of the full context
         * graphs in the DFA because they are limited to local context only,
         * but apparently there's a lot of repetition there as well. We optimize
         * the config contexts before storing the config set in the DFA states
         * by literally rebuilding them with cached subgraphs only.</p>
         *
         * <p>I tried a cache for use during closure operations, that was
         * whacked after each adaptivePredict(). It cost a little bit
         * more time I think and doesn't save on the overall footprint
         * so it's not worth the complexity.</p>
         */
        this.atn = atn;
        this.sharedContextCache = sharedContextCache;
    }

    /**
     * Canonicalize a prediction context against the shared cache.
     * Returns the context unchanged when no cache is configured.
     */
    getCachedContext(context) {
        if (this.sharedContextCache === null) {
            return context;
        }
        const visited = new Map();
        return getCachedPredictionContext(context, this.sharedContextCache, visited);
    }
}

// Must distinguish between missing edge and edge we know leads nowhere
ATNSimulator.ERROR = new DFAState(0x7FFFFFFF, new ATNConfigSet());

module.exports = ATNSimulator;
|
||||
|
|
|
@ -1,84 +1,115 @@
|
|||
//
|
||||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
//
|
||||
|
||||
// The following images show the relation of states and
|
||||
// {@link ATNState//transitions} for various grammar constructs.
|
||||
//
|
||||
// <ul>
|
||||
//
|
||||
// <li>Solid edges marked with an &//0949; indicate a required
|
||||
// {@link EpsilonTransition}.</li>
|
||||
//
|
||||
// <li>Dashed edges indicate locations where any transition derived from
|
||||
// {@link Transition} might appear.</li>
|
||||
//
|
||||
// <li>Dashed nodes are place holders for either a sequence of linked
|
||||
// {@link BasicState} states or the inclusion of a block representing a nested
|
||||
// construct in one of the forms below.</li>
|
||||
//
|
||||
// <li>Nodes showing multiple outgoing alternatives with a {@code ...} support
|
||||
// any number of alternatives (one or more). Nodes without the {@code ...} only
|
||||
// support the exact number of alternatives shown in the diagram.</li>
|
||||
//
|
||||
// </ul>
|
||||
//
|
||||
// <h2>Basic Blocks</h2>
|
||||
//
|
||||
// <h3>Rule</h3>
|
||||
//
|
||||
// <embed src="images/Rule.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h3>Block of 1 or more alternatives</h3>
|
||||
//
|
||||
// <embed src="images/Block.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h2>Greedy Loops</h2>
|
||||
//
|
||||
// <h3>Greedy Closure: {@code (...)*}</h3>
|
||||
//
|
||||
// <embed src="images/ClosureGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h3>Greedy Positive Closure: {@code (...)+}</h3>
|
||||
//
|
||||
// <embed src="images/PositiveClosureGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h3>Greedy Optional: {@code (...)?}</h3>
|
||||
//
|
||||
// <embed src="images/OptionalGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h2>Non-Greedy Loops</h2>
|
||||
//
|
||||
// <h3>Non-Greedy Closure: {@code (...)*?}</h3>
|
||||
//
|
||||
// <embed src="images/ClosureNonGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h3>Non-Greedy Positive Closure: {@code (...)+?}</h3>
|
||||
//
|
||||
// <embed src="images/PositiveClosureNonGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h3>Non-Greedy Optional: {@code (...)??}</h3>
|
||||
//
|
||||
// <embed src="images/OptionalNonGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// Initial capacity hint for a state's transition list.
const INITIAL_NUM_TRANSITIONS = 4;
|
||||
/**
|
||||
* The following images show the relation of states and
|
||||
* {@link ATNState//transitions} for various grammar constructs.
|
||||
*
|
||||
* <ul>
|
||||
*
|
||||
* <li>Solid edges marked with an &//0949; indicate a required
|
||||
* {@link EpsilonTransition}.</li>
|
||||
*
|
||||
* <li>Dashed edges indicate locations where any transition derived from
|
||||
* {@link Transition} might appear.</li>
|
||||
*
|
||||
* <li>Dashed nodes are place holders for either a sequence of linked
|
||||
* {@link BasicState} states or the inclusion of a block representing a nested
|
||||
* construct in one of the forms below.</li>
|
||||
*
|
||||
* <li>Nodes showing multiple outgoing alternatives with a {@code ...} support
|
||||
* any number of alternatives (one or more). Nodes without the {@code ...} only
|
||||
* support the exact number of alternatives shown in the diagram.</li>
|
||||
*
|
||||
* </ul>
|
||||
*
|
||||
* <h2>Basic Blocks</h2>
|
||||
*
|
||||
* <h3>Rule</h3>
|
||||
*
|
||||
* <embed src="images/Rule.svg" type="image/svg+xml"/>
|
||||
*
|
||||
* <h3>Block of 1 or more alternatives</h3>
|
||||
*
|
||||
* <embed src="images/Block.svg" type="image/svg+xml"/>
|
||||
*
|
||||
* <h2>Greedy Loops</h2>
|
||||
*
|
||||
* <h3>Greedy Closure: {@code (...)*}</h3>
|
||||
*
|
||||
* <embed src="images/ClosureGreedy.svg" type="image/svg+xml"/>
|
||||
*
|
||||
* <h3>Greedy Positive Closure: {@code (...)+}</h3>
|
||||
*
|
||||
* <embed src="images/PositiveClosureGreedy.svg" type="image/svg+xml"/>
|
||||
*
|
||||
* <h3>Greedy Optional: {@code (...)?}</h3>
|
||||
*
|
||||
* <embed src="images/OptionalGreedy.svg" type="image/svg+xml"/>
|
||||
*
|
||||
* <h2>Non-Greedy Loops</h2>
|
||||
*
|
||||
* <h3>Non-Greedy Closure: {@code (...)*?}</h3>
|
||||
*
|
||||
* <embed src="images/ClosureNonGreedy.svg" type="image/svg+xml"/>
|
||||
*
|
||||
* <h3>Non-Greedy Positive Closure: {@code (...)+?}</h3>
|
||||
*
|
||||
* <embed src="images/PositiveClosureNonGreedy.svg" type="image/svg+xml"/>
|
||||
*
|
||||
* <h3>Non-Greedy Optional: {@code (...)??}</h3>
|
||||
*
|
||||
* <embed src="images/OptionalNonGreedy.svg" type="image/svg+xml"/>
|
||||
*/
|
||||
class ATNState {
    constructor() {
        // Which ATN are we in?
        this.atn = null;
        this.stateNumber = ATNState.INVALID_STATE_NUMBER;
        this.stateType = null;
        this.ruleIndex = 0; // at runtime, we don't have Rule objects
        this.epsilonOnlyTransitions = false;
        // Track the transitions emanating from this ATN state.
        this.transitions = [];
        // Used to cache lookahead during parsing, not used during construction
        this.nextTokenWithinRule = null;
    }

    toString() {
        // Note: returns the numeric state number, as the original did.
        return this.stateNumber;
    }

    equals(other) {
        if (other instanceof ATNState) {
            return this.stateNumber===other.stateNumber;
        } else {
            return false;
        }
    }

    isNonGreedyExitState() {
        return false;
    }

    /**
     * Append (index === -1 / omitted) or replace (0 <= index) a transition,
     * keeping epsilonOnlyTransitions in sync: true only while every
     * transition added so far is epsilon.
     */
    addTransition(trans, index) {
        if(index===undefined) {
            index = -1;
        }
        if (this.transitions.length===0) {
            this.epsilonOnlyTransitions = trans.isEpsilon;
        } else if(this.epsilonOnlyTransitions !== trans.isEpsilon) {
            this.epsilonOnlyTransitions = false;
        }
        if (index===-1) {
            this.transitions.push(trans);
        } else {
            this.transitions.splice(index, 1, trans);
        }
    }
}
|
||||
|
||||
// constants for serialization
|
||||
|
@ -113,214 +144,172 @@ ATNState.serializationNames = [
|
|||
|
||||
ATNState.INVALID_STATE_NUMBER = -1;
|
||||
|
||||
/**
 * An ordinary ATN state with no special role.
 */
class BasicState extends ATNState {
    constructor() {
        super();
        this.stateType = ATNState.BASIC;
    }
}
|
||||
|
||||
/**
 * Base class for states at which the parser must choose between
 * alternatives; carries the decision number and greediness flag.
 */
class DecisionState extends ATNState {
    constructor() {
        super();
        this.decision = -1;
        this.nonGreedy = false;
    }
}
|
||||
|
||||
/**
 * The start of a regular {@code (...)} block
 */
class BlockStartState extends DecisionState {
    constructor() {
        super();
        this.endState = null;
    }
}
|
||||
|
||||
|
||||
/**
 * Plain block-start state for an ordinary {@code (...)} block.
 */
class BasicBlockStartState extends BlockStartState {
    constructor() {
        super();
        this.stateType = ATNState.BLOCK_START;
    }
}
|
||||
|
||||
/**
 * Terminal node of a simple {@code (a|b|c)} block
 */
class BlockEndState extends ATNState {
    constructor() {
        super();
        this.stateType = ATNState.BLOCK_END;
        this.startState = null;
    }
}
|
||||
/**
 * The last node in the ATN for a rule, unless that rule is the start symbol.
 * In that case, there is one transition to EOF. Later, we might encode
 * references to all calls to this rule to compute FOLLOW sets for
 * error handling
 */
class RuleStopState extends ATNState {
    constructor() {
        super();
        this.stateType = ATNState.RULE_STOP;
    }
}
|
||||
/**
 * Entry state of a rule; links to its matching RuleStopState.
 */
class RuleStartState extends ATNState {
    constructor() {
        super();
        this.stateType = ATNState.RULE_START;
        this.stopState = null;
        this.isPrecedenceRule = false;
    }
}
|
||||
/**
 * Decision state for {@code A+} and {@code (A|B)+}. It has two transitions:
 * one to the loop back to start of the block and one to exit.
 */
class PlusLoopbackState extends DecisionState {
    constructor() {
        super();
        this.stateType = ATNState.PLUS_LOOP_BACK;
    }
}
|
||||
/**
 * Start of {@code (A|B|...)+} loop. Technically a decision state, but
 * we don't use for code generation; somebody might need it, so I'm defining
 * it for completeness. In reality, the {@link PlusLoopbackState} node is the
 * real decision-making note for {@code A+}
 */
class PlusBlockStartState extends BlockStartState {
    constructor() {
        super();
        this.stateType = ATNState.PLUS_BLOCK_START;
        this.loopBackState = null;
    }
}
|
||||
/**
 * The block that begins a closure loop
 */
class StarBlockStartState extends BlockStartState {
    constructor() {
        super();
        this.stateType = ATNState.STAR_BLOCK_START;
    }
}
|
||||
/**
 * Loop-back state of a {@code (...)*} closure.
 */
class StarLoopbackState extends ATNState {
    constructor() {
        super();
        this.stateType = ATNState.STAR_LOOP_BACK;
    }
}
|
||||
/**
 * Entry decision state of a {@code (...)*} loop.
 */
class StarLoopEntryState extends DecisionState {
    constructor() {
        super();
        this.stateType = ATNState.STAR_LOOP_ENTRY;
        this.loopBackState = null;
        // Indicates whether this state can benefit from a precedence DFA during SLL decision making.
        this.isPrecedenceDecision = null;
    }
}
|
||||
/**
 * Mark the end of a * or + loop
 */
class LoopEndState extends ATNState {
    constructor() {
        super();
        this.stateType = ATNState.LOOP_END;
        this.loopBackState = null;
    }
}
|
||||
/**
|
||||
* The Tokens rule start state linking to each lexer rule start state
|
||||
*/
|
||||
class TokensStartState extends DecisionState {
|
||||
constructor() {
|
||||
super();
|
||||
this.stateType = ATNState.TOKEN_START;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
StarLoopEntryState.prototype = Object.create(DecisionState.prototype);
|
||||
StarLoopEntryState.prototype.constructor = StarLoopEntryState;
|
||||
|
||||
|
||||
// Mark the end of a * or + loop.
|
||||
function LoopEndState() {
|
||||
ATNState.call(this);
|
||||
this.stateType = ATNState.LOOP_END;
|
||||
this.loopBackState = null;
|
||||
return this;
|
||||
module.exports = {
|
||||
ATNState,
|
||||
BasicState,
|
||||
DecisionState,
|
||||
BlockStartState,
|
||||
BlockEndState,
|
||||
LoopEndState,
|
||||
RuleStartState,
|
||||
RuleStopState,
|
||||
TokensStartState,
|
||||
PlusLoopbackState,
|
||||
StarLoopbackState,
|
||||
StarLoopEntryState,
|
||||
PlusBlockStartState,
|
||||
StarBlockStartState,
|
||||
BasicBlockStartState
|
||||
}
|
||||
|
||||
LoopEndState.prototype = Object.create(ATNState.prototype);
|
||||
LoopEndState.prototype.constructor = LoopEndState;
|
||||
|
||||
|
||||
// The Tokens rule start state linking to each lexer rule start state */
|
||||
function TokensStartState() {
|
||||
DecisionState.call(this);
|
||||
this.stateType = ATNState.TOKEN_START;
|
||||
return this;
|
||||
}
|
||||
|
||||
TokensStartState.prototype = Object.create(DecisionState.prototype);
|
||||
TokensStartState.prototype.constructor = TokensStartState;
|
||||
|
||||
exports.ATNState = ATNState;
|
||||
exports.BasicState = BasicState;
|
||||
exports.DecisionState = DecisionState;
|
||||
exports.BlockStartState = BlockStartState;
|
||||
exports.BlockEndState = BlockEndState;
|
||||
exports.LoopEndState = LoopEndState;
|
||||
exports.RuleStartState = RuleStartState;
|
||||
exports.RuleStopState = RuleStopState;
|
||||
exports.TokensStartState = TokensStartState;
|
||||
exports.PlusLoopbackState = PlusLoopbackState;
|
||||
exports.StarLoopbackState = StarLoopbackState;
|
||||
exports.StarLoopEntryState = StarLoopEntryState;
|
||||
exports.PlusBlockStartState = PlusBlockStartState;
|
||||
exports.StarBlockStartState = StarBlockStartState;
|
||||
exports.BasicBlockStartState = BasicBlockStartState;
|
||||
|
|
|
@ -2,16 +2,12 @@
|
|||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
///
|
||||
|
||||
// Represents the type of recognizer an ATN applies to.
|
||||
|
||||
function ATNType() {
|
||||
|
||||
}
|
||||
|
||||
ATNType.LEXER = 0;
|
||||
ATNType.PARSER = 1;
|
||||
|
||||
exports.ATNType = ATNType;
|
||||
/**
|
||||
* Represents the type of recognizer an ATN applies to
|
||||
*/
|
||||
module.exports = {
|
||||
LEXER: 0,
|
||||
PARSER: 1
|
||||
};
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,366 +1,384 @@
|
|||
//
|
||||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
//
|
||||
|
||||
function LexerActionType() {
|
||||
const LexerActionType = {
|
||||
// The type of a {@link LexerChannelAction} action.
|
||||
CHANNEL: 0,
|
||||
// The type of a {@link LexerCustomAction} action
|
||||
CUSTOM: 1,
|
||||
// The type of a {@link LexerModeAction} action.
|
||||
MODE: 2,
|
||||
//The type of a {@link LexerMoreAction} action.
|
||||
MORE: 3,
|
||||
//The type of a {@link LexerPopModeAction} action.
|
||||
POP_MODE: 4,
|
||||
//The type of a {@link LexerPushModeAction} action.
|
||||
PUSH_MODE: 5,
|
||||
//The type of a {@link LexerSkipAction} action.
|
||||
SKIP: 6,
|
||||
//The type of a {@link LexerTypeAction} action.
|
||||
TYPE: 7
|
||||
}
|
||||
|
||||
LexerActionType.CHANNEL = 0; //The type of a {@link LexerChannelAction} action.
|
||||
LexerActionType.CUSTOM = 1; //The type of a {@link LexerCustomAction} action.
|
||||
LexerActionType.MODE = 2; //The type of a {@link LexerModeAction} action.
|
||||
LexerActionType.MORE = 3; //The type of a {@link LexerMoreAction} action.
|
||||
LexerActionType.POP_MODE = 4; //The type of a {@link LexerPopModeAction} action.
|
||||
LexerActionType.PUSH_MODE = 5; //The type of a {@link LexerPushModeAction} action.
|
||||
LexerActionType.SKIP = 6; //The type of a {@link LexerSkipAction} action.
|
||||
LexerActionType.TYPE = 7; //The type of a {@link LexerTypeAction} action.
|
||||
class LexerAction {
|
||||
constructor(action) {
|
||||
this.actionType = action;
|
||||
this.isPositionDependent = false;
|
||||
}
|
||||
|
||||
function LexerAction(action) {
|
||||
this.actionType = action;
|
||||
this.isPositionDependent = false;
|
||||
return this;
|
||||
hashCode() {
|
||||
const hash = new Hash();
|
||||
this.updateHashCode(hash);
|
||||
return hash.finish()
|
||||
}
|
||||
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.actionType);
|
||||
}
|
||||
|
||||
equals(other) {
|
||||
return this === other;
|
||||
}
|
||||
}
|
||||
|
||||
LexerAction.prototype.hashCode = function() {
|
||||
var hash = new Hash();
|
||||
this.updateHashCode(hash);
|
||||
return hash.finish()
|
||||
};
|
||||
|
||||
LexerAction.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.actionType);
|
||||
};
|
||||
/**
|
||||
* Implements the {@code skip} lexer action by calling {@link Lexer//skip}.
|
||||
*
|
||||
* <p>The {@code skip} command does not have any parameters, so this action is
|
||||
* implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
|
||||
*/
|
||||
class LexerSkipAction extends LexerAction {
|
||||
constructor() {
|
||||
super(LexerActionType.SKIP);
|
||||
}
|
||||
|
||||
LexerAction.prototype.equals = function(other) {
|
||||
return this === other;
|
||||
};
|
||||
execute(lexer) {
|
||||
lexer.skip();
|
||||
}
|
||||
|
||||
|
||||
|
||||
//
|
||||
// Implements the {@code skip} lexer action by calling {@link Lexer//skip}.
|
||||
//
|
||||
// <p>The {@code skip} command does not have any parameters, so this action is
|
||||
// implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
|
||||
function LexerSkipAction() {
|
||||
LexerAction.call(this, LexerActionType.SKIP);
|
||||
return this;
|
||||
toString() {
|
||||
return "skip";
|
||||
}
|
||||
}
|
||||
|
||||
LexerSkipAction.prototype = Object.create(LexerAction.prototype);
|
||||
LexerSkipAction.prototype.constructor = LexerSkipAction;
|
||||
|
||||
// Provides a singleton instance of this parameterless lexer action.
|
||||
LexerSkipAction.INSTANCE = new LexerSkipAction();
|
||||
|
||||
LexerSkipAction.prototype.execute = function(lexer) {
|
||||
lexer.skip();
|
||||
};
|
||||
|
||||
LexerSkipAction.prototype.toString = function() {
|
||||
return "skip";
|
||||
};
|
||||
|
||||
// Implements the {@code type} lexer action by calling {@link Lexer//setType}
|
||||
// with the assigned type.
|
||||
function LexerTypeAction(type) {
|
||||
LexerAction.call(this, LexerActionType.TYPE);
|
||||
this.type = type;
|
||||
return this;
|
||||
}
|
||||
|
||||
LexerTypeAction.prototype = Object.create(LexerAction.prototype);
|
||||
LexerTypeAction.prototype.constructor = LexerTypeAction;
|
||||
|
||||
LexerTypeAction.prototype.execute = function(lexer) {
|
||||
lexer.type = this.type;
|
||||
};
|
||||
|
||||
LexerTypeAction.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.actionType, this.type);
|
||||
};
|
||||
|
||||
|
||||
LexerTypeAction.prototype.equals = function(other) {
|
||||
if(this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerTypeAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.type === other.type;
|
||||
/**
|
||||
* Implements the {@code type} lexer action by calling {@link Lexer//setType}
|
||||
* with the assigned type
|
||||
*/
|
||||
class LexerTypeAction extends LexerAction {
|
||||
constructor(type) {
|
||||
super(LexerActionType.TYPE);
|
||||
this.type = type;
|
||||
}
|
||||
};
|
||||
|
||||
LexerTypeAction.prototype.toString = function() {
|
||||
return "type(" + this.type + ")";
|
||||
};
|
||||
|
||||
// Implements the {@code pushMode} lexer action by calling
|
||||
// {@link Lexer//pushMode} with the assigned mode.
|
||||
function LexerPushModeAction(mode) {
|
||||
LexerAction.call(this, LexerActionType.PUSH_MODE);
|
||||
this.mode = mode;
|
||||
return this;
|
||||
}
|
||||
|
||||
LexerPushModeAction.prototype = Object.create(LexerAction.prototype);
|
||||
LexerPushModeAction.prototype.constructor = LexerPushModeAction;
|
||||
|
||||
// <p>This action is implemented by calling {@link Lexer//pushMode} with the
|
||||
// value provided by {@link //getMode}.</p>
|
||||
LexerPushModeAction.prototype.execute = function(lexer) {
|
||||
lexer.pushMode(this.mode);
|
||||
};
|
||||
|
||||
LexerPushModeAction.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.actionType, this.mode);
|
||||
};
|
||||
|
||||
LexerPushModeAction.prototype.equals = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerPushModeAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.mode === other.mode;
|
||||
execute(lexer) {
|
||||
lexer.type = this.type;
|
||||
}
|
||||
};
|
||||
|
||||
LexerPushModeAction.prototype.toString = function() {
|
||||
return "pushMode(" + this.mode + ")";
|
||||
};
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.actionType, this.type);
|
||||
}
|
||||
|
||||
equals(other) {
|
||||
if(this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerTypeAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.type === other.type;
|
||||
}
|
||||
}
|
||||
|
||||
// Implements the {@code popMode} lexer action by calling {@link Lexer//popMode}.
|
||||
//
|
||||
// <p>The {@code popMode} command does not have any parameters, so this action is
|
||||
// implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
|
||||
function LexerPopModeAction() {
|
||||
LexerAction.call(this,LexerActionType.POP_MODE);
|
||||
return this;
|
||||
toString() {
|
||||
return "type(" + this.type + ")";
|
||||
}
|
||||
}
|
||||
|
||||
LexerPopModeAction.prototype = Object.create(LexerAction.prototype);
|
||||
LexerPopModeAction.prototype.constructor = LexerPopModeAction;
|
||||
|
||||
/**
|
||||
* Implements the {@code pushMode} lexer action by calling
|
||||
* {@link Lexer//pushMode} with the assigned mode
|
||||
*/
|
||||
class LexerPushModeAction extends LexerAction {
|
||||
constructor(mode) {
|
||||
super(LexerActionType.PUSH_MODE);
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>This action is implemented by calling {@link Lexer//pushMode} with the
|
||||
* value provided by {@link //getMode}.</p>
|
||||
*/
|
||||
execute(lexer) {
|
||||
lexer.pushMode(this.mode);
|
||||
}
|
||||
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.actionType, this.mode);
|
||||
}
|
||||
|
||||
equals(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerPushModeAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.mode === other.mode;
|
||||
}
|
||||
}
|
||||
|
||||
toString() {
|
||||
return "pushMode(" + this.mode + ")";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Implements the {@code popMode} lexer action by calling {@link Lexer//popMode}.
|
||||
*
|
||||
* <p>The {@code popMode} command does not have any parameters, so this action is
|
||||
* implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
|
||||
*/
|
||||
class LexerPopModeAction extends LexerAction {
|
||||
constructor() {
|
||||
super(LexerActionType.POP_MODE);
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>This action is implemented by calling {@link Lexer//popMode}.</p>
|
||||
*/
|
||||
execute(lexer) {
|
||||
lexer.popMode();
|
||||
}
|
||||
|
||||
toString() {
|
||||
return "popMode";
|
||||
}
|
||||
}
|
||||
|
||||
LexerPopModeAction.INSTANCE = new LexerPopModeAction();
|
||||
|
||||
// <p>This action is implemented by calling {@link Lexer//popMode}.</p>
|
||||
LexerPopModeAction.prototype.execute = function(lexer) {
|
||||
lexer.popMode();
|
||||
};
|
||||
/**
|
||||
* Implements the {@code more} lexer action by calling {@link Lexer//more}.
|
||||
*
|
||||
* <p>The {@code more} command does not have any parameters, so this action is
|
||||
* implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
|
||||
*/
|
||||
class LexerMoreAction extends LexerAction {
|
||||
constructor() {
|
||||
super(LexerActionType.MORE);
|
||||
}
|
||||
|
||||
LexerPopModeAction.prototype.toString = function() {
|
||||
return "popMode";
|
||||
};
|
||||
/**
|
||||
* <p>This action is implemented by calling {@link Lexer//popMode}.</p>
|
||||
*/
|
||||
execute(lexer) {
|
||||
lexer.more();
|
||||
}
|
||||
|
||||
// Implements the {@code more} lexer action by calling {@link Lexer//more}.
|
||||
//
|
||||
// <p>The {@code more} command does not have any parameters, so this action is
|
||||
// implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
|
||||
function LexerMoreAction() {
|
||||
LexerAction.call(this, LexerActionType.MORE);
|
||||
return this;
|
||||
toString() {
|
||||
return "more";
|
||||
}
|
||||
}
|
||||
|
||||
LexerMoreAction.prototype = Object.create(LexerAction.prototype);
|
||||
LexerMoreAction.prototype.constructor = LexerMoreAction;
|
||||
|
||||
LexerMoreAction.INSTANCE = new LexerMoreAction();
|
||||
|
||||
// <p>This action is implemented by calling {@link Lexer//popMode}.</p>
|
||||
LexerMoreAction.prototype.execute = function(lexer) {
|
||||
lexer.more();
|
||||
};
|
||||
|
||||
LexerMoreAction.prototype.toString = function() {
|
||||
return "more";
|
||||
};
|
||||
/**
|
||||
* Implements the {@code mode} lexer action by calling {@link Lexer//mode} with
|
||||
* the assigned mode
|
||||
*/
|
||||
class LexerModeAction extends LexerAction {
|
||||
constructor(mode) {
|
||||
super(LexerActionType.MODE);
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>This action is implemented by calling {@link Lexer//mode} with the
|
||||
* value provided by {@link //getMode}.</p>
|
||||
*/
|
||||
execute(lexer) {
|
||||
lexer.mode(this.mode);
|
||||
}
|
||||
|
||||
// Implements the {@code mode} lexer action by calling {@link Lexer//mode} with
|
||||
// the assigned mode.
|
||||
function LexerModeAction(mode) {
|
||||
LexerAction.call(this, LexerActionType.MODE);
|
||||
this.mode = mode;
|
||||
return this;
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.actionType, this.mode);
|
||||
}
|
||||
|
||||
equals(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerModeAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.mode === other.mode;
|
||||
}
|
||||
}
|
||||
|
||||
toString() {
|
||||
return "mode(" + this.mode + ")";
|
||||
}
|
||||
}
|
||||
|
||||
LexerModeAction.prototype = Object.create(LexerAction.prototype);
|
||||
LexerModeAction.prototype.constructor = LexerModeAction;
|
||||
|
||||
// <p>This action is implemented by calling {@link Lexer//mode} with the
|
||||
// value provided by {@link //getMode}.</p>
|
||||
LexerModeAction.prototype.execute = function(lexer) {
|
||||
lexer.mode(this.mode);
|
||||
};
|
||||
|
||||
LexerModeAction.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.actionType, this.mode);
|
||||
};
|
||||
|
||||
LexerModeAction.prototype.equals = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerModeAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.mode === other.mode;
|
||||
/**
|
||||
* Executes a custom lexer action by calling {@link Recognizer//action} with the
|
||||
* rule and action indexes assigned to the custom action. The implementation of
|
||||
* a custom action is added to the generated code for the lexer in an override
|
||||
* of {@link Recognizer//action} when the grammar is compiled.
|
||||
*
|
||||
* <p>This class may represent embedded actions created with the <code>{...}</code>
|
||||
* syntax in ANTLR 4, as well as actions created for lexer commands where the
|
||||
* command argument could not be evaluated when the grammar was compiled.</p>
|
||||
*/
|
||||
class LexerCustomAction extends LexerAction {
|
||||
/**
|
||||
* Constructs a custom lexer action with the specified rule and action
|
||||
* indexes.
|
||||
*
|
||||
* @param ruleIndex The rule index to use for calls to
|
||||
* {@link Recognizer//action}.
|
||||
* @param actionIndex The action index to use for calls to
|
||||
* {@link Recognizer//action}.
|
||||
*/
|
||||
constructor(ruleIndex, actionIndex) {
|
||||
super(LexerActionType.CUSTOM);
|
||||
this.ruleIndex = ruleIndex;
|
||||
this.actionIndex = actionIndex;
|
||||
this.isPositionDependent = true;
|
||||
}
|
||||
};
|
||||
|
||||
LexerModeAction.prototype.toString = function() {
|
||||
return "mode(" + this.mode + ")";
|
||||
};
|
||||
/**
|
||||
* <p>Custom actions are implemented by calling {@link Lexer//action} with the
|
||||
* appropriate rule and action indexes.</p>
|
||||
*/
|
||||
execute(lexer) {
|
||||
lexer.action(null, this.ruleIndex, this.actionIndex);
|
||||
}
|
||||
|
||||
// Executes a custom lexer action by calling {@link Recognizer//action} with the
|
||||
// rule and action indexes assigned to the custom action. The implementation of
|
||||
// a custom action is added to the generated code for the lexer in an override
|
||||
// of {@link Recognizer//action} when the grammar is compiled.
|
||||
//
|
||||
// <p>This class may represent embedded actions created with the <code>{...}</code>
|
||||
// syntax in ANTLR 4, as well as actions created for lexer commands where the
|
||||
// command argument could not be evaluated when the grammar was compiled.</p>
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.actionType, this.ruleIndex, this.actionIndex);
|
||||
}
|
||||
|
||||
|
||||
// Constructs a custom lexer action with the specified rule and action
|
||||
// indexes.
|
||||
//
|
||||
// @param ruleIndex The rule index to use for calls to
|
||||
// {@link Recognizer//action}.
|
||||
// @param actionIndex The action index to use for calls to
|
||||
// {@link Recognizer//action}.
|
||||
|
||||
function LexerCustomAction(ruleIndex, actionIndex) {
|
||||
LexerAction.call(this, LexerActionType.CUSTOM);
|
||||
this.ruleIndex = ruleIndex;
|
||||
this.actionIndex = actionIndex;
|
||||
this.isPositionDependent = true;
|
||||
return this;
|
||||
equals(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerCustomAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.ruleIndex === other.ruleIndex && this.actionIndex === other.actionIndex;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
LexerCustomAction.prototype = Object.create(LexerAction.prototype);
|
||||
LexerCustomAction.prototype.constructor = LexerCustomAction;
|
||||
|
||||
// <p>Custom actions are implemented by calling {@link Lexer//action} with the
|
||||
// appropriate rule and action indexes.</p>
|
||||
LexerCustomAction.prototype.execute = function(lexer) {
|
||||
lexer.action(null, this.ruleIndex, this.actionIndex);
|
||||
};
|
||||
|
||||
LexerCustomAction.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.actionType, this.ruleIndex, this.actionIndex);
|
||||
};
|
||||
|
||||
LexerCustomAction.prototype.equals = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerCustomAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.ruleIndex === other.ruleIndex && this.actionIndex === other.actionIndex;
|
||||
/**
|
||||
* Implements the {@code channel} lexer action by calling
|
||||
* {@link Lexer//setChannel} with the assigned channel.
|
||||
* Constructs a new {@code channel} action with the specified channel value.
|
||||
* @param channel The channel value to pass to {@link Lexer//setChannel}
|
||||
*/
|
||||
class LexerChannelAction extends LexerAction {
|
||||
constructor(channel) {
|
||||
super(LexerActionType.CHANNEL);
|
||||
this.channel = channel;
|
||||
}
|
||||
};
|
||||
|
||||
// Implements the {@code channel} lexer action by calling
|
||||
// {@link Lexer//setChannel} with the assigned channel.
|
||||
// Constructs a new {@code channel} action with the specified channel value.
|
||||
// @param channel The channel value to pass to {@link Lexer//setChannel}.
|
||||
function LexerChannelAction(channel) {
|
||||
LexerAction.call(this, LexerActionType.CHANNEL);
|
||||
this.channel = channel;
|
||||
return this;
|
||||
/**
|
||||
* <p>This action is implemented by calling {@link Lexer//setChannel} with the
|
||||
* value provided by {@link //getChannel}.</p>
|
||||
*/
|
||||
execute(lexer) {
|
||||
lexer._channel = this.channel;
|
||||
}
|
||||
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.actionType, this.channel);
|
||||
}
|
||||
|
||||
equals(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerChannelAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.channel === other.channel;
|
||||
}
|
||||
}
|
||||
|
||||
toString() {
|
||||
return "channel(" + this.channel + ")";
|
||||
}
|
||||
}
|
||||
|
||||
LexerChannelAction.prototype = Object.create(LexerAction.prototype);
|
||||
LexerChannelAction.prototype.constructor = LexerChannelAction;
|
||||
|
||||
// <p>This action is implemented by calling {@link Lexer//setChannel} with the
|
||||
// value provided by {@link //getChannel}.</p>
|
||||
LexerChannelAction.prototype.execute = function(lexer) {
|
||||
lexer._channel = this.channel;
|
||||
};
|
||||
|
||||
LexerChannelAction.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.actionType, this.channel);
|
||||
};
|
||||
|
||||
LexerChannelAction.prototype.equals = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerChannelAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.channel === other.channel;
|
||||
/**
|
||||
* This implementation of {@link LexerAction} is used for tracking input offsets
|
||||
* for position-dependent actions within a {@link LexerActionExecutor}.
|
||||
*
|
||||
* <p>This action is not serialized as part of the ATN, and is only required for
|
||||
* position-dependent lexer actions which appear at a location other than the
|
||||
* end of a rule. For more information about DFA optimizations employed for
|
||||
* lexer actions, see {@link LexerActionExecutor//append} and
|
||||
* {@link LexerActionExecutor//fixOffsetBeforeMatch}.</p>
|
||||
*
|
||||
* Constructs a new indexed custom action by associating a character offset
|
||||
* with a {@link LexerAction}.
|
||||
*
|
||||
* <p>Note: This class is only required for lexer actions for which
|
||||
* {@link LexerAction//isPositionDependent} returns {@code true}.</p>
|
||||
*
|
||||
* @param offset The offset into the input {@link CharStream}, relative to
|
||||
* the token start index, at which the specified lexer action should be
|
||||
* executed.
|
||||
* @param action The lexer action to execute at a particular offset in the
|
||||
* input {@link CharStream}.
|
||||
*/
|
||||
class LexerIndexedCustomAction extends LexerAction {
|
||||
constructor(offset, action) {
|
||||
super(action.actionType);
|
||||
this.offset = offset;
|
||||
this.action = action;
|
||||
this.isPositionDependent = true;
|
||||
}
|
||||
};
|
||||
|
||||
LexerChannelAction.prototype.toString = function() {
|
||||
return "channel(" + this.channel + ")";
|
||||
};
|
||||
/**
|
||||
* <p>This method calls {@link //execute} on the result of {@link //getAction}
|
||||
* using the provided {@code lexer}.</p>
|
||||
*/
|
||||
execute(lexer) {
|
||||
// assume the input stream position was properly set by the calling code
|
||||
this.action.execute(lexer);
|
||||
}
|
||||
|
||||
// This implementation of {@link LexerAction} is used for tracking input offsets
|
||||
// for position-dependent actions within a {@link LexerActionExecutor}.
|
||||
//
|
||||
// <p>This action is not serialized as part of the ATN, and is only required for
|
||||
// position-dependent lexer actions which appear at a location other than the
|
||||
// end of a rule. For more information about DFA optimizations employed for
|
||||
// lexer actions, see {@link LexerActionExecutor//append} and
|
||||
// {@link LexerActionExecutor//fixOffsetBeforeMatch}.</p>
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.actionType, this.offset, this.action);
|
||||
}
|
||||
|
||||
// Constructs a new indexed custom action by associating a character offset
|
||||
// with a {@link LexerAction}.
|
||||
//
|
||||
// <p>Note: This class is only required for lexer actions for which
|
||||
// {@link LexerAction//isPositionDependent} returns {@code true}.</p>
|
||||
//
|
||||
// @param offset The offset into the input {@link CharStream}, relative to
|
||||
// the token start index, at which the specified lexer action should be
|
||||
// executed.
|
||||
// @param action The lexer action to execute at a particular offset in the
|
||||
// input {@link CharStream}.
|
||||
function LexerIndexedCustomAction(offset, action) {
|
||||
LexerAction.call(this, action.actionType);
|
||||
this.offset = offset;
|
||||
this.action = action;
|
||||
this.isPositionDependent = true;
|
||||
return this;
|
||||
equals(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerIndexedCustomAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.offset === other.offset && this.action === other.action;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
LexerIndexedCustomAction.prototype = Object.create(LexerAction.prototype);
|
||||
LexerIndexedCustomAction.prototype.constructor = LexerIndexedCustomAction;
|
||||
|
||||
// <p>This method calls {@link //execute} on the result of {@link //getAction}
|
||||
// using the provided {@code lexer}.</p>
|
||||
LexerIndexedCustomAction.prototype.execute = function(lexer) {
|
||||
// assume the input stream position was properly set by the calling code
|
||||
this.action.execute(lexer);
|
||||
};
|
||||
|
||||
LexerIndexedCustomAction.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.actionType, this.offset, this.action);
|
||||
};
|
||||
|
||||
LexerIndexedCustomAction.prototype.equals = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (! (other instanceof LexerIndexedCustomAction)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.offset === other.offset && this.action === other.action;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
exports.LexerActionType = LexerActionType;
|
||||
exports.LexerSkipAction = LexerSkipAction;
|
||||
exports.LexerChannelAction = LexerChannelAction;
|
||||
exports.LexerCustomAction = LexerCustomAction;
|
||||
exports.LexerIndexedCustomAction = LexerIndexedCustomAction;
|
||||
exports.LexerMoreAction = LexerMoreAction;
|
||||
exports.LexerTypeAction = LexerTypeAction;
|
||||
exports.LexerPushModeAction = LexerPushModeAction;
|
||||
exports.LexerPopModeAction = LexerPopModeAction;
|
||||
exports.LexerModeAction = LexerModeAction;
|
||||
module.exports = {
|
||||
LexerActionType,
|
||||
LexerSkipAction,
|
||||
LexerChannelAction,
|
||||
LexerCustomAction,
|
||||
LexerIndexedCustomAction,
|
||||
LexerMoreAction,
|
||||
LexerTypeAction,
|
||||
LexerPushModeAction,
|
||||
LexerPopModeAction,
|
||||
LexerModeAction
|
||||
}
|
||||
|
|
|
@ -1,166 +1,173 @@
|
|||
//
|
||||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
///
|
||||
|
||||
// Represents an executor for a sequence of lexer actions which traversed during
|
||||
// the matching operation of a lexer rule (token).
|
||||
//
|
||||
// <p>The executor tracks position information for position-dependent lexer actions
|
||||
// efficiently, ensuring that actions appearing only at the end of the rule do
|
||||
// not cause bloating of the {@link DFA} created for the lexer.</p>
|
||||
const {hashStuff} = require("../Utils");
|
||||
const {LexerIndexedCustomAction} = require('./LexerAction');
|
||||
|
||||
var hashStuff = require("../Utils").hashStuff;
|
||||
var LexerIndexedCustomAction = require('./LexerAction').LexerIndexedCustomAction;
|
||||
class LexerActionExecutor {
|
||||
/**
|
||||
* Represents an executor for a sequence of lexer actions which traversed during
|
||||
* the matching operation of a lexer rule (token).
|
||||
*
|
||||
* <p>The executor tracks position information for position-dependent lexer actions
|
||||
* efficiently, ensuring that actions appearing only at the end of the rule do
|
||||
* not cause bloating of the {@link DFA} created for the lexer.</p>
|
||||
*/
|
||||
constructor(lexerActions) {
|
||||
this.lexerActions = lexerActions === null ? [] : lexerActions;
|
||||
/**
|
||||
* Caches the result of {@link //hashCode} since the hash code is an element
|
||||
* of the performance-critical {@link LexerATNConfig//hashCode} operation
|
||||
*/
|
||||
this.cachedHashCode = hashStuff(lexerActions); // "".join([str(la) for la in
|
||||
// lexerActions]))
|
||||
return this;
|
||||
}
|
||||
|
||||
function LexerActionExecutor(lexerActions) {
|
||||
this.lexerActions = lexerActions === null ? [] : lexerActions;
|
||||
// Caches the result of {@link //hashCode} since the hash code is an element
|
||||
// of the performance-critical {@link LexerATNConfig//hashCode} operation.
|
||||
this.cachedHashCode = hashStuff(lexerActions); // "".join([str(la) for la in
|
||||
// lexerActions]))
|
||||
return this;
|
||||
/**
|
||||
* Creates a {@link LexerActionExecutor} which encodes the current offset
|
||||
* for position-dependent lexer actions.
|
||||
*
|
||||
* <p>Normally, when the executor encounters lexer actions where
|
||||
* {@link LexerAction//isPositionDependent} returns {@code true}, it calls
|
||||
* {@link IntStream//seek} on the input {@link CharStream} to set the input
|
||||
* position to the <em>end</em> of the current token. This behavior provides
|
||||
* for efficient DFA representation of lexer actions which appear at the end
|
||||
* of a lexer rule, even when the lexer rule matches a variable number of
|
||||
* characters.</p>
|
||||
*
|
||||
* <p>Prior to traversing a match transition in the ATN, the current offset
|
||||
* from the token start index is assigned to all position-dependent lexer
|
||||
* actions which have not already been assigned a fixed offset. By storing
|
||||
* the offsets relative to the token start index, the DFA representation of
|
||||
* lexer actions which appear in the middle of tokens remains efficient due
|
||||
* to sharing among tokens of the same length, regardless of their absolute
|
||||
* position in the input stream.</p>
|
||||
*
|
||||
* <p>If the current executor already has offsets assigned to all
|
||||
* position-dependent lexer actions, the method returns {@code this}.</p>
|
||||
*
|
||||
* @param offset The current offset to assign to all position-dependent
|
||||
* lexer actions which do not already have offsets assigned.
|
||||
*
|
||||
* @return {LexerActionExecutor} A {@link LexerActionExecutor} which stores input stream offsets
|
||||
* for all position-dependent lexer actions.
|
||||
*/
|
||||
fixOffsetBeforeMatch(offset) {
|
||||
let updatedLexerActions = null;
|
||||
for (let i = 0; i < this.lexerActions.length; i++) {
|
||||
if (this.lexerActions[i].isPositionDependent &&
|
||||
!(this.lexerActions[i] instanceof LexerIndexedCustomAction)) {
|
||||
if (updatedLexerActions === null) {
|
||||
updatedLexerActions = this.lexerActions.concat([]);
|
||||
}
|
||||
updatedLexerActions[i] = new LexerIndexedCustomAction(offset,
|
||||
this.lexerActions[i]);
|
||||
}
|
||||
}
|
||||
if (updatedLexerActions === null) {
|
||||
return this;
|
||||
} else {
|
||||
return new LexerActionExecutor(updatedLexerActions);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the actions encapsulated by this executor within the context of a
|
||||
* particular {@link Lexer}.
|
||||
*
|
||||
* <p>This method calls {@link IntStream//seek} to set the position of the
|
||||
* {@code input} {@link CharStream} prior to calling
|
||||
* {@link LexerAction//execute} on a position-dependent action. Before the
|
||||
* method returns, the input position will be restored to the same position
|
||||
* it was in when the method was invoked.</p>
|
||||
*
|
||||
* @param lexer The lexer instance.
|
||||
* @param input The input stream which is the source for the current token.
|
||||
* When this method is called, the current {@link IntStream//index} for
|
||||
* {@code input} should be the start of the following token, i.e. 1
|
||||
* character past the end of the current token.
|
||||
* @param startIndex The token start index. This value may be passed to
|
||||
* {@link IntStream//seek} to set the {@code input} position to the beginning
|
||||
* of the token.
|
||||
*/
|
||||
execute(lexer, input, startIndex) {
|
||||
let requiresSeek = false;
|
||||
const stopIndex = input.index;
|
||||
try {
|
||||
for (let i = 0; i < this.lexerActions.length; i++) {
|
||||
let lexerAction = this.lexerActions[i];
|
||||
if (lexerAction instanceof LexerIndexedCustomAction) {
|
||||
const offset = lexerAction.offset;
|
||||
input.seek(startIndex + offset);
|
||||
lexerAction = lexerAction.action;
|
||||
requiresSeek = (startIndex + offset) !== stopIndex;
|
||||
} else if (lexerAction.isPositionDependent) {
|
||||
input.seek(stopIndex);
|
||||
requiresSeek = false;
|
||||
}
|
||||
lexerAction.execute(lexer);
|
||||
}
|
||||
} finally {
|
||||
if (requiresSeek) {
|
||||
input.seek(stopIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
hashCode() {
|
||||
return this.cachedHashCode;
|
||||
}
|
||||
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.cachedHashCode);
|
||||
}
|
||||
|
||||
equals(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (!(other instanceof LexerActionExecutor)) {
|
||||
return false;
|
||||
} else if (this.cachedHashCode != other.cachedHashCode) {
|
||||
return false;
|
||||
} else if (this.lexerActions.length != other.lexerActions.length) {
|
||||
return false;
|
||||
} else {
|
||||
const numActions = this.lexerActions.length
|
||||
for (let idx = 0; idx < numActions; ++idx) {
|
||||
if (!this.lexerActions[idx].equals(other.lexerActions[idx])) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link LexerActionExecutor} which executes the actions for
|
||||
* the input {@code lexerActionExecutor} followed by a specified
|
||||
* {@code lexerAction}.
|
||||
*
|
||||
* @param lexerActionExecutor The executor for actions already traversed by
|
||||
* the lexer while matching a token within a particular
|
||||
* {@link LexerATNConfig}. If this is {@code null}, the method behaves as
|
||||
* though it were an empty executor.
|
||||
* @param lexerAction The lexer action to execute after the actions
|
||||
* specified in {@code lexerActionExecutor}.
|
||||
*
|
||||
* @return {LexerActionExecutor} A {@link LexerActionExecutor} for executing the combine actions
|
||||
* of {@code lexerActionExecutor} and {@code lexerAction}.
|
||||
*/
|
||||
static append(lexerActionExecutor, lexerAction) {
|
||||
if (lexerActionExecutor === null) {
|
||||
return new LexerActionExecutor([ lexerAction ]);
|
||||
}
|
||||
const lexerActions = lexerActionExecutor.lexerActions.concat([ lexerAction ]);
|
||||
return new LexerActionExecutor(lexerActions);
|
||||
}
|
||||
}
|
||||
|
||||
// Creates a {@link LexerActionExecutor} which executes the actions for
|
||||
// the input {@code lexerActionExecutor} followed by a specified
|
||||
// {@code lexerAction}.
|
||||
//
|
||||
// @param lexerActionExecutor The executor for actions already traversed by
|
||||
// the lexer while matching a token within a particular
|
||||
// {@link LexerATNConfig}. If this is {@code null}, the method behaves as
|
||||
// though it were an empty executor.
|
||||
// @param lexerAction The lexer action to execute after the actions
|
||||
// specified in {@code lexerActionExecutor}.
|
||||
//
|
||||
// @return A {@link LexerActionExecutor} for executing the combine actions
|
||||
// of {@code lexerActionExecutor} and {@code lexerAction}.
|
||||
LexerActionExecutor.append = function(lexerActionExecutor, lexerAction) {
|
||||
if (lexerActionExecutor === null) {
|
||||
return new LexerActionExecutor([ lexerAction ]);
|
||||
}
|
||||
var lexerActions = lexerActionExecutor.lexerActions.concat([ lexerAction ]);
|
||||
return new LexerActionExecutor(lexerActions);
|
||||
};
|
||||
|
||||
// Creates a {@link LexerActionExecutor} which encodes the current offset
|
||||
// for position-dependent lexer actions.
|
||||
//
|
||||
// <p>Normally, when the executor encounters lexer actions where
|
||||
// {@link LexerAction//isPositionDependent} returns {@code true}, it calls
|
||||
// {@link IntStream//seek} on the input {@link CharStream} to set the input
|
||||
// position to the <em>end</em> of the current token. This behavior provides
|
||||
// for efficient DFA representation of lexer actions which appear at the end
|
||||
// of a lexer rule, even when the lexer rule matches a variable number of
|
||||
// characters.</p>
|
||||
//
|
||||
// <p>Prior to traversing a match transition in the ATN, the current offset
|
||||
// from the token start index is assigned to all position-dependent lexer
|
||||
// actions which have not already been assigned a fixed offset. By storing
|
||||
// the offsets relative to the token start index, the DFA representation of
|
||||
// lexer actions which appear in the middle of tokens remains efficient due
|
||||
// to sharing among tokens of the same length, regardless of their absolute
|
||||
// position in the input stream.</p>
|
||||
//
|
||||
// <p>If the current executor already has offsets assigned to all
|
||||
// position-dependent lexer actions, the method returns {@code this}.</p>
|
||||
//
|
||||
// @param offset The current offset to assign to all position-dependent
|
||||
// lexer actions which do not already have offsets assigned.
|
||||
//
|
||||
// @return A {@link LexerActionExecutor} which stores input stream offsets
|
||||
// for all position-dependent lexer actions.
|
||||
// /
|
||||
LexerActionExecutor.prototype.fixOffsetBeforeMatch = function(offset) {
|
||||
var updatedLexerActions = null;
|
||||
for (var i = 0; i < this.lexerActions.length; i++) {
|
||||
if (this.lexerActions[i].isPositionDependent &&
|
||||
!(this.lexerActions[i] instanceof LexerIndexedCustomAction)) {
|
||||
if (updatedLexerActions === null) {
|
||||
updatedLexerActions = this.lexerActions.concat([]);
|
||||
}
|
||||
updatedLexerActions[i] = new LexerIndexedCustomAction(offset,
|
||||
this.lexerActions[i]);
|
||||
}
|
||||
}
|
||||
if (updatedLexerActions === null) {
|
||||
return this;
|
||||
} else {
|
||||
return new LexerActionExecutor(updatedLexerActions);
|
||||
}
|
||||
};
|
||||
|
||||
// Execute the actions encapsulated by this executor within the context of a
|
||||
// particular {@link Lexer}.
|
||||
//
|
||||
// <p>This method calls {@link IntStream//seek} to set the position of the
|
||||
// {@code input} {@link CharStream} prior to calling
|
||||
// {@link LexerAction//execute} on a position-dependent action. Before the
|
||||
// method returns, the input position will be restored to the same position
|
||||
// it was in when the method was invoked.</p>
|
||||
//
|
||||
// @param lexer The lexer instance.
|
||||
// @param input The input stream which is the source for the current token.
|
||||
// When this method is called, the current {@link IntStream//index} for
|
||||
// {@code input} should be the start of the following token, i.e. 1
|
||||
// character past the end of the current token.
|
||||
// @param startIndex The token start index. This value may be passed to
|
||||
// {@link IntStream//seek} to set the {@code input} position to the beginning
|
||||
// of the token.
|
||||
// /
|
||||
LexerActionExecutor.prototype.execute = function(lexer, input, startIndex) {
|
||||
var requiresSeek = false;
|
||||
var stopIndex = input.index;
|
||||
try {
|
||||
for (var i = 0; i < this.lexerActions.length; i++) {
|
||||
var lexerAction = this.lexerActions[i];
|
||||
if (lexerAction instanceof LexerIndexedCustomAction) {
|
||||
var offset = lexerAction.offset;
|
||||
input.seek(startIndex + offset);
|
||||
lexerAction = lexerAction.action;
|
||||
requiresSeek = (startIndex + offset) !== stopIndex;
|
||||
} else if (lexerAction.isPositionDependent) {
|
||||
input.seek(stopIndex);
|
||||
requiresSeek = false;
|
||||
}
|
||||
lexerAction.execute(lexer);
|
||||
}
|
||||
} finally {
|
||||
if (requiresSeek) {
|
||||
input.seek(stopIndex);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
LexerActionExecutor.prototype.hashCode = function() {
|
||||
return this.cachedHashCode;
|
||||
};
|
||||
|
||||
LexerActionExecutor.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.cachedHashCode);
|
||||
};
|
||||
|
||||
|
||||
LexerActionExecutor.prototype.equals = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (!(other instanceof LexerActionExecutor)) {
|
||||
return false;
|
||||
} else if (this.cachedHashCode != other.cachedHashCode) {
|
||||
return false;
|
||||
} else if (this.lexerActions.length != other.lexerActions.length) {
|
||||
return false;
|
||||
} else {
|
||||
var numActions = this.lexerActions.length
|
||||
for (var idx = 0; idx < numActions; ++idx) {
|
||||
if (!this.lexerActions[idx].equals(other.lexerActions[idx])) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
exports.LexerActionExecutor = LexerActionExecutor;
|
||||
module.exports = LexerActionExecutor;
|
||||
|
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
@ -1,404 +1,397 @@
|
|||
//
|
||||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
//
|
||||
|
||||
// A tree structure used to record the semantic context in which
|
||||
// an ATN configuration is valid. It's either a single predicate,
|
||||
// a conjunction {@code p1&&p2}, or a sum of products {@code p1||p2}.
|
||||
//
|
||||
// <p>I have scoped the {@link AND}, {@link OR}, and {@link Predicate} subclasses of
|
||||
// {@link SemanticContext} within the scope of this outer class.</p>
|
||||
//
|
||||
const {Set, Hash} = require('./../Utils');
|
||||
|
||||
var Set = require('./../Utils').Set;
|
||||
var Hash = require('./../Utils').Hash;
|
||||
/**
|
||||
* A tree structure used to record the semantic context in which
|
||||
* an ATN configuration is valid. It's either a single predicate,
|
||||
* a conjunction {@code p1&&p2}, or a sum of products {@code p1||p2}.
|
||||
*
|
||||
* <p>I have scoped the {@link AND}, {@link OR}, and {@link Predicate} subclasses of
|
||||
* {@link SemanticContext} within the scope of this outer class.</p>
|
||||
*/
|
||||
class SemanticContext {
|
||||
hashCode() {
|
||||
const hash = new Hash();
|
||||
this.updateHashCode(hash);
|
||||
return hash.finish();
|
||||
}
|
||||
|
||||
function SemanticContext() {
|
||||
return this;
|
||||
/**
|
||||
* For context independent predicates, we evaluate them without a local
|
||||
* context (i.e., null context). That way, we can evaluate them without
|
||||
* having to create proper rule-specific context during prediction (as
|
||||
* opposed to the parser, which creates them naturally). In a practical
|
||||
* sense, this avoids a cast exception from RuleContext to myruleContext.
|
||||
*
|
||||
* <p>For context dependent predicates, we must pass in a local context so that
|
||||
* references such as $arg evaluate properly as _localctx.arg. We only
|
||||
* capture context dependent predicates in the context in which we begin
|
||||
* prediction, so we passed in the outer context here in case of context
|
||||
* dependent predicate evaluation.</p>
|
||||
*/
|
||||
evaluate(parser, outerContext) {}
|
||||
|
||||
/**
|
||||
* Evaluate the precedence predicates for the context and reduce the result.
|
||||
*
|
||||
* @param parser The parser instance.
|
||||
* @param outerContext The current parser context object.
|
||||
* @return The simplified semantic context after precedence predicates are
|
||||
* evaluated, which will be one of the following values.
|
||||
* <ul>
|
||||
* <li>{@link //NONE}: if the predicate simplifies to {@code true} after
|
||||
* precedence predicates are evaluated.</li>
|
||||
* <li>{@code null}: if the predicate simplifies to {@code false} after
|
||||
* precedence predicates are evaluated.</li>
|
||||
* <li>{@code this}: if the semantic context is not changed as a result of
|
||||
* precedence predicate evaluation.</li>
|
||||
* <li>A non-{@code null} {@link SemanticContext}: the new simplified
|
||||
* semantic context after precedence predicates are evaluated.</li>
|
||||
* </ul>
|
||||
*/
|
||||
evalPrecedence(parser, outerContext) {
|
||||
return this;
|
||||
}
|
||||
|
||||
static andContext(a, b) {
|
||||
if (a === null || a === SemanticContext.NONE) {
|
||||
return b;
|
||||
}
|
||||
if (b === null || b === SemanticContext.NONE) {
|
||||
return a;
|
||||
}
|
||||
const result = new AND(a, b);
|
||||
if (result.opnds.length === 1) {
|
||||
return result.opnds[0];
|
||||
} else {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
static orContext(a, b) {
|
||||
if (a === null) {
|
||||
return b;
|
||||
}
|
||||
if (b === null) {
|
||||
return a;
|
||||
}
|
||||
if (a === SemanticContext.NONE || b === SemanticContext.NONE) {
|
||||
return SemanticContext.NONE;
|
||||
}
|
||||
const result = new OR(a, b);
|
||||
if (result.opnds.length === 1) {
|
||||
return result.opnds[0];
|
||||
} else {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
SemanticContext.prototype.hashCode = function() {
|
||||
var hash = new Hash();
|
||||
this.updateHashCode(hash);
|
||||
return hash.finish();
|
||||
};
|
||||
|
||||
// For context independent predicates, we evaluate them without a local
|
||||
// context (i.e., null context). That way, we can evaluate them without
|
||||
// having to create proper rule-specific context during prediction (as
|
||||
// opposed to the parser, which creates them naturally). In a practical
|
||||
// sense, this avoids a cast exception from RuleContext to myruleContext.
|
||||
//
|
||||
// <p>For context dependent predicates, we must pass in a local context so that
|
||||
// references such as $arg evaluate properly as _localctx.arg. We only
|
||||
// capture context dependent predicates in the context in which we begin
|
||||
// prediction, so we passed in the outer context here in case of context
|
||||
// dependent predicate evaluation.</p>
|
||||
//
|
||||
SemanticContext.prototype.evaluate = function(parser, outerContext) {
|
||||
};
|
||||
class Predicate extends SemanticContext {
|
||||
constructor(ruleIndex, predIndex, isCtxDependent) {
|
||||
super();
|
||||
this.ruleIndex = ruleIndex === undefined ? -1 : ruleIndex;
|
||||
this.predIndex = predIndex === undefined ? -1 : predIndex;
|
||||
this.isCtxDependent = isCtxDependent === undefined ? false : isCtxDependent; // e.g., $i ref in pred
|
||||
}
|
||||
|
||||
//
|
||||
// Evaluate the precedence predicates for the context and reduce the result.
|
||||
//
|
||||
// @param parser The parser instance.
|
||||
// @param outerContext The current parser context object.
|
||||
// @return The simplified semantic context after precedence predicates are
|
||||
// evaluated, which will be one of the following values.
|
||||
// <ul>
|
||||
// <li>{@link //NONE}: if the predicate simplifies to {@code true} after
|
||||
// precedence predicates are evaluated.</li>
|
||||
// <li>{@code null}: if the predicate simplifies to {@code false} after
|
||||
// precedence predicates are evaluated.</li>
|
||||
// <li>{@code this}: if the semantic context is not changed as a result of
|
||||
// precedence predicate evaluation.</li>
|
||||
// <li>A non-{@code null} {@link SemanticContext}: the new simplified
|
||||
// semantic context after precedence predicates are evaluated.</li>
|
||||
// </ul>
|
||||
//
|
||||
SemanticContext.prototype.evalPrecedence = function(parser, outerContext) {
|
||||
return this;
|
||||
};
|
||||
evaluate(parser, outerContext) {
|
||||
const localctx = this.isCtxDependent ? outerContext : null;
|
||||
return parser.sempred(localctx, this.ruleIndex, this.predIndex);
|
||||
}
|
||||
|
||||
SemanticContext.andContext = function(a, b) {
|
||||
if (a === null || a === SemanticContext.NONE) {
|
||||
return b;
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.ruleIndex, this.predIndex, this.isCtxDependent);
|
||||
}
|
||||
if (b === null || b === SemanticContext.NONE) {
|
||||
return a;
|
||||
}
|
||||
var result = new AND(a, b);
|
||||
if (result.opnds.length === 1) {
|
||||
return result.opnds[0];
|
||||
} else {
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
SemanticContext.orContext = function(a, b) {
|
||||
if (a === null) {
|
||||
return b;
|
||||
equals(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (!(other instanceof Predicate)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.ruleIndex === other.ruleIndex &&
|
||||
this.predIndex === other.predIndex &&
|
||||
this.isCtxDependent === other.isCtxDependent;
|
||||
}
|
||||
}
|
||||
if (b === null) {
|
||||
return a;
|
||||
}
|
||||
if (a === SemanticContext.NONE || b === SemanticContext.NONE) {
|
||||
return SemanticContext.NONE;
|
||||
}
|
||||
var result = new OR(a, b);
|
||||
if (result.opnds.length === 1) {
|
||||
return result.opnds[0];
|
||||
} else {
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
function Predicate(ruleIndex, predIndex, isCtxDependent) {
|
||||
SemanticContext.call(this);
|
||||
this.ruleIndex = ruleIndex === undefined ? -1 : ruleIndex;
|
||||
this.predIndex = predIndex === undefined ? -1 : predIndex;
|
||||
this.isCtxDependent = isCtxDependent === undefined ? false : isCtxDependent; // e.g., $i ref in pred
|
||||
return this;
|
||||
toString() {
|
||||
return "{" + this.ruleIndex + ":" + this.predIndex + "}?";
|
||||
}
|
||||
}
|
||||
|
||||
Predicate.prototype = Object.create(SemanticContext.prototype);
|
||||
Predicate.prototype.constructor = Predicate;
|
||||
|
||||
//The default {@link SemanticContext}, which is semantically equivalent to
|
||||
//a predicate of the form {@code {true}?}.
|
||||
//
|
||||
/**
|
||||
* The default {@link SemanticContext}, which is semantically equivalent to
|
||||
* a predicate of the form {@code {true}?}
|
||||
*/
|
||||
SemanticContext.NONE = new Predicate();
|
||||
|
||||
|
||||
Predicate.prototype.evaluate = function(parser, outerContext) {
|
||||
var localctx = this.isCtxDependent ? outerContext : null;
|
||||
return parser.sempred(localctx, this.ruleIndex, this.predIndex);
|
||||
};
|
||||
|
||||
Predicate.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.ruleIndex, this.predIndex, this.isCtxDependent);
|
||||
};
|
||||
|
||||
Predicate.prototype.equals = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (!(other instanceof Predicate)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.ruleIndex === other.ruleIndex &&
|
||||
this.predIndex === other.predIndex &&
|
||||
this.isCtxDependent === other.isCtxDependent;
|
||||
class PrecedencePredicate extends SemanticContext {
|
||||
constructor(precedence) {
|
||||
super();
|
||||
this.precedence = precedence === undefined ? 0 : precedence;
|
||||
}
|
||||
};
|
||||
|
||||
Predicate.prototype.toString = function() {
|
||||
return "{" + this.ruleIndex + ":" + this.predIndex + "}?";
|
||||
};
|
||||
|
||||
function PrecedencePredicate(precedence) {
|
||||
SemanticContext.call(this);
|
||||
this.precedence = precedence === undefined ? 0 : precedence;
|
||||
}
|
||||
|
||||
PrecedencePredicate.prototype = Object.create(SemanticContext.prototype);
|
||||
PrecedencePredicate.prototype.constructor = PrecedencePredicate;
|
||||
|
||||
PrecedencePredicate.prototype.evaluate = function(parser, outerContext) {
|
||||
return parser.precpred(outerContext, this.precedence);
|
||||
};
|
||||
|
||||
PrecedencePredicate.prototype.evalPrecedence = function(parser, outerContext) {
|
||||
if (parser.precpred(outerContext, this.precedence)) {
|
||||
return SemanticContext.NONE;
|
||||
} else {
|
||||
return null;
|
||||
evaluate(parser, outerContext) {
|
||||
return parser.precpred(outerContext, this.precedence);
|
||||
}
|
||||
};
|
||||
|
||||
PrecedencePredicate.prototype.compareTo = function(other) {
|
||||
return this.precedence - other.precedence;
|
||||
};
|
||||
|
||||
PrecedencePredicate.prototype.updateHashCode = function(hash) {
|
||||
hash.update(31);
|
||||
};
|
||||
|
||||
PrecedencePredicate.prototype.equals = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (!(other instanceof PrecedencePredicate)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.precedence === other.precedence;
|
||||
}
|
||||
};
|
||||
|
||||
PrecedencePredicate.prototype.toString = function() {
|
||||
return "{"+this.precedence+">=prec}?";
|
||||
};
|
||||
|
||||
|
||||
|
||||
PrecedencePredicate.filterPrecedencePredicates = function(set) {
|
||||
var result = [];
|
||||
set.values().map( function(context) {
|
||||
if (context instanceof PrecedencePredicate) {
|
||||
result.push(context);
|
||||
evalPrecedence(parser, outerContext) {
|
||||
if (parser.precpred(outerContext, this.precedence)) {
|
||||
return SemanticContext.NONE;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
|
||||
// A semantic context which is true whenever none of the contained contexts
|
||||
// is false.
|
||||
//
|
||||
function AND(a, b) {
|
||||
SemanticContext.call(this);
|
||||
var operands = new Set();
|
||||
if (a instanceof AND) {
|
||||
a.opnds.map(function(o) {
|
||||
operands.add(o);
|
||||
});
|
||||
} else {
|
||||
operands.add(a);
|
||||
}
|
||||
if (b instanceof AND) {
|
||||
b.opnds.map(function(o) {
|
||||
operands.add(o);
|
||||
});
|
||||
} else {
|
||||
operands.add(b);
|
||||
|
||||
compareTo(other) {
|
||||
return this.precedence - other.precedence;
|
||||
}
|
||||
var precedencePredicates = PrecedencePredicate.filterPrecedencePredicates(operands);
|
||||
if (precedencePredicates.length > 0) {
|
||||
// interested in the transition with the lowest precedence
|
||||
var reduced = null;
|
||||
precedencePredicates.map( function(p) {
|
||||
if(reduced===null || p.precedence<reduced.precedence) {
|
||||
reduced = p;
|
||||
|
||||
updateHashCode(hash) {
|
||||
hash.update(31);
|
||||
}
|
||||
|
||||
equals(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (!(other instanceof PrecedencePredicate)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.precedence === other.precedence;
|
||||
}
|
||||
}
|
||||
|
||||
toString() {
|
||||
return "{"+this.precedence+">=prec}?";
|
||||
}
|
||||
|
||||
static filterPrecedencePredicates(set) {
|
||||
const result = [];
|
||||
set.values().map( function(context) {
|
||||
if (context instanceof PrecedencePredicate) {
|
||||
result.push(context);
|
||||
}
|
||||
});
|
||||
operands.add(reduced);
|
||||
return result;
|
||||
}
|
||||
this.opnds = operands.values();
|
||||
return this;
|
||||
}
|
||||
|
||||
AND.prototype = Object.create(SemanticContext.prototype);
|
||||
AND.prototype.constructor = AND;
|
||||
|
||||
AND.prototype.equals = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (!(other instanceof AND)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.opnds === other.opnds;
|
||||
}
|
||||
};
|
||||
|
||||
AND.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.opnds, "AND");
|
||||
};
|
||||
//
|
||||
// {@inheritDoc}
|
||||
//
|
||||
// <p>
|
||||
// The evaluation of predicates by this context is short-circuiting, but
|
||||
// unordered.</p>
|
||||
//
|
||||
AND.prototype.evaluate = function(parser, outerContext) {
|
||||
for (var i = 0; i < this.opnds.length; i++) {
|
||||
if (!this.opnds[i].evaluate(parser, outerContext)) {
|
||||
return false;
|
||||
class AND extends SemanticContext {
|
||||
/**
|
||||
* A semantic context which is true whenever none of the contained contexts
|
||||
* is false
|
||||
*/
|
||||
constructor(a, b) {
|
||||
super();
|
||||
const operands = new Set();
|
||||
if (a instanceof AND) {
|
||||
a.opnds.map(function(o) {
|
||||
operands.add(o);
|
||||
});
|
||||
} else {
|
||||
operands.add(a);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
AND.prototype.evalPrecedence = function(parser, outerContext) {
|
||||
var differs = false;
|
||||
var operands = [];
|
||||
for (var i = 0; i < this.opnds.length; i++) {
|
||||
var context = this.opnds[i];
|
||||
var evaluated = context.evalPrecedence(parser, outerContext);
|
||||
differs |= (evaluated !== context);
|
||||
if (evaluated === null) {
|
||||
// The AND context is false if any element is false
|
||||
return null;
|
||||
} else if (evaluated !== SemanticContext.NONE) {
|
||||
// Reduce the result by skipping true elements
|
||||
operands.push(evaluated);
|
||||
if (b instanceof AND) {
|
||||
b.opnds.map(function(o) {
|
||||
operands.add(o);
|
||||
});
|
||||
} else {
|
||||
operands.add(b);
|
||||
}
|
||||
}
|
||||
if (!differs) {
|
||||
return this;
|
||||
}
|
||||
if (operands.length === 0) {
|
||||
// all elements were true, so the AND context is true
|
||||
return SemanticContext.NONE;
|
||||
}
|
||||
var result = null;
|
||||
operands.map(function(o) {
|
||||
result = result === null ? o : SemanticContext.andContext(result, o);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
AND.prototype.toString = function() {
|
||||
var s = "";
|
||||
this.opnds.map(function(o) {
|
||||
s += "&& " + o.toString();
|
||||
});
|
||||
return s.length > 3 ? s.slice(3) : s;
|
||||
};
|
||||
|
||||
//
|
||||
// A semantic context which is true whenever at least one of the contained
|
||||
// contexts is true.
|
||||
//
|
||||
function OR(a, b) {
|
||||
SemanticContext.call(this);
|
||||
var operands = new Set();
|
||||
if (a instanceof OR) {
|
||||
a.opnds.map(function(o) {
|
||||
operands.add(o);
|
||||
});
|
||||
} else {
|
||||
operands.add(a);
|
||||
}
|
||||
if (b instanceof OR) {
|
||||
b.opnds.map(function(o) {
|
||||
operands.add(o);
|
||||
});
|
||||
} else {
|
||||
operands.add(b);
|
||||
const precedencePredicates = PrecedencePredicate.filterPrecedencePredicates(operands);
|
||||
if (precedencePredicates.length > 0) {
|
||||
// interested in the transition with the lowest precedence
|
||||
let reduced = null;
|
||||
precedencePredicates.map( function(p) {
|
||||
if(reduced===null || p.precedence<reduced.precedence) {
|
||||
reduced = p;
|
||||
}
|
||||
});
|
||||
operands.add(reduced);
|
||||
}
|
||||
this.opnds = operands.values();
|
||||
}
|
||||
|
||||
var precedencePredicates = PrecedencePredicate.filterPrecedencePredicates(operands);
|
||||
if (precedencePredicates.length > 0) {
|
||||
// interested in the transition with the highest precedence
|
||||
var s = precedencePredicates.sort(function(a, b) {
|
||||
return a.compareTo(b);
|
||||
});
|
||||
var reduced = s[s.length-1];
|
||||
operands.add(reduced);
|
||||
}
|
||||
this.opnds = operands.values();
|
||||
return this;
|
||||
}
|
||||
|
||||
OR.prototype = Object.create(SemanticContext.prototype);
|
||||
OR.prototype.constructor = OR;
|
||||
|
||||
OR.prototype.constructor = function(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (!(other instanceof OR)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.opnds === other.opnds;
|
||||
}
|
||||
};
|
||||
|
||||
OR.prototype.updateHashCode = function(hash) {
|
||||
hash.update(this.opnds, "OR");
|
||||
};
|
||||
|
||||
// <p>
|
||||
// The evaluation of predicates by this context is short-circuiting, but
|
||||
// unordered.</p>
|
||||
//
|
||||
OR.prototype.evaluate = function(parser, outerContext) {
|
||||
for (var i = 0; i < this.opnds.length; i++) {
|
||||
if (this.opnds[i].evaluate(parser, outerContext)) {
|
||||
equals(other) {
|
||||
if (this === other) {
|
||||
return true;
|
||||
} else if (!(other instanceof AND)) {
|
||||
return false;
|
||||
} else {
|
||||
return this.opnds === other.opnds;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
OR.prototype.evalPrecedence = function(parser, outerContext) {
|
||||
var differs = false;
|
||||
var operands = [];
|
||||
for (var i = 0; i < this.opnds.length; i++) {
|
||||
var context = this.opnds[i];
|
||||
var evaluated = context.evalPrecedence(parser, outerContext);
|
||||
differs |= (evaluated !== context);
|
||||
if (evaluated === SemanticContext.NONE) {
|
||||
// The OR context is true if any element is true
|
||||
updateHashCode(hash) {
|
||||
hash.update(this.opnds, "AND");
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>
|
||||
* The evaluation of predicates by this context is short-circuiting, but
|
||||
* unordered.</p>
|
||||
*/
|
||||
evaluate(parser, outerContext) {
|
||||
for (let i = 0; i < this.opnds.length; i++) {
|
||||
if (!this.opnds[i].evaluate(parser, outerContext)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
evalPrecedence(parser, outerContext) {
|
||||
let differs = false;
|
||||
const operands = [];
|
||||
for (let i = 0; i < this.opnds.length; i++) {
|
||||
const context = this.opnds[i];
|
||||
const evaluated = context.evalPrecedence(parser, outerContext);
|
||||
differs |= (evaluated !== context);
|
||||
if (evaluated === null) {
|
||||
// The AND context is false if any element is false
|
||||
return null;
|
||||
} else if (evaluated !== SemanticContext.NONE) {
|
||||
// Reduce the result by skipping true elements
|
||||
operands.push(evaluated);
|
||||
}
|
||||
}
|
||||
if (!differs) {
|
||||
return this;
|
||||
}
|
||||
if (operands.length === 0) {
|
||||
// all elements were true, so the AND context is true
|
||||
return SemanticContext.NONE;
|
||||
} else if (evaluated !== null) {
|
||||
// Reduce the result by skipping false elements
|
||||
operands.push(evaluated);
|
||||
}
|
||||
let result = null;
|
||||
operands.map(function(o) {
|
||||
result = result === null ? o : SemanticContext.andContext(result, o);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
toString() {
|
||||
let s = "";
|
||||
this.opnds.map(function(o) {
|
||||
s += "&& " + o.toString();
|
||||
});
|
||||
return s.length > 3 ? s.slice(3) : s;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
 * A semantic context which is true whenever at least one of the contained
 * contexts is true.
 */
class OR extends SemanticContext {
    constructor(a, b) {
        super();
        // Flatten nested ORs into a single operand set (dedupes operands).
        const operands = new Set();
        if (a instanceof OR) {
            a.opnds.map(function(o) {
                operands.add(o);
            });
        } else {
            operands.add(a);
        }
        if (b instanceof OR) {
            b.opnds.map(function(o) {
                operands.add(o);
            });
        } else {
            operands.add(b);
        }

        const precedencePredicates = PrecedencePredicate.filterPrecedencePredicates(operands);
        if (precedencePredicates.length > 0) {
            // interested in the transition with the highest precedence
            const s = precedencePredicates.sort(function(a, b) {
                return a.compareTo(b);
            });
            const reduced = s[s.length - 1];
            operands.add(reduced);
        }
        this.opnds = operands.values();
    }

    equals(other) {
        if (this === other) {
            return true;
        } else if (!(other instanceof OR)) {
            return false;
        } else {
            return this.opnds === other.opnds;
        }
    }

    // Tagged "OR" so AND/OR over the same operands hash differently.
    updateHashCode(hash) {
        hash.update(this.opnds, "OR");
    }

    /**
     * {@inheritDoc}
     *
     * <p>
     * The evaluation of predicates by this context is short-circuiting, but
     * unordered.</p>
     */
    evaluate(parser, outerContext) {
        for (let i = 0; i < this.opnds.length; i++) {
            if (this.opnds[i].evaluate(parser, outerContext)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Reduce this OR context under the current precedence:
     * NONE when any operand is statically true, `this` when nothing changed,
     * null when every operand reduced to null, else a new OR of the rest.
     */
    evalPrecedence(parser, outerContext) {
        let differs = false;
        const operands = [];
        for (let i = 0; i < this.opnds.length; i++) {
            const context = this.opnds[i];
            const evaluated = context.evalPrecedence(parser, outerContext);
            differs |= (evaluated !== context);
            if (evaluated === SemanticContext.NONE) {
                // The OR context is true if any element is true
                return SemanticContext.NONE;
            } else if (evaluated !== null) {
                // Reduce the result by skipping false elements
                operands.push(evaluated);
            }
        }
        if (!differs) {
            return this;
        }
        if (operands.length === 0) {
            // all elements were false, so the OR context is false
            return null;
        }
        // BUGFIX: the callback must assign into `result`; the previous code
        // declared `const result = null` and returned from the map callback,
        // which discarded the combined context and always returned null.
        let result = null;
        operands.map(function(o) {
            result = result === null ? o : SemanticContext.orContext(result, o);
        });
        return result;
    }

    // Render the disjunction, e.g. "a|| b|| c" (leading "|| " stripped).
    toString() {
        let s = "";
        this.opnds.map(function(o) {
            s += "|| " + o.toString();
        });
        return s.length > 3 ? s.slice(3) : s;
    }
}
|
||||
|
||||
// Public surface of this module; the AND/OR combinator classes above are
// produced internally via SemanticContext.andContext/orContext and are
// deliberately not exported.
module.exports = {
    SemanticContext,
    PrecedencePredicate,
    Predicate
}
|
||||
|
|
|
@ -2,45 +2,48 @@
|
|||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
//
|
||||
|
||||
// An ATN transition between any two ATN states. Subclasses define
|
||||
// atom, set, epsilon, action, predicate, rule transitions.
|
||||
//
|
||||
// <p>This is a one way link. It emanates from a state (usually via a list of
|
||||
// transitions) and has a target state.</p>
|
||||
//
|
||||
// <p>Since we never have to change the ATN transitions once we construct it,
|
||||
// we can fix these transitions as specific classes. The DFA transitions
|
||||
// on the other hand need to update the labels as it adds transitions to
|
||||
// the states. We'll use the term Edge for the DFA to distinguish them from
|
||||
// ATN transitions.</p>
|
||||
const {Token} = require('./../Token');
const {Interval, IntervalSet} = require('./../IntervalSet');
const {Predicate, PrecedencePredicate} = require('./SemanticContext');
|
||||
|
||||
/**
 * An ATN transition between any two ATN states. Subclasses define
 * atom, set, epsilon, action, predicate, rule transitions.
 *
 * <p>This is a one way link. It emanates from a state (usually via a list of
 * transitions) and has a target state.</p>
 *
 * <p>Since we never have to change the ATN transitions once we construct it,
 * we can fix these transitions as specific classes. The DFA transitions
 * on the other hand need to update the labels as it adds transitions to
 * the states. We'll use the term Edge for the DFA to distinguish them from
 * ATN transitions.</p>
 */
class Transition {
    constructor(target) {
        // The target of this transition; must always be supplied.
        if (target === undefined || target === null) {
            throw "target cannot be null.";
        }
        this.target = target;
        // Are we epsilon, action, sempred?
        this.isEpsilon = false;
        this.label = null;
    }
}
|
||||
// Constants for serialization: each concrete Transition subclass stamps one
// of these onto `this.serializationType` so the (de)serializer can identify
// the edge kind.
Transition.EPSILON = 1;
Transition.RANGE = 2;
Transition.RULE = 3;
// e.g., {isType(input.LT(1))}?
Transition.PREDICATE = 4;
Transition.ATOM = 5;
Transition.ACTION = 6;
// ~(A|B) or ~atom, wildcard, which convert to next 2
Transition.SET = 7;
Transition.NOT_SET = 8;
Transition.WILDCARD = 9;
Transition.PRECEDENCE = 10;
|
||||
|
@ -74,243 +77,227 @@ Transition.serializationTypes = {
|
|||
|
||||
|
||||
// TODO: make all transitions sets? no, should remove set edges
/** A transition that matches exactly one token type or character value. */
class AtomTransition extends Transition {
    constructor(target, label) {
        super(target);
        // The token type or character value; or, signifies special label.
        this.label_ = label;
        this.label = this.makeLabel();
        this.serializationType = Transition.ATOM;
    }

    // Wrap the single atom value in a one-element IntervalSet.
    makeLabel() {
        const s = new IntervalSet();
        s.addOne(this.label_);
        return s;
    }

    matches(symbol, minVocabSymbol, maxVocabSymbol) {
        return this.label_ === symbol;
    }

    toString() {
        return this.label_;
    }
}
|
||||
/** An (epsilon) transition into the start state of another rule. */
class RuleTransition extends Transition {
    constructor(ruleStart, ruleIndex, precedence, followState) {
        super(ruleStart);
        // ptr to the rule definition object for this rule ref
        this.ruleIndex = ruleIndex;
        this.precedence = precedence;
        // what node to begin computations following ref to rule
        this.followState = followState;
        this.serializationType = Transition.RULE;
        this.isEpsilon = true;
    }

    // Rule transitions are epsilon edges; they never consume a symbol.
    matches(symbol, minVocabSymbol, maxVocabSymbol) {
        return false;
    }
}
|
||||
/** A transition that consumes no input. */
class EpsilonTransition extends Transition {
    constructor(target, outermostPrecedenceReturn) {
        super(target);
        this.serializationType = Transition.EPSILON;
        this.isEpsilon = true;
        // NOTE(review): presumably the precedence-rule index this epsilon
        // returns from, or undefined — confirm against ATNDeserializer usage.
        this.outermostPrecedenceReturn = outermostPrecedenceReturn;
    }

    // Epsilon edges never match a symbol.
    matches(symbol, minVocabSymbol, maxVocabSymbol) {
        return false;
    }

    toString() {
        return "epsilon";
    }
}
|
||||
/** A transition matching any symbol in the inclusive range [start, stop]. */
class RangeTransition extends Transition {
    constructor(target, start, stop) {
        super(target);
        this.serializationType = Transition.RANGE;
        this.start = start;
        this.stop = stop;
        this.label = this.makeLabel();
    }

    // Represent the range as an IntervalSet for uniform label handling.
    makeLabel() {
        const s = new IntervalSet();
        s.addRange(this.start, this.stop);
        return s;
    }

    matches(symbol, minVocabSymbol, maxVocabSymbol) {
        return symbol >= this.start && symbol <= this.stop;
    }

    toString() {
        return "'" + String.fromCharCode(this.start) + "'..'" + String.fromCharCode(this.stop) + "'";
    }
}
|
||||
|
||||
function AbstractPredicateTransition(target) {
|
||||
Transition.call(this, target);
|
||||
return this;
|
||||
class AbstractPredicateTransition extends Transition {
|
||||
constructor(target) {
|
||||
super(target);
|
||||
}
|
||||
}
|
||||
|
||||
AbstractPredicateTransition.prototype = Object.create(Transition.prototype);
|
||||
AbstractPredicateTransition.prototype.constructor = AbstractPredicateTransition;
|
||||
/** An (epsilon) transition gated by a semantic predicate {...}?. */
class PredicateTransition extends AbstractPredicateTransition {
    constructor(target, ruleIndex, predIndex, isCtxDependent) {
        super(target);
        this.serializationType = Transition.PREDICATE;
        this.ruleIndex = ruleIndex;
        this.predIndex = predIndex;
        this.isCtxDependent = isCtxDependent; // e.g., $i ref in pred
        this.isEpsilon = true;
    }

    // Predicate edges are epsilon; they never consume a symbol.
    matches(symbol, minVocabSymbol, maxVocabSymbol) {
        return false;
    }

    getPredicate() {
        return new Predicate(this.ruleIndex, this.predIndex, this.isCtxDependent);
    }

    toString() {
        return "pred_" + this.ruleIndex + ":" + this.predIndex;
    }
}
|
||||
/** An (epsilon) transition that executes an embedded lexer/parser action. */
class ActionTransition extends Transition {
    constructor(target, ruleIndex, actionIndex, isCtxDependent) {
        super(target);
        this.serializationType = Transition.ACTION;
        this.ruleIndex = ruleIndex;
        // Default to -1 / false when the optional arguments are omitted.
        this.actionIndex = actionIndex === undefined ? -1 : actionIndex;
        this.isCtxDependent = isCtxDependent === undefined ? false : isCtxDependent; // e.g., $i ref in pred
        this.isEpsilon = true;
    }

    // Action edges are epsilon; they never consume a symbol.
    matches(symbol, minVocabSymbol, maxVocabSymbol) {
        return false;
    }

    toString() {
        return "action_" + this.ruleIndex + ":" + this.actionIndex;
    }
}
|
||||
|
||||
|
||||
// A transition containing a set of values.
class SetTransition extends Transition {
    constructor(target, set) {
        super(target);
        this.serializationType = Transition.SET;
        if (set !== undefined && set !== null) {
            this.label = set;
        } else {
            // No set supplied: fall back to a singleton INVALID_TYPE label.
            this.label = new IntervalSet();
            this.label.addOne(Token.INVALID_TYPE);
        }
    }

    matches(symbol, minVocabSymbol, maxVocabSymbol) {
        return this.label.contains(symbol);
    }

    toString() {
        return this.label.toString();
    }
}
|
||||
/** The complement of a SetTransition, restricted to the vocabulary range. */
class NotSetTransition extends SetTransition {
    constructor(target, set) {
        super(target, set);
        this.serializationType = Transition.NOT_SET;
    }

    // Matches any in-vocabulary symbol NOT contained in the underlying set.
    matches(symbol, minVocabSymbol, maxVocabSymbol) {
        return symbol >= minVocabSymbol && symbol <= maxVocabSymbol &&
            !super.matches(symbol, minVocabSymbol, maxVocabSymbol);
    }

    toString() {
        return '~' + super.toString();
    }
}
|
||||
/** A transition matching any symbol within the vocabulary range. */
class WildcardTransition extends Transition {
    constructor(target) {
        super(target);
        this.serializationType = Transition.WILDCARD;
    }

    matches(symbol, minVocabSymbol, maxVocabSymbol) {
        return symbol >= minVocabSymbol && symbol <= maxVocabSymbol;
    }

    toString() {
        return ".";
    }
}
|
||||
/** An (epsilon) transition gated by a precedence predicate {p >= _p}?. */
class PrecedencePredicateTransition extends AbstractPredicateTransition {
    constructor(target, precedence) {
        super(target);
        this.serializationType = Transition.PRECEDENCE;
        this.precedence = precedence;
        this.isEpsilon = true;
    }

    // Precedence-predicate edges are epsilon; they never consume a symbol.
    matches(symbol, minVocabSymbol, maxVocabSymbol) {
        return false;
    }

    getPredicate() {
        return new PrecedencePredicate(this.precedence);
    }

    toString() {
        return this.precedence + " >= _p";
    }
}
|
||||
|
||||
exports.Transition = Transition;
|
||||
exports.AtomTransition = AtomTransition;
|
||||
exports.SetTransition = SetTransition;
|
||||
exports.NotSetTransition = NotSetTransition;
|
||||
exports.RuleTransition = RuleTransition;
|
||||
exports.ActionTransition = ActionTransition;
|
||||
exports.EpsilonTransition = EpsilonTransition;
|
||||
exports.RangeTransition = RangeTransition;
|
||||
exports.WildcardTransition = WildcardTransition;
|
||||
exports.PredicateTransition = PredicateTransition;
|
||||
exports.PrecedencePredicateTransition = PrecedencePredicateTransition;
|
||||
exports.AbstractPredicateTransition = AbstractPredicateTransition;
|
||||
module.exports = {
|
||||
Transition,
|
||||
AtomTransition,
|
||||
SetTransition,
|
||||
NotSetTransition,
|
||||
RuleTransition,
|
||||
ActionTransition,
|
||||
EpsilonTransition,
|
||||
RangeTransition,
|
||||
WildcardTransition,
|
||||
PredicateTransition,
|
||||
PrecedencePredicateTransition,
|
||||
AbstractPredicateTransition
|
||||
}
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
*/
|
||||
|
||||
// atn/ package index: each module now exports its class directly
// (module.exports = Class), so re-export the require result itself rather
// than destructuring a named property as the pre-ES6 code did.
exports.ATN = require('./ATN');
exports.ATNDeserializer = require('./ATNDeserializer');
exports.LexerATNSimulator = require('./LexerATNSimulator');
exports.ParserATNSimulator = require('./ParserATNSimulator');
exports.PredictionMode = require('./PredictionMode');
|
||||
|
|
Loading…
Reference in New Issue