refactored ATNSimulator.js to use ES6 classes

use const for better scoping
fix: don't wrap class in object for export
Camilo Roca 2020-03-06 19:54:10 +01:00
parent 43f896d6dc
commit 01c5dca26a
3 changed files with 41 additions and 41 deletions
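
The export fix above means the class itself becomes the value of module.exports rather than a property on an exports object, so callers no longer destructure it out of a wrapper. A minimal before/after sketch of the consumer side (illustrative only; the variable name is arbitrary):

    // before this commit: the class sat inside the exported object
    // var ATNSimulator = require('./ATNSimulator').ATNSimulator;

    // after this commit: the class is the module's export
    const ATNSimulator = require('./ATNSimulator');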


@@ -1,52 +1,52 @@
//
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
///
-var DFAState = require('./../dfa/DFAState').DFAState;
-var ATNConfigSet = require('./ATNConfigSet').ATNConfigSet;
-var getCachedPredictionContext = require('./../PredictionContext').getCachedPredictionContext;
-var Map = require('./../Utils').Map;
+const {DFAState} = require('./../dfa/DFAState');
+const {ATNConfigSet} = require('./ATNConfigSet');
+const {getCachedPredictionContext} = require('./../PredictionContext');
+const {Map} = require('./../Utils');
-function ATNSimulator(atn, sharedContextCache) {
+class ATNSimulator {
+    constructor(atn, sharedContextCache) {
+        /**
+         * The context cache maps all PredictionContext objects that are ==
+         * to a single cached copy. This cache is shared across all contexts
+         * in all ATNConfigs in all DFA states. We rebuild each ATNConfigSet
+         * to use only cached nodes/graphs in addDFAState(). We don't want to
+         * fill this during closure() since there are lots of contexts that
+         * pop up but are not used ever again. It also greatly slows down closure().
+         *
+         * <p>This cache makes a huge difference in memory and a little bit in speed.
+         * For the Java grammar on java.*, it dropped the memory requirements
+         * at the end from 25M to 16M. We don't store any of the full context
+         * graphs in the DFA because they are limited to local context only,
+         * but apparently there's a lot of repetition there as well. We optimize
+         * the config contexts before storing the config set in the DFA states
+         * by literally rebuilding them with cached subgraphs only.</p>
+         *
+         * <p>I tried a cache for use during closure operations, that was
+         * whacked after each adaptivePredict(). It cost a little bit
+         * more time I think and doesn't save on the overall footprint
+         * so it's not worth the complexity.</p>
+         */
+        this.atn = atn;
+        this.sharedContextCache = sharedContextCache;
+        return this;
+    }
-    // The context cache maps all PredictionContext objects that are ==
-    // to a single cached copy. This cache is shared across all contexts
-    // in all ATNConfigs in all DFA states. We rebuild each ATNConfigSet
-    // to use only cached nodes/graphs in addDFAState(). We don't want to
-    // fill this during closure() since there are lots of contexts that
-    // pop up but are not used ever again. It also greatly slows down closure().
-    //
-    // <p>This cache makes a huge difference in memory and a little bit in speed.
-    // For the Java grammar on java.*, it dropped the memory requirements
-    // at the end from 25M to 16M. We don't store any of the full context
-    // graphs in the DFA because they are limited to local context only,
-    // but apparently there's a lot of repetition there as well. We optimize
-    // the config contexts before storing the config set in the DFA states
-    // by literally rebuilding them with cached subgraphs only.</p>
-    //
-    // <p>I tried a cache for use during closure operations, that was
-    // whacked after each adaptivePredict(). It cost a little bit
-    // more time I think and doesn't save on the overall footprint
-    // so it's not worth the complexity.</p>
-    ///
-    this.atn = atn;
-    this.sharedContextCache = sharedContextCache;
-    return this;
+    getCachedContext(context) {
+        if (this.sharedContextCache ===null) {
+            return context;
+        }
+        const visited = new Map();
+        return getCachedPredictionContext(context, this.sharedContextCache, visited);
+    }
+}
// Must distinguish between missing edge and edge we know leads nowhere///
ATNSimulator.ERROR = new DFAState(0x7FFFFFFF, new ATNConfigSet());
-ATNSimulator.prototype.getCachedContext = function(context) {
-    if (this.sharedContextCache ===null) {
-        return context;
-    }
-    var visited = new Map();
-    return getCachedPredictionContext(context, this.sharedContextCache, visited);
-};
-exports.ATNSimulator = ATNSimulator;
+module.exports = ATNSimulator;
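
The comment block in the constructor above explains what the shared context cache is for; the sketch below shows how getCachedContext behaves in the new class. It is illustrative only: atn, sharedContextCache and ctx are assumed to be built elsewhere (e.g. by the owning recognizer) and are not part of this commit.

    const ATNSimulator = require('./ATNSimulator');

    const sim = new ATNSimulator(atn, sharedContextCache);
    const cached = sim.getCachedContext(ctx); // ==-equal PredictionContext graphs collapse to one cached copy

    const noCache = new ATNSimulator(atn, null);
    noCache.getCachedContext(ctx) === ctx;    // with no shared cache the context is returned unchanged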


@@ -24,7 +24,7 @@
var Token = require('./../Token').Token;
var Lexer = require('./../Lexer').Lexer;
var ATN = require('./ATN');
-var ATNSimulator = require('./ATNSimulator').ATNSimulator;
+var ATNSimulator = require('./ATNSimulator');
var DFAState = require('./../dfa/DFAState').DFAState;
var ATNConfigSet = require('./ATNConfigSet').ATNConfigSet;
var OrderedATNConfigSet = require('./ATNConfigSet').OrderedATNConfigSet;


@@ -244,7 +244,7 @@ var ATNConfigSet = require('./ATNConfigSet').ATNConfigSet;
var Token = require('./../Token').Token;
var DFAState = require('./../dfa/DFAState').DFAState;
var PredPrediction = require('./../dfa/DFAState').PredPrediction;
-var ATNSimulator = require('./ATNSimulator').ATNSimulator;
+var ATNSimulator = require('./ATNSimulator');
var PredictionMode = require('./PredictionMode').PredictionMode;
var RuleContext = require('./../RuleContext');
var ParserRuleContext = require('./../ParserRuleContext').ParserRuleContext;
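
The two dependent simulators above only needed their require of ATNSimulator adjusted. As a hypothetical sketch (the subclass name is invented, and whether a given simulator extends ATNSimulator this way is an assumption, not something this commit shows), the direct class export also allows plain ES6 inheritance:

    const ATNSimulator = require('./ATNSimulator');

    class MySimulator extends ATNSimulator {
        constructor(atn, sharedContextCache) {
            super(atn, sharedContextCache); // base constructor stores atn and the shared cache
        }
    }

    module.exports = MySimulator;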