refactored CommonTokenStream.js to use es6 classes

fix: don't wrap class in an object for export
fix: annotate adjustSeekIndex with Number type to avoid warning
use const for better scoping
use jsdoc
This commit is contained in:
Camilo Roca 2020-03-15 16:10:50 +01:00
parent 5f6b7de72d
commit 181c44fb11
3 changed files with 87 additions and 91 deletions

View File

@@ -214,8 +214,8 @@ class BufferedTokenStream extends TokenStream {
* that * that
* the seek target is always an on-channel token.</p> * the seek target is always an on-channel token.</p>
* *
* @param i The target token index. * @param {Number} i The target token index.
* @return The adjusted target token index. * @return {Number} The adjusted target token index.
*/ */
adjustSeekIndex(i) { adjustSeekIndex(i) {
return i; return i;

View File

@@ -1,57 +1,52 @@
//
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved. /* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that * Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root. * can be found in the LICENSE.txt file in the project root.
*/ */
///
//
// This class extends {@link BufferedTokenStream} with functionality to filter
// token streams to tokens on a particular channel (tokens where
// {@link Token//getChannel} returns a particular value).
//
// <p>
// This token stream provides access to all tokens by index or when calling
// methods like {@link //getText}. The channel filtering is only used for code
// accessing tokens via the lookahead methods {@link //LA}, {@link //LT}, and
// {@link //LB}.</p>
//
// <p>
// By default, tokens are placed on the default channel
// ({@link Token//DEFAULT_CHANNEL}), but may be reassigned by using the
// {@code ->channel(HIDDEN)} lexer command, or by using an embedded action to
// call {@link Lexer//setChannel}.
// </p>
//
// <p>
// Note: lexer rules which use the {@code ->skip} lexer command or call
// {@link Lexer//skip} do not produce tokens at all, so input text matched by
// such a rule will not be available as part of the token stream, regardless of
// channel.</p>
///
var Token = require('./Token').Token; const Token = require('./Token').Token;
var BufferedTokenStream = require('./BufferedTokenStream'); const BufferedTokenStream = require('./BufferedTokenStream');
function CommonTokenStream(lexer, channel) { /**
BufferedTokenStream.call(this, lexer); * This class extends {@link BufferedTokenStream} with functionality to filter
* token streams to tokens on a particular channel (tokens where
* {@link Token//getChannel} returns a particular value).
*
* <p>
* This token stream provides access to all tokens by index or when calling
* methods like {@link //getText}. The channel filtering is only used for code
* accessing tokens via the lookahead methods {@link //LA}, {@link //LT}, and
* {@link //LB}.</p>
*
* <p>
* By default, tokens are placed on the default channel
* ({@link Token//DEFAULT_CHANNEL}), but may be reassigned by using the
* {@code ->channel(HIDDEN)} lexer command, or by using an embedded action to
* call {@link Lexer//setChannel}.
* </p>
*
* <p>
* Note: lexer rules which use the {@code ->skip} lexer command or call
* {@link Lexer//skip} do not produce tokens at all, so input text matched by
* such a rule will not be available as part of the token stream, regardless of
* channel.</p>
*/
class CommonTokenStream extends BufferedTokenStream {
constructor(lexer, channel) {
super(lexer);
this.channel = channel===undefined ? Token.DEFAULT_CHANNEL : channel; this.channel = channel===undefined ? Token.DEFAULT_CHANNEL : channel;
return this;
} }
CommonTokenStream.prototype = Object.create(BufferedTokenStream.prototype); adjustSeekIndex(i) {
CommonTokenStream.prototype.constructor = CommonTokenStream;
CommonTokenStream.prototype.adjustSeekIndex = function(i) {
return this.nextTokenOnChannel(i, this.channel); return this.nextTokenOnChannel(i, this.channel);
}; }
CommonTokenStream.prototype.LB = function(k) { LB(k) {
if (k===0 || this.index-k<0) { if (k===0 || this.index-k<0) {
return null; return null;
} }
var i = this.index; let i = this.index;
var n = 1; let n = 1;
// find k good tokens looking backwards // find k good tokens looking backwards
while (n <= k) { while (n <= k) {
// skip off-channel tokens // skip off-channel tokens
@@ -62,9 +57,9 @@ CommonTokenStream.prototype.LB = function(k) {
return null; return null;
} }
return this.tokens[i]; return this.tokens[i];
}; }
CommonTokenStream.prototype.LT = function(k) { LT(k) {
this.lazyInit(); this.lazyInit();
if (k === 0) { if (k === 0) {
return null; return null;
@@ -72,8 +67,8 @@ CommonTokenStream.prototype.LT = function(k) {
if (k < 0) { if (k < 0) {
return this.LB(-k); return this.LB(-k);
} }
var i = this.index; let i = this.index;
var n = 1; // we know tokens[pos] is a good one let n = 1; // we know tokens[pos] is a good one
// find k good tokens // find k good tokens
while (n < k) { while (n < k) {
// skip off-channel tokens, but make sure to not look past EOF // skip off-channel tokens, but make sure to not look past EOF
@@ -83,14 +78,14 @@ CommonTokenStream.prototype.LT = function(k) {
n += 1; n += 1;
} }
return this.tokens[i]; return this.tokens[i];
}; }
// Count EOF just once./// // Count EOF just once.
CommonTokenStream.prototype.getNumberOfOnChannelTokens = function() { getNumberOfOnChannelTokens() {
var n = 0; let n = 0;
this.fill(); this.fill();
for (var i =0; i< this.tokens.length;i++) { for (let i =0; i< this.tokens.length;i++) {
var t = this.tokens[i]; const t = this.tokens[i];
if( t.channel===this.channel) { if( t.channel===this.channel) {
n += 1; n += 1;
} }
@@ -99,6 +94,7 @@ CommonTokenStream.prototype.getNumberOfOnChannelTokens = function() {
} }
} }
return n; return n;
}; }
}
exports.CommonTokenStream = CommonTokenStream; module.exports = CommonTokenStream;

View File

@@ -13,7 +13,7 @@ exports.CharStreams = require('./CharStreams');
exports.CommonToken = require('./Token').CommonToken; exports.CommonToken = require('./Token').CommonToken;
exports.InputStream = require('./InputStream').InputStream; exports.InputStream = require('./InputStream').InputStream;
exports.FileStream = require('./FileStream').FileStream; exports.FileStream = require('./FileStream').FileStream;
exports.CommonTokenStream = require('./CommonTokenStream').CommonTokenStream; exports.CommonTokenStream = require('./CommonTokenStream');
exports.Lexer = require('./Lexer').Lexer; exports.Lexer = require('./Lexer').Lexer;
exports.Parser = require('./Parser').Parser; exports.Parser = require('./Parser').Parser;
var pc = require('./PredictionContext'); var pc = require('./PredictionContext');