forked from jasder/antlr
while -> for
parent 1ac3d2701a
commit dc5cf284ed
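The change below is mechanical: the half-ported Go sources still contained JavaScript-style "while" loops, which Go does not have, so each one becomes a "for" with a bare condition (a few signatures also pick up Go parameter types along the way). As a hedged illustration of that idiom only (the function and names here are invented for this note and do not appear in the diff):

    package main

    import "fmt"

    // nextPositive scans values from index i until it finds a positive entry,
    // the same shape as the token-scanning loops rewritten below.
    func nextPositive(values []int, i int) int {
        // In the JS runtime this was "while (cond) { ... }"; Go spells it "for cond { ... }".
        for i < len(values) && values[i] <= 0 {
            i += 1
        }
        if i >= len(values) {
            return -1
        }
        return i
    }

    func main() {
        fmt.Println(nextPositive([]int{0, -3, 7, 2}, 0)) // prints 2
    }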
@@ -241,14 +241,14 @@ func (bt *BufferedTokenStream) setTokenSource(tokenSource) {
 // Return i if tokens[i] is on channel. Return -1 if there are no tokens
 // on channel between i and EOF.
 // /
-func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel) {
+func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel int) {
     bt.sync(i)
     if (i >= len(bt.tokens)) {
         return -1
     }
     var token = bt.tokens[i]
-    while (token.channel != bt.channel) {
-        if (token.type == TokenEOF) {
+    for (token.channel != bt.channel) {
+        if (token.tokenType == TokenEOF) {
             return -1
         }
         i += 1
@@ -261,8 +261,8 @@ func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel) {
 // Given a starting index, return the index of the previous token on channel.
 // Return i if tokens[i] is on channel. Return -1 if there are no tokens
 // on channel between i and 0.
-func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel) {
-    while (i >= 0 && bt.tokens[i].channel != channel) {
+func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel int) {
+    for (i >= 0 && bt.tokens[i].channel != channel) {
         i -= 1
     }
     return i
@@ -271,8 +271,7 @@ func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel) {
 // Collect all tokens on specified channel to the right of
 // the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
 // EOF. If channel is -1, find any non default channel token.
-func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex,
-        channel) {
+func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) {
     if (channel == undefined) {
         channel = -1
     }
@@ -291,8 +290,7 @@ func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex,
 // Collect all tokens on specified channel to the left of
 // the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
 // If channel is -1, find any non default channel token.
-func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex,
-        channel) {
+func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) {
     if (channel == undefined) {
         channel = -1
     }
@@ -311,9 +309,9 @@ func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex,
     return bt.filterForChannel(from_, to, channel)
 }
 
-func (bt *BufferedTokenStream) filterForChannel(left, right, channel) {
+func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) {
     var hidden = []
-    for var i = left; i < right + 1; i++ {
+    for i := left; i < right + 1; i++ {
         var t = bt.tokens[i]
         if (channel == -1) {
             if (t.channel != LexerDefaultTokenChannel) {
@@ -334,10 +332,10 @@ func (bt *BufferedTokenStream) getSourceName() {
 }
 
 // Get the text of all tokens in bt buffer.///
-func (bt *BufferedTokenStream) getText(interval) string {
+func (bt *BufferedTokenStream) getText(interval *Interval) string {
     bt.lazyInit()
     bt.fill()
-    if (interval == undefined || interval == nil) {
+    if (interval == nil) {
         interval = NewInterval(0, len(bt.tokens) - 1)
     }
     var start = interval.start
@@ -479,7 +479,7 @@ func (p.*Parser) unrollRecursionContexts(parentCtx) {
     var retCtx = p._ctx // save current ctx (return value)
     // unroll so _ctx is as it was before call to recursive method
     if (p._parseListeners != nil) {
-        while (p._ctx != parentCtx) {
+        for (p._ctx != parentCtx) {
             p.triggerExitRuleEvent()
             p._ctx = p._ctx.parentCtx
         }
@@ -496,7 +496,7 @@ func (p.*Parser) unrollRecursionContexts(parentCtx) {
 
 func (p.*Parser) getInvokingContext(ruleIndex) {
     var ctx = p._ctx
-    while (ctx != nil) {
+    for (ctx != nil) {
         if (ctx.ruleIndex == ruleIndex) {
             return ctx
         }
@@ -539,7 +539,7 @@ func (p.*Parser) isExpectedToken(symbol) {
     if (!following.contains(TokenEpsilon)) {
         return false
     }
-    while (ctx != nil && ctx.invokingState >= 0 && following.contains(TokenEpsilon)) {
+    for (ctx != nil && ctx.invokingState >= 0 && following.contains(TokenEpsilon)) {
         var invokingState = atn.states[ctx.invokingState]
         var rt = invokingState.transitions[0]
         following = atn.nextTokens(rt.followState)
@@ -594,7 +594,7 @@ func (p.*Parser) getRuleInvocationStack(p) {
         p = p._ctx
     }
     var stack = []
-    while (p != nil) {
+    for (p != nil) {
         // compute what follows who invoked us
         var ruleIndex = p.ruleIndex
         if (ruleIndex < 0) {
@@ -274,8 +274,8 @@ func (this *ArrayPredictionContext) toString() string {
 // Convert a {@link RuleContext} tree to a {@link PredictionContext} graph.
 // Return {@link //EMPTY} if {@code outerContext} is empty or nil.
 // /
-func predictionContextFromRuleContext(atn, outerContext) {
-    if (outerContext == undefined || outerContext == nil) {
+func predictionContextFromRuleContext(atn *ATN, outerContext *RuleContext) {
+    if (outerContext == nil) {
         outerContext = RuleContext.EMPTY
     }
     // if we are in RuleContext of start rule, s, then PredictionContext
@@ -540,7 +540,7 @@ func mergeArrays(a, b, rootIsWildcard, mergeCache) {
     var mergedReturnStates = []
     var mergedParents = []
     // walk and merge to yield mergedParents, mergedReturnStates
-    while (i < a.returnStates.length && j < b.returnStates.length) {
+    for (i < a.returnStates.length && j < b.returnStates.length) {
         var a_parent = a.parents[i]
         var b_parent = b.parents[j]
         if (a.returnStates[i] == b.returnStates[j]) {
@@ -680,8 +680,7 @@ func getCachedPredictionContext(context, contextCache, visited) {
     if (parents.length == 0) {
         updated = PredictionContext.EMPTY
     } else if (parents.length == 1) {
-        updated = SingletonPredictionContext.create(parents[0], context
-                .getReturnState(0))
+        updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0))
     } else {
         updated = NewArrayPredictionContext(parents, context.returnStates)
     }
@@ -41,7 +41,7 @@ func RuleContext(parent, invokingState) {
 func (this *RuleContext) depth() {
     var n = 0
     var p = this
-    while (p != nil) {
+    for (p != nil) {
         p = p.parentCtx
         n += 1
     }
@@ -115,7 +115,7 @@ func (this *RuleContext) toString(ruleNames, stop) {
     stop = stop || nil
     var p = this
     var s = "["
-    while (p != nil && p != stop) {
+    for (p != nil && p != stop) {
         if (ruleNames == nil) {
             if (!p.isEmpty()) {
                 s += p.invokingState
@@ -87,7 +87,7 @@ func (this *ATN) addState( state) {
     this.states.push(state)
 }
 
-func (this *ATN) removeState( state) {
+func (this *ATN) removeState( state ) {
     this.states[state.stateNumber] = nil // just free mem, don't shift states in list
 }
 
@@ -137,7 +137,7 @@ func (this *ATN) getExpectedTokens( stateNumber, ctx ) {
     var expected = NewIntervalSet()
     expected.addSet(following)
     expected.removeOne(TokenEpsilon)
-    while (ctx != nil && ctx.invokingState >= 0 && following.contains(TokenEpsilon)) {
+    for (ctx != nil && ctx.invokingState >= 0 && following.contains(TokenEpsilon)) {
         var invokingState = this.states[ctx.invokingState]
         var rt = invokingState.transitions[0]
         following = this.nextTokens(rt.followState)
@@ -423,7 +423,7 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn, idx) {
     // instead
     var ruleToStartState = atn.ruleToStartState[idx]
     var count = ruleToStartState.transitions.length
-    while ( count > 0) {
+    for ( count > 0) {
         bypassStart.addTransition(ruleToStartState.transitions[count-1])
         ruleToStartState.transitions = ruleToStartState.transitions.slice(-1)
     }
@@ -148,7 +148,7 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
     var t = input.LA(1)
     var s = ds0 // s is current/from DFA state
 
-    while (true) { // while more work
+    for (true) { // while more work
         if (this.debug) {
             console.log("execATN loop starting closure: " + s.configs)
         }
@@ -13,23 +13,27 @@ const (
     LexerActionTypeSKIP = 6 //The type of a {@link LexerSkipAction} action.
     LexerActionTypeTYPE = 7 //The type of a {@link LexerTypeAction} action.
 )
+type LexerAction struct {
+    actionType LexerActionType
+    isPositionDependent bool
+}
 
-func LexerAction(action) {
-    this.actionType = action
-    this.isPositionDependent = false
-    return this
+func LexerAction(action LexerActionType) *LexerAction {
+    la := new(LexerAction)
+    la.actionType = action
+    la.isPositionDependent = false
+    return la
 }
 
 func (this *LexerAction) hashString() {
     return "" + this.actionType
 }
 
-func (this *LexerAction) equals(other) {
+func (this *LexerAction) equals(other *LexerAction) {
     return this == other
 }
 
-
 
 //
 // Implements the {@code skip} lexer action by calling {@link Lexer//skip}.
 //
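The LexerAction hunk above also shows the constructor pattern used throughout the port: the JavaScript "this"-based initializer becomes a Go function that allocates the struct and returns a pointer. A minimal sketch of that shape, with invented names rather than the runtime's own types:

    package main

    import "fmt"

    // action mirrors the struct-plus-constructor layout introduced above,
    // but with placeholder names; it is not part of the ANTLR runtime.
    type action struct {
        actionType          int
        isPositionDependent bool
    }

    func newAction(actionType int) *action {
        a := new(action)
        a.actionType = actionType
        a.isPositionDependent = false
        return a
    }

    func main() {
        fmt.Println(newAction(6).actionType) // prints 6
    }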
@@ -46,7 +50,7 @@ type LexerSkipAction struct {
 // Provides a singleton instance of this parameterless lexer action.
 LexerSkipAction.INSTANCE = NewLexerSkipAction()
 
-func (this *LexerSkipAction) execute(lexer) {
+func (this *LexerSkipAction) execute(lexer *Lexer) {
     lexer.skip()
 }
 
@@ -65,7 +69,7 @@ func LexerTypeAction(type) {
 //LexerTypeAction.prototype = Object.create(LexerAction.prototype)
 //LexerTypeAction.prototype.constructor = LexerTypeAction
 
-func (this *LexerTypeAction) execute(lexer) {
+func (this *LexerTypeAction) execute(lexer *Lexer) {
     lexer.type = this.type
 }
 
@@ -101,7 +105,7 @@ func LexerPushModeAction(mode) {
 
 // <p>This action is implemented by calling {@link Lexer//pushMode} with the
 // value provided by {@link //getMode}.</p>
-func (this *LexerPushModeAction) execute(lexer) {
+func (this *LexerPushModeAction) execute(lexer *Lexer) {
     lexer.pushMode(this.mode)
 }
 
@@ -139,7 +143,7 @@ type LexerPopModeAction struct {
 LexerPopModeAction.INSTANCE = NewLexerPopModeAction()
 
 // <p>This action is implemented by calling {@link Lexer//popMode}.</p>
-func (this *LexerPopModeAction) execute(lexer) {
+func (this *LexerPopModeAction) execute(lexer *Lexer) {
     lexer.popMode()
 }
 
@@ -162,7 +166,7 @@ type LexerMoreAction struct {
 LexerMoreAction.INSTANCE = NewLexerMoreAction()
 
 // <p>This action is implemented by calling {@link Lexer//popMode}.</p>
-func (this *LexerMoreAction) execute(lexer) {
+func (this *LexerMoreAction) execute(lexer *Lexer) {
     lexer.more()
 }
 
@@ -184,7 +188,7 @@ func LexerModeAction(mode) {
 
 // <p>This action is implemented by calling {@link Lexer//mode} with the
 // value provided by {@link //getMode}.</p>
-func (this *LexerModeAction) execute(lexer) {
+func (this *LexerModeAction) execute(lexer *Lexer) {
     lexer.mode(this.mode)
 }
 
@@ -237,7 +241,7 @@ func LexerCustomAction(ruleIndex, actionIndex) {
 
 // <p>Custom actions are implemented by calling {@link Lexer//action} with the
 // appropriate rule and action indexes.</p>
-func (this *LexerCustomAction) execute(lexer) {
+func (this *LexerCustomAction) execute(lexer *Lexer) {
     lexer.action(nil, this.ruleIndex, this.actionIndex)
 }
 
@@ -270,7 +274,7 @@ func LexerChannelAction(channel) {
 
 // <p>This action is implemented by calling {@link Lexer//setChannel} with the
 // value provided by {@link //getChannel}.</p>
-func (this *LexerChannelAction) execute(lexer) {
+func (this *LexerChannelAction) execute(lexer *Lexer) {
     lexer._channel = this.channel
 }
 
@@ -325,9 +329,9 @@ func LexerIndexedCustomAction(offset, action) {
 
 // <p>This method calls {@link //execute} on the result of {@link //getAction}
 // using the provided {@code lexer}.</p>
-func (this *LexerIndexedCustomAction) execute(lexer) {
+func (this *LexerIndexedCustomAction) execute(lexer *Lexer) {
     // assume the input stream position was properly set by the calling code
-    this.action.execute(lexer)
+    this.action.execute(lexer *Lexer)
 }
 
 func (this *LexerIndexedCustomAction) hashString() {
@@ -606,7 +606,7 @@ ParserATNSimulator.prototype.execATNWithFullContext = function(dfa, D, // how fa
     input.seek(startIndex)
     var t = input.LA(1)
     var predictedAlt = -1
-    while (true) { // while more work
+    for (true) { // while more work
         reach = this.computeReachSet(previous, t, fullCtx)
         if (reach==nil) {
             // if any configs in previous dipped into outer context, that
@@ -667,7 +667,7 @@ func (this *DefaultErrorStrategy) getErrorRecoverySet(recognizer) {
     var atn = recognizer._interp.atn
     var ctx = recognizer._ctx
     var recoverSet = NewIntervalSet()
-    while (ctx != nil && ctx.invokingState>=0) {
+    for (ctx != nil && ctx.invokingState>=0) {
         // compute what follows who invoked us
         var invokingState = atn.states[ctx.invokingState]
         var rt = invokingState.transitions[0]
@@ -731,7 +731,7 @@ type BailErrorStrategy struct {
 //
 func (this *BailErrorStrategy) recover(recognizer, e) {
     var context = recognizer._ctx
-    while (context != nil) {
+    for (context != nil) {
         context.exception = e
         context = context.parentCtx
     }