More bugs

Peter Boyer 2015-12-29 15:32:11 -06:00
parent 56e6e6c26f
commit 7798333f72
15 changed files with 452 additions and 492 deletions

View File

@@ -15,6 +15,8 @@ import (
 //
 type IATNConfig interface {
+	Hasher
 	getPrecedenceFilterSuppressed() bool
 	setPrecedenceFilterSuppressed(bool)
@@ -30,7 +32,7 @@ type IATNConfig interface {
 	String() string
-	shortHashString() string
+	shortHash() string
 }
 type ATNConfig struct {
@@ -152,17 +154,17 @@ func (this *ATNConfig) equals(other interface{}) bool {
 	}
 }
-func (this *ATNConfig) shortHashString() string {
+func (this *ATNConfig) shortHash() string {
 	return strconv.Itoa(this.state.GetStateNumber()) + "/" + strconv.Itoa(this.alt) + "/" + this.semanticContext.String()
 }
-func (this *ATNConfig) hashString() string {
+func (this *ATNConfig) Hash() string {
 	var c string
 	if this.context == nil {
 		c = ""
 	} else {
-		c = this.context.hashString()
+		c = this.context.Hash()
 	}
 	return strconv.Itoa(this.state.GetStateNumber()) + "/" + strconv.Itoa(this.alt) + "/" + c + "/" + this.semanticContext.String()
@@ -262,7 +264,7 @@ func NewLexerATNConfig1(state IATNState, alt int, context IPredictionContext) *L
 	return this
 }
-func (this *LexerATNConfig) hashString() string {
+func (this *LexerATNConfig) Hash() string {
 	var f string
 	if this.passedThroughNonGreedyDecision {
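Throughout this commit, hashString and shortHashString become Hash and shortHash so these types satisfy the Hasher interface added in utils.go further down. A minimal sketch of the scheme, using a hypothetical config stand-in (not the real ATNConfig), shows how such composed string keys deduplicate values in a map:

package main

import (
	"fmt"
	"strconv"
)

// Hasher mirrors the interface this commit introduces in utils.go:
// anything that can render itself as a stable string key.
type Hasher interface {
	Hash() string
}

// config is a hypothetical stand-in for ATNConfig, not the real type.
type config struct {
	state, alt int
}

func (c *config) Hash() string {
	return strconv.Itoa(c.state) + "/" + strconv.Itoa(c.alt)
}

func main() {
	var _ Hasher = (*config)(nil) // config satisfies Hasher
	seen := map[string]*config{}
	for _, c := range []*config{{1, 2}, {1, 2}, {3, 4}} {
		if _, ok := seen[c.Hash()]; !ok {
			seen[c.Hash()] = c // first occurrence wins
		}
	}
	fmt.Println(len(seen)) // 2: the duplicate "1/2" config collapsed
}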

View File

@@ -11,7 +11,7 @@ import (
 ///
 func hashATNConfig(c interface{}) string {
-	return c.(IATNConfig).shortHashString()
+	return c.(IATNConfig).shortHash()
 }
 func equalATNConfigs(a, b interface{}) bool {
@@ -197,7 +197,7 @@ func (this *ATNConfigSet) equals(other interface{}) bool {
 		this.dipsIntoOuterContext == other2.dipsIntoOuterContext
 }
-func (this *ATNConfigSet) hashString() string {
+func (this *ATNConfigSet) Hash() string {
 	if this.readOnly {
 		if this.cachedHashString == "-1" {
 			this.cachedHashString = this.hashConfigs()

View File

@@ -1,396 +0,0 @@
// This implementation of {@link TokenStream} loads tokens from a
// {@link TokenSource} on-demand, and places the tokens in a buffer to provide
// access to any previous token by index.
//
// <p>
// This token stream ignores the value of {@link Token//getChannel}. If your
// parser requires the token stream to filter tokens to only those on a particular
// channel, such as {@link Token//DEFAULT_CHANNEL} or
// {@link Token//HIDDEN_CHANNEL}, use a filtering token stream such as
// {@link CommonTokenStream}.</p>
package antlr4
import (
"strconv"
"fmt"
)
// This type exists just to keep meaningful parameter types in Parser
type BufferedTokenStream struct {
tokenSource TokenSource
tokens []IToken
index int
fetchedEOF bool
channel int
}
func NewBufferedTokenStream(tokenSource TokenSource) *BufferedTokenStream {
ts := new(BufferedTokenStream)
// The {@link TokenSource} from which tokens for this stream are fetched.
ts.tokenSource = tokenSource
// A collection of all tokens fetched from the token source. The list is
// considered a complete view of the input once {@link //fetchedEOF} is set
// to {@code true}.
ts.tokens = make([]IToken, 0)
// The index into {@link //tokens} of the current token (next token to
// {@link //consume}). {@link //tokens}{@code [}{@link //p}{@code ]} should
// be
// {@link //LT LT(1)}.
//
// <p>This field is set to -1 when the stream is first constructed or when
// {@link //SetTokenSource} is called, indicating that the first token has
// not yet been fetched from the token source. For additional information,
// see the documentation of {@link IntStream} for a description of
// Initializing Methods.</p>
ts.index = -1
// Indicates whether the {@link Token//EOF} token has been fetched from
// {@link //tokenSource} and added to {@link //tokens}. This field improves
// performance for the following cases:
//
// <ul>
// <li>{@link //consume}: The lookahead check in {@link //consume} to
// prevent
// consuming the EOF symbol is optimized by checking the values of
// {@link //fetchedEOF} and {@link //p} instead of calling {@link
// //LA}.</li>
// <li>{@link //fetch}: The check to prevent adding multiple EOF symbols
// into
// {@link //tokens} is trivial with this field.</li>
// </ul>
ts.fetchedEOF = false
return ts
}
func (bt *BufferedTokenStream) Mark() int {
return 0
}
func (bt *BufferedTokenStream) Release(marker int) {
// no resources to release
}
func (bt *BufferedTokenStream) reset() {
bt.Seek(0)
}
func (bt *BufferedTokenStream) Seek(index int) {
bt.lazyInit()
bt.index = bt.adjustSeekIndex(index)
}
func (bt *BufferedTokenStream) Get(index int) IToken {
bt.lazyInit()
return bt.tokens[index]
}
func (bt *BufferedTokenStream) Consume() {
var skipEofCheck = false
if bt.index >= 0 {
if bt.fetchedEOF {
// the last token in tokens is EOF. skip check if p indexes any
// fetched token except the last.
skipEofCheck = bt.index < len(bt.tokens)-1
} else {
// no EOF token in tokens. skip check if p indexes a fetched token.
skipEofCheck = bt.index < len(bt.tokens)
}
} else {
// not yet initialized
skipEofCheck = false
}
fmt.Println("Consume 1")
if !skipEofCheck && bt.LA(1) == TokenEOF {
panic("cannot consume EOF")
}
if bt.Sync(bt.index + 1) {
fmt.Println("Consume 2")
bt.index = bt.adjustSeekIndex(bt.index + 1)
}
}
// Make sure index {@code i} in tokens has a token.
//
// @return {@code true} if a token is located at index {@code i}, otherwise
// {@code false}.
// @see //Get(int i)
// /
func (bt *BufferedTokenStream) Sync(i int) bool {
var n = i - len(bt.tokens) + 1 // how many more elements we need?
if n > 0 {
var fetched = bt.fetch(n)
fmt.Println("Sync done")
return fetched >= n
}
return true
}
// Add {@code n} elements to buffer.
//
// @return The actual number of elements added to the buffer.
// /
func (bt *BufferedTokenStream) fetch(n int) int {
if bt.fetchedEOF {
return 0
}
for i := 0; i < n; i++ {
var t IToken = bt.tokenSource.nextToken()
fmt.Println("fetch loop")
t.SetTokenIndex( len(bt.tokens) )
bt.tokens = append(bt.tokens, t)
if t.GetTokenType() == TokenEOF {
bt.fetchedEOF = true
return i + 1
}
}
fmt.Println("fetch done")
return n
}
// Get all tokens from start..stop inclusively///
func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet) []IToken {
if start < 0 || stop < 0 {
return nil
}
bt.lazyInit()
var subset = make([]IToken, 0)
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
for i := start; i < stop; i++ {
var t = bt.tokens[i]
if t.GetTokenType() == TokenEOF {
break
}
if types == nil || types.contains(t.GetTokenType()) {
subset = append(subset, t)
}
}
return subset
}
func (bt *BufferedTokenStream) LA(i int) int {
return bt.LT(i).GetTokenType()
}
func (bt *BufferedTokenStream) LB(k int) IToken {
if bt.index-k < 0 {
return nil
}
return bt.tokens[bt.index-k]
}
func (bt *BufferedTokenStream) LT(k int) IToken {
bt.lazyInit()
if k == 0 {
return nil
}
if k < 0 {
return bt.LB(-k)
}
var i = bt.index + k - 1
bt.Sync(i)
if i >= len(bt.tokens) { // return EOF token
// EOF must be last token
return bt.tokens[len(bt.tokens)-1]
}
return bt.tokens[i]
}
// Allows derived classes to modify the behavior of operations which change
// the current stream position by adjusting the target token index of a seek
// operation. The default implementation simply returns {@code i}. If this
// method panics, the current stream index should not be changed.
//
// <p>For example, {@link CommonTokenStream} overrides this method to ensure
// that the seek target is always an on-channel token.</p>
//
// @param i The target token index.
// @return The adjusted target token index.
func (bt *BufferedTokenStream) adjustSeekIndex(i int) int {
return i
}
func (bt *BufferedTokenStream) lazyInit() {
if bt.index == -1 {
bt.setup()
}
}
func (bt *BufferedTokenStream) setup() {
bt.Sync(0)
bt.index = bt.adjustSeekIndex(0)
}
func (bt *BufferedTokenStream) GetTokenSource() TokenSource {
return bt.tokenSource
}
// Reset this token stream by setting its token source.///
func (bt *BufferedTokenStream) SetTokenSource(tokenSource TokenSource) {
bt.tokenSource = tokenSource
bt.tokens = make([]IToken, 0)
bt.index = -1
}
// Given a starting index, return the index of the next token on channel.
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and EOF.
// /
func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel int) int {
bt.Sync(i)
if i >= len(bt.tokens) {
return -1
}
var token = bt.tokens[i]
for token.GetChannel() != bt.channel {
if token.GetTokenType() == TokenEOF {
return -1
}
i += 1
bt.Sync(i)
token = bt.tokens[i]
}
return i
}
// Given a starting index, return the index of the previous token on channel.
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and 0.
func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel int) int {
for i >= 0 && bt.tokens[i].GetChannel() != channel {
i -= 1
}
return i
}
// Collect all tokens on specified channel to the right of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
// EOF. If channel is -1, find any non default channel token.
func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
}
var nextOnChannel = bt.nextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
var from_ = tokenIndex + 1
// if there is none on-channel to the right, nextOnChannel is -1, so set 'to' to the last token
var to int
if nextOnChannel == -1 {
to = len(bt.tokens) - 1
} else {
to = nextOnChannel
}
return bt.filterForChannel(from_, to, channel)
}
// Collect all tokens on specified channel to the left of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
// If channel is -1, find any non default channel token.
func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
}
var prevOnChannel = bt.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
if prevOnChannel == tokenIndex-1 {
return nil
}
// if none on channel to left, prevOnChannel=-1 then from=0
var from_ = prevOnChannel + 1
var to = tokenIndex - 1
return bt.filterForChannel(from_, to, channel)
}
func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) []IToken {
var hidden = make([]IToken, 0)
for i := left; i < right+1; i++ {
var t = bt.tokens[i]
if channel == -1 {
if t.GetChannel() != LexerDefaultTokenChannel {
hidden = append(hidden, t)
}
} else if t.GetChannel() == channel {
hidden = append(hidden, t)
}
}
if len(hidden) == 0 {
return nil
}
return hidden
}
func (bt *BufferedTokenStream) GetSourceName() string {
return bt.tokenSource.GetSourceName()
}
func (bt *BufferedTokenStream) Size() int {
return len(bt.tokens)
}
func (bt *BufferedTokenStream) Index() int {
return bt.index
}
func (bt *BufferedTokenStream) GetAllText() string {
return bt.GetTextFromInterval(nil)
}
func (bt *BufferedTokenStream) GetTextFromTokens(start, end IToken) string {
return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
}
func (bt *BufferedTokenStream) GetTextFromRuleContext(interval IRuleContext) string {
return bt.GetTextFromInterval(interval.GetSourceInterval())
}
func (bt *BufferedTokenStream) GetTextFromInterval(interval *Interval) string {
bt.lazyInit()
bt.fill()
if interval == nil {
interval = NewInterval(0, len(bt.tokens)-1)
}
var start = interval.start
var stop = interval.stop
if start < 0 || stop < 0 {
return ""
}
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
var s = ""
for i := start; i < stop+1; i++ {
var t = bt.tokens[i]
if t.GetTokenType() == TokenEOF {
break
}
s += t.GetText()
}
return s
}
// Get all tokens from lexer until EOF///
func (bt *BufferedTokenStream) fill() {
bt.lazyInit()
for bt.fetch(1000) == 1000 {
continue
}
}
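The deleted file's buffering contract survives in common_token_stream.go below: Sync(i) guarantees the token at index i has been fetched if it exists, fetch(n) reports how many tokens it actually pulled before hitting EOF, and fill drains the source in batches. A self-contained sketch of that contract, with a hypothetical integer-token source standing in for TokenSource:

package main

import "fmt"

const tokenEOF = -1 // hypothetical stand-in for the TokenEOF constant

// source is a hypothetical stand-in for TokenSource.
type source struct{ next, max int }

func (s *source) nextToken() int {
	if s.next >= s.max {
		return tokenEOF
	}
	s.next++
	return s.next
}

type stream struct {
	src        *source
	tokens     []int
	fetchedEOF bool
}

// fetch pulls up to n tokens, stopping early at EOF; it returns the
// number actually added, matching the contract documented above.
func (st *stream) fetch(n int) int {
	if st.fetchedEOF {
		return 0
	}
	for i := 0; i < n; i++ {
		t := st.src.nextToken()
		st.tokens = append(st.tokens, t)
		if t == tokenEOF {
			st.fetchedEOF = true
			return i + 1
		}
	}
	return n
}

// sync ensures index i is buffered, reporting whether a token exists there.
func (st *stream) sync(i int) bool {
	if n := i - len(st.tokens) + 1; n > 0 {
		return st.fetch(n) >= n
	}
	return true
}

func main() {
	st := &stream{src: &source{max: 3}}
	fmt.Println(st.sync(1), st.tokens) // true [1 2]
	fmt.Println(st.sync(5), st.tokens) // false [1 2 3 -1]: EOF reached first
}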

View File

@@ -1,38 +1,357 @@
-//
-// This class extends {@link BufferedTokenStream} with functionality to filter
-// token streams to tokens on a particular channel (tokens where
-// {@link Token//getChannel} returns a particular value).
-//
-// <p>
-// This token stream provides access to all tokens by index or when calling
-// methods like {@link //GetText}. The channel filtering is only used for code
-// accessing tokens via the lookahead methods {@link //LA}, {@link //LT}, and
-// {@link //LB}.</p>
-//
-// <p>
-// By default, tokens are placed on the default channel
-// ({@link Token//DEFAULT_CHANNEL}), but may be reassigned by using the
-// {@code ->channel(HIDDEN)} lexer command, or by using an embedded action to
-// call {@link Lexer//setChannel}.
-// </p>
-//
-// <p>
-// Note: lexer rules which use the {@code ->skip} lexer command or call
-// {@link Lexer//skip} do not produce tokens at all, so input text Matched by
-// such a rule will not be available as part of the token stream, regardless of
-// channel.</p>
-///
+// This implementation of {@link TokenStream} loads tokens from a
+// {@link TokenSource} on-demand, and places the tokens in a buffer to provide
+// access to any previous token by index.
+//
+// <p>
+// This token stream ignores the value of {@link Token//getChannel}. If your
+// parser requires the token stream to filter tokens to only those on a particular
+// channel, such as {@link Token//DEFAULT_CHANNEL} or
+// {@link Token//HIDDEN_CHANNEL}, use a filtering token stream such as
+// {@link CommonTokenStream}.</p>
 package antlr4
+import (
+	"strconv"
+	"fmt"
+)
func (bt *CommonTokenStream) Mark() int {
return 0
}
func (bt *CommonTokenStream) Release(marker int) {
// no resources to release
}
func (bt *CommonTokenStream) reset() {
bt.Seek(0)
}
func (bt *CommonTokenStream) Seek(index int) {
bt.lazyInit()
bt.index = bt.adjustSeekIndex(index)
}
func (bt *CommonTokenStream) Get(index int) IToken {
bt.lazyInit()
return bt.tokens[index]
}
func (bt *CommonTokenStream) Consume() {
var skipEofCheck = false
if bt.index >= 0 {
if bt.fetchedEOF {
// the last token in tokens is EOF. skip check if p indexes any
// fetched token except the last.
skipEofCheck = bt.index < len(bt.tokens)-1
} else {
// no EOF token in tokens. skip check if p indexes a fetched token.
skipEofCheck = bt.index < len(bt.tokens)
}
} else {
// not yet initialized
skipEofCheck = false
}
fmt.Println("Consume 1")
if !skipEofCheck && bt.LA(1) == TokenEOF {
panic("cannot consume EOF")
}
if bt.Sync(bt.index + 1) {
fmt.Println("Consume 2")
bt.index = bt.adjustSeekIndex(bt.index + 1)
}
}
// Make sure index {@code i} in tokens has a token.
//
// @return {@code true} if a token is located at index {@code i}, otherwise
// {@code false}.
// @see //Get(int i)
// /
func (bt *CommonTokenStream) Sync(i int) bool {
var n = i - len(bt.tokens) + 1 // how many more elements we need?
if n > 0 {
var fetched = bt.fetch(n)
fmt.Println("Sync done")
return fetched >= n
}
return true
}
// Add {@code n} elements to buffer.
//
// @return The actual number of elements added to the buffer.
// /
func (bt *CommonTokenStream) fetch(n int) int {
if bt.fetchedEOF {
return 0
}
for i := 0; i < n; i++ {
var t IToken = bt.tokenSource.nextToken()
fmt.Println("fetch loop")
t.SetTokenIndex( len(bt.tokens) )
bt.tokens = append(bt.tokens, t)
if t.GetTokenType() == TokenEOF {
bt.fetchedEOF = true
return i + 1
}
}
fmt.Println("fetch done")
return n
}
// Get all tokens from start..stop inclusively///
func (bt *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []IToken {
if start < 0 || stop < 0 {
return nil
}
bt.lazyInit()
var subset = make([]IToken, 0)
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
for i := start; i < stop; i++ {
var t = bt.tokens[i]
if t.GetTokenType() == TokenEOF {
break
}
if types == nil || types.contains(t.GetTokenType()) {
subset = append(subset, t)
}
}
return subset
}
func (bt *CommonTokenStream) LA(i int) int {
return bt.LT(i).GetTokenType()
}
func (bt *CommonTokenStream) lazyInit() {
if bt.index == -1 {
bt.setup()
}
}
func (bt *CommonTokenStream) setup() {
bt.Sync(0)
bt.index = bt.adjustSeekIndex(0)
}
func (bt *CommonTokenStream) GetTokenSource() TokenSource {
return bt.tokenSource
}
// Reset this token stream by setting its token source.///
func (bt *CommonTokenStream) SetTokenSource(tokenSource TokenSource) {
bt.tokenSource = tokenSource
bt.tokens = make([]IToken, 0)
bt.index = -1
}
// Given a starting index, return the index of the next token on channel.
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and EOF.
// /
func (bt *CommonTokenStream) nextTokenOnChannel(i, channel int) int {
bt.Sync(i)
if i >= len(bt.tokens) {
return -1
}
var token = bt.tokens[i]
for token.GetChannel() != bt.channel {
if token.GetTokenType() == TokenEOF {
return -1
}
i += 1
bt.Sync(i)
token = bt.tokens[i]
}
return i
}
// Given a starting index, return the index of the previous token on channel.
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and 0.
func (bt *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
for i >= 0 && bt.tokens[i].GetChannel() != channel {
i -= 1
}
return i
}
// Collect all tokens on specified channel to the right of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
// EOF. If channel is -1, find any non default channel token.
func (bt *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
}
var nextOnChannel = bt.nextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
var from_ = tokenIndex + 1
// if there is none on-channel to the right, nextOnChannel is -1, so set 'to' to the last token
var to int
if nextOnChannel == -1 {
to = len(bt.tokens) - 1
} else {
to = nextOnChannel
}
return bt.filterForChannel(from_, to, channel)
}
// Collect all tokens on specified channel to the left of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
// If channel is -1, find any non default channel token.
func (bt *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
}
var prevOnChannel = bt.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
if prevOnChannel == tokenIndex-1 {
return nil
}
// if none on channel to left, prevOnChannel=-1 then from=0
var from_ = prevOnChannel + 1
var to = tokenIndex - 1
return bt.filterForChannel(from_, to, channel)
}
func (bt *CommonTokenStream) filterForChannel(left, right, channel int) []IToken {
var hidden = make([]IToken, 0)
for i := left; i < right+1; i++ {
var t = bt.tokens[i]
if channel == -1 {
if t.GetChannel() != LexerDefaultTokenChannel {
hidden = append(hidden, t)
}
} else if t.GetChannel() == channel {
hidden = append(hidden, t)
}
}
if len(hidden) == 0 {
return nil
}
return hidden
}
func (bt *CommonTokenStream) GetSourceName() string {
return bt.tokenSource.GetSourceName()
}
func (bt *CommonTokenStream) Size() int {
return len(bt.tokens)
}
func (bt *CommonTokenStream) Index() int {
return bt.index
}
func (bt *CommonTokenStream) GetAllText() string {
return bt.GetTextFromInterval(nil)
}
func (bt *CommonTokenStream) GetTextFromTokens(start, end IToken) string {
return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
}
func (bt *CommonTokenStream) GetTextFromRuleContext(interval IRuleContext) string {
return bt.GetTextFromInterval(interval.GetSourceInterval())
}
func (bt *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
bt.lazyInit()
bt.fill()
if interval == nil {
interval = NewInterval(0, len(bt.tokens)-1)
}
var start = interval.start
var stop = interval.stop
if start < 0 || stop < 0 {
return ""
}
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
var s = ""
for i := start; i < stop+1; i++ {
var t = bt.tokens[i]
if t.GetTokenType() == TokenEOF {
break
}
s += t.GetText()
}
return s
}
// Get all tokens from lexer until EOF///
func (bt *CommonTokenStream) fill() {
bt.lazyInit()
for bt.fetch(1000) == 1000 {
continue
}
}
 type CommonTokenStream struct {
-	*BufferedTokenStream
+	tokenSource TokenSource
+	tokens []IToken
+	index int
+	fetchedEOF bool
+	channel int
 }
 func NewCommonTokenStream(lexer ILexer, channel int) *CommonTokenStream {
 	ts := new(CommonTokenStream)
-	ts.BufferedTokenStream = NewBufferedTokenStream(lexer)
+	// The {@link TokenSource} from which tokens for this stream are fetched.
+	ts.tokenSource = lexer
+	// A collection of all tokens fetched from the token source. The list is
+	// considered a complete view of the input once {@link //fetchedEOF} is set
+	// to {@code true}.
+	ts.tokens = make([]IToken, 0)
+	// The index into {@link //tokens} of the current token (next token to
+	// {@link //consume}). {@link //tokens}{@code [}{@link //p}{@code ]} should
+	// be {@link //LT LT(1)}.
+	//
+	// <p>This field is set to -1 when the stream is first constructed or when
+	// {@link //SetTokenSource} is called, indicating that the first token has
+	// not yet been fetched from the token source. For additional information,
+	// see the documentation of {@link IntStream} for a description of
+	// Initializing Methods.</p>
+	ts.index = -1
+	// Indicates whether the {@link Token//EOF} token has been fetched from
+	// {@link //tokenSource} and added to {@link //tokens}. This field improves
+	// performance for the following cases:
+	//
+	// <ul>
+	// <li>{@link //consume}: The lookahead check in {@link //consume} to
+	// prevent consuming the EOF symbol is optimized by checking the values of
+	// {@link //fetchedEOF} and {@link //p} instead of calling {@link //LA}.</li>
+	// <li>{@link //fetch}: The check to prevent adding multiple EOF symbols
+	// into {@link //tokens} is trivial with this field.</li>
+	// </ul>
+	ts.fetchedEOF = false
 	ts.channel = channel
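The nextTokenOnChannel scan above is what gives CommonTokenStream its channel filtering: seeks and lookahead are adjusted so they never land on a hidden token. A minimal sketch of that scan over a plain slice; the tok type and channel constants are hypothetical stand-ins, not the runtime's:

package main

import "fmt"

const (
	defaultChannel = 0 // stand-in for the lexer's default token channel
	hiddenChannel  = 1
)

type tok struct {
	text    string
	channel int
}

// nextOnChannel returns the index of the first token at or after i on the
// given channel, or -1 if none remains; this mirrors the scan used above.
func nextOnChannel(tokens []tok, i, channel int) int {
	for ; i < len(tokens); i++ {
		if tokens[i].channel == channel {
			return i
		}
	}
	return -1
}

func main() {
	tokens := []tok{
		{"a", defaultChannel},
		{" ", hiddenChannel}, // whitespace shunted off-channel by the lexer
		{"b", defaultChannel},
	}
	fmt.Println(nextOnChannel(tokens, 1, defaultChannel)) // 2: skips the hidden space
}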

View File

@@ -145,10 +145,10 @@ func (this *DFAState) equals(other interface{}) bool {
 }
 func (this *DFAState) String() string {
-	return strconv.Itoa(this.stateNumber) + ":" + this.hashString()
+	return strconv.Itoa(this.stateNumber) + ":" + this.Hash()
 }
-func (this *DFAState) hashString() string {
+func (this *DFAState) Hash() string {
 	var s string
 	if (this.isAcceptState) {

View File

@@ -625,7 +625,7 @@ func (this *LexerATNSimulator) addDFAState(configs *ATNConfigSet) *DFAState {
 		proposed.lexerActionExecutor = firstConfigWithRuleStopState.(*LexerATNConfig).lexerActionExecutor
 		proposed.prediction = this.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()]
 	}
-	var hash = proposed.hashString()
+	var hash = proposed.Hash()
 	var dfa = this.decisionToDFA[this.mode]
 	var existing = dfa.GetStates()[hash]
 	if existing != nil {

View File

@@ -17,7 +17,7 @@ type ILexerAction interface {
 	getActionType() int
 	getIsPositionDependent() bool
 	execute(lexer ILexer)
-	hashString() string
+	Hash() string
 	equals(other ILexerAction) bool
 }
@@ -48,7 +48,7 @@ func (this *LexerAction) getIsPositionDependent() bool {
 	return this.isPositionDependent
 }
-func (this *LexerAction) hashString() string {
+func (this *LexerAction) Hash() string {
 	return strconv.Itoa(this.actionType)
 }
@@ -101,7 +101,7 @@ func (this *LexerTypeAction) execute(lexer ILexer) {
 	lexer.setType(this._type)
 }
-func (this *LexerTypeAction) hashString() string {
+func (this *LexerTypeAction) Hash() string {
 	return strconv.Itoa(this.actionType) + strconv.Itoa(this._type)
 }
@@ -142,7 +142,7 @@ func (this *LexerPushModeAction) execute(lexer ILexer) {
 	lexer.pushMode(this.mode)
 }
-func (this *LexerPushModeAction) hashString() string {
+func (this *LexerPushModeAction) Hash() string {
 	return strconv.Itoa(this.actionType) + strconv.Itoa(this.mode)
 }
@@ -236,7 +236,7 @@ func (this *LexerModeAction) execute(lexer ILexer) {
 	lexer.mode(this.mode)
 }
-func (this *LexerModeAction) hashString() string {
+func (this *LexerModeAction) Hash() string {
 	return strconv.Itoa(this.actionType) + strconv.Itoa(this.mode)
 }
@@ -291,7 +291,7 @@ func (this *LexerCustomAction) execute(lexer ILexer) {
 	lexer.Action(nil, this.ruleIndex, this.actionIndex)
 }
-func (this *LexerCustomAction) hashString() string {
+func (this *LexerCustomAction) Hash() string {
 	return strconv.Itoa(this.actionType) + strconv.Itoa(this.ruleIndex) + strconv.Itoa(this.actionIndex)
 }
@@ -328,7 +328,7 @@ func (this *LexerChannelAction) execute(lexer ILexer) {
 	lexer.setChannel(this.channel)
 }
-func (this *LexerChannelAction) hashString() string {
+func (this *LexerChannelAction) Hash() string {
 	return strconv.Itoa(this.actionType) + strconv.Itoa(this.channel)
 }
@@ -393,8 +393,8 @@ func (this *LexerIndexedCustomAction) execute(lexer ILexer) {
 	this.lexerAction.execute(lexer)
 }
-func (this *LexerIndexedCustomAction) hashString() string {
-	return strconv.Itoa(this.actionType) + strconv.Itoa(this.offset) + this.lexerAction.hashString()
+func (this *LexerIndexedCustomAction) Hash() string {
+	return strconv.Itoa(this.actionType) + strconv.Itoa(this.offset) + this.lexerAction.Hash()
 }
 func (this *LexerIndexedCustomAction) equals(other ILexerAction) bool {

View File

@@ -27,7 +27,7 @@ func NewLexerActionExecutor(lexerActions []ILexerAction) *LexerActionExecutor {
 	var s string
 	for _, a := range lexerActions {
-		s += a.hashString()
+		s += a.Hash()
 	}
 	this.cachedHashString = s // "".join([str(la) for la in
@@ -153,7 +153,7 @@ func (this *LexerActionExecutor) execute(lexer ILexer, input CharStream, startIn
 	}
 }
-func (this *LexerActionExecutor) hashString() string {
+func (this *LexerActionExecutor) Hash() string {
 	return this.cachedHashString
 }

View File

@@ -998,6 +998,7 @@ func (this *ParserATNSimulator) closureCheckingStopState(config IATNConfig, conf
 	} else {
 		// we have no context info, just chase follow links (if greedy)
 		if ParserATNSimulatorDebug {
+			fmt.Println("DEBUG 1")
 			fmt.Println("FALLING off rule " + this.getRuleName(config.GetState().GetRuleIndex()))
 		}
 		this.closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon)
@@ -1022,6 +1023,7 @@ func (this *ParserATNSimulator) closureCheckingStopState(config IATNConfig, conf
 	} else {
 		// else if we have no context info, just chase follow links (if greedy)
 		if ParserATNSimulatorDebug {
+			fmt.Println("DEBUG 2")
 			fmt.Println("FALLING off rule " + this.getRuleName(config.GetState().GetRuleIndex()))
 		}
 	}
@@ -1031,6 +1033,7 @@ func (this *ParserATNSimulator) closureCheckingStopState(config IATNConfig, conf
 // Do the actual work of walking epsilon edges//
 func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx bool, depth int, treatEofAsEpsilon bool) {
+	fmt.Println("closure_")
 	var p = config.GetState()
 	// optimization
 	if !p.GetEpsilonOnlyTransitions() {
@@ -1044,6 +1047,7 @@ func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSe
 		var continueCollecting = collectPredicates && !ok
 		var c = this.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEofAsEpsilon)
 		if c != nil {
+			fmt.Println("DEBUG 1")
 			if !t.getIsEpsilon() && closureBusy.add(c) != c {
 				// avoid infinite recursion for EOF* and EOF+
 				continue
@@ -1051,6 +1055,8 @@ func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSe
 			var newDepth = depth
 			if _, ok := config.GetState().(*RuleStopState); ok {
+				fmt.Println("DEBUG 2")
 				// target fell off end of rule mark resulting c as having dipped into outer context
 				// We can't get here if incoming config was rule stop and we had context
 				// track how far we dip into outer context. Might
@@ -1058,11 +1064,17 @@ func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSe
 				// preds if this is > 0.
 				if closureBusy.add(c) != c {
+					fmt.Println("DEBUG 3")
 					// avoid infinite recursion for right-recursive rules
 					continue
+				} else {
+					fmt.Println(c)
+					fmt.Println(closureBusy)
 				}
 				if this._dfa != nil && this._dfa.precedenceDfa {
+					fmt.Println("DEBUG 4")
 					if t.(*EpsilonTransition).outermostPrecedenceReturn == this._dfa.atnStartState.GetRuleIndex() {
 						c.precedenceFilterSuppressed = true
 					}
@@ -1420,7 +1432,7 @@ func (this *ParserATNSimulator) addDFAState(dfa *DFA, D *DFAState) *DFAState {
 	if D == ATNSimulatorERROR {
 		return D
 	}
-	var hash = D.hashString()
+	var hash = D.Hash()
 	var existing, ok = dfa.GetStates()[hash]
 	if ok {
 		return existing
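The guards around the new DEBUG prints rely on closureBusy.add being get-or-insert: it returns the element already stored under an equal hash, so comparing the result against c detects a revisit and cuts off infinite recursion on right-recursive rules. A toy sketch of that semantics; the set and config types here are illustrative, not the runtime's:

package main

import "fmt"

type config struct{ key string }

// set is a minimal get-or-insert set keyed by a string hash: add returns
// the element already stored under an equal key, or stores and returns
// the new one, mirroring the closureBusy behavior relied on above.
type set struct{ data map[string]*config }

func (s *set) add(c *config) *config {
	if existing, ok := s.data[c.key]; ok {
		return existing
	}
	s.data[c.key] = c
	return c
}

func main() {
	busy := &set{data: map[string]*config{}}
	a := &config{key: "state1/alt2"}
	b := &config{key: "state1/alt2"} // equal key, distinct object
	fmt.Println(busy.add(a) == a)    // true: first visit, keep walking
	fmt.Println(busy.add(b) == b)    // false: already visited, the guard fires
}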

View File

@@ -6,7 +6,7 @@ import (
 )
 type IPredictionContext interface {
-	hashString() string
+	Hash() string
 	GetParent(int) IPredictionContext
 	getReturnState(int) int
 	equals(IPredictionContext) bool
@@ -72,7 +72,7 @@ func (this *PredictionContext) isEmpty() bool {
 	return false
 }
-func (this *PredictionContext) hashString() string {
+func (this *PredictionContext) Hash() string {
 	return this.cachedHashString
 }
@@ -180,7 +180,7 @@ func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
 		return true
 	} else if _, ok := other.(*SingletonPredictionContext); !ok {
 		return false
-	} else if this.hashString() != other.hashString() {
+	} else if this.Hash() != other.Hash() {
 		return false // can't be same if hash is different
 	} else {
@@ -196,7 +196,7 @@ func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
 	}
 }
-func (this *SingletonPredictionContext) hashString() string {
+func (this *SingletonPredictionContext) Hash() string {
 	return this.cachedHashString
 }
@@ -310,7 +310,7 @@ func (this *ArrayPredictionContext) getReturnState(index int) int {
 func (this *ArrayPredictionContext) equals(other IPredictionContext) bool {
 	if _, ok := other.(*ArrayPredictionContext); !ok {
 		return false
-	} else if this.cachedHashString != other.hashString() {
+	} else if this.cachedHashString != other.Hash() {
 		return false // can't be same if hash is different
 	} else {
 		otherP := other.(*ArrayPredictionContext)
@@ -441,11 +441,11 @@ func merge(a, b IPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict)
 // /
 func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
 	if mergeCache != nil {
-		var previous = mergeCache.Get(a.hashString(), b.hashString())
+		var previous = mergeCache.Get(a.Hash(), b.Hash())
 		if previous != nil {
 			return previous.(IPredictionContext)
 		}
-		previous = mergeCache.Get(b.hashString(), a.hashString())
+		previous = mergeCache.Get(b.Hash(), a.Hash())
 		if previous != nil {
 			return previous.(IPredictionContext)
 		}
@@ -454,7 +454,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
 	var rootMerge = mergeRoot(a, b, rootIsWildcard)
 	if rootMerge != nil {
 		if mergeCache != nil {
-			mergeCache.set(a.hashString(), b.hashString(), rootMerge)
+			mergeCache.set(a.Hash(), b.Hash(), rootMerge)
 		}
 		return rootMerge
 	}
@@ -474,7 +474,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
 		// Newjoined parent so create Newsingleton pointing to it, a'
 		var spc = SingletonPredictionContextCreate(parent, a.returnState)
 		if mergeCache != nil {
-			mergeCache.set(a.hashString(), b.hashString(), spc)
+			mergeCache.set(a.Hash(), b.Hash(), spc)
 		}
 		return spc
 	} else { // a != b payloads differ
@@ -495,7 +495,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
 		var parents = []IPredictionContext{singleParent, singleParent}
 		var apc = NewArrayPredictionContext(parents, payloads)
 		if mergeCache != nil {
-			mergeCache.set(a.hashString(), b.hashString(), apc)
+			mergeCache.set(a.Hash(), b.Hash(), apc)
 		}
 		return apc
 	}
@@ -511,7 +511,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
 	}
 	var a_ = NewArrayPredictionContext(parents, payloads)
 	if mergeCache != nil {
-		mergeCache.set(a.hashString(), b.hashString(), a_)
+		mergeCache.set(a.Hash(), b.Hash(), a_)
 	}
 	return a_
 }
@@ -601,11 +601,11 @@ func mergeRoot(a, b ISingletonPredictionContext, rootIsWildcard bool) IPredictio
 // /
 func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
 	if mergeCache != nil {
-		var previous = mergeCache.Get(a.hashString(), b.hashString())
+		var previous = mergeCache.Get(a.Hash(), b.Hash())
 		if previous != nil {
 			return previous.(IPredictionContext)
 		}
-		previous = mergeCache.Get(b.hashString(), a.hashString())
+		previous = mergeCache.Get(b.Hash(), a.Hash())
 		if previous != nil {
 			return previous.(IPredictionContext)
 		}
@@ -669,7 +669,7 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
 	if k == 1 { // for just one merged element, return singleton top
 		var a_ = SingletonPredictionContextCreate(mergedParents[0], mergedReturnStates[0])
 		if mergeCache != nil {
-			mergeCache.set(a.hashString(), b.hashString(), a_)
+			mergeCache.set(a.Hash(), b.Hash(), a_)
 		}
 		return a_
 	}
@@ -683,20 +683,20 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
 	// TODO: track whether this is possible above during merge sort for speed
 	if M == a {
 		if mergeCache != nil {
-			mergeCache.set(a.hashString(), b.hashString(), a)
+			mergeCache.set(a.Hash(), b.Hash(), a)
 		}
 		return a
 	}
 	if M == b {
 		if mergeCache != nil {
-			mergeCache.set(a.hashString(), b.hashString(), b)
+			mergeCache.set(a.Hash(), b.Hash(), b)
 		}
 		return b
 	}
 	combineCommonParents(mergedParents)
 	if mergeCache != nil {
-		mergeCache.set(a.hashString(), b.hashString(), M)
+		mergeCache.set(a.Hash(), b.Hash(), M)
 	}
 	return M
 }

View File

@@ -88,7 +88,7 @@ func (this *Predicate) evaluate(parser IRecognizer, outerContext IRuleContext) b
 	return parser.Sempred(localctx, this.ruleIndex, this.predIndex)
 }
-func (this *Predicate) hashString() string {
+func (this *Predicate) Hash() string {
 	return strconv.Itoa(this.ruleIndex) + "/" + strconv.Itoa(this.predIndex) + "/" + fmt.Sprint(this.isCtxDependent)
 }
@@ -136,7 +136,7 @@ func (this *PrecedencePredicate) compareTo(other *PrecedencePredicate) int {
 	return this.precedence - other.precedence
 }
-func (this *PrecedencePredicate) hashString() string {
+func (this *PrecedencePredicate) Hash() string {
 	return "31"
 }
@@ -232,7 +232,7 @@ func (this *AND) equals(other interface{}) bool {
 	}
 }
-func (this *AND) hashString() string {
+func (this *AND) Hash() string {
 	return fmt.Sprint(this.opnds) + "/AND"
 }
@@ -371,7 +371,7 @@ func (this *OR) equals(other interface{}) bool {
 	}
 }
-func (this *OR) hashString() string {
+func (this *OR) Hash() string {
 	return fmt.Sprint(this.opnds) + "/OR"
 }

View File

@@ -6,8 +6,8 @@ import (
 	"hash/fnv"
 	"strings"
 	// "regexp"
-	"bytes"
-	"encoding/gob"
+	// "bytes"
+	// "encoding/gob"
 )
 func intMin(a, b int) int {
@@ -44,11 +44,6 @@ func (s *IntStack) Push(e int) {
 	*s = append(*s, e)
 }
-func hashCode(s string) string {
-	h := fnv.New32a()
-	h.Write([]byte((s)))
-	return fmt.Sprint(h.Sum32())
-}
 type Set struct {
 	data map[string][]interface{}
@@ -78,28 +73,40 @@ func NewSet(hashFunction func(interface{}) string, equalsFunction func(interface
 }
 func standardHashFunction(a interface{}) string {
+	h, ok := a.(Hasher)
+	if ok {
+		return h.Hash()
+	}
+	return fmt.Sprint(a)
+}
+//func getBytes(key interface{}) ([]byte, error) {
+//	var buf bytes.Buffer
+//	enc := gob.NewEncoder(&buf)
+//	err := enc.Encode(key)
+//	if err != nil {
+//		return nil, err
+//	}
+//	return buf.Bytes(), nil
+//}
+type Hasher interface {
+	Hash() string
+}
+func hashCode(s string) string {
 	h := fnv.New32a()
-	v, _ := getBytes(a)
-	h.Write(v)
+	h.Write([]byte((s)))
 	return fmt.Sprint(h.Sum32())
 }
-func getBytes(key interface{}) ([]byte, error) {
-	var buf bytes.Buffer
-	enc := gob.NewEncoder(&buf)
-	err := enc.Encode(key)
-	if err != nil {
-		return nil, err
-	}
-	return buf.Bytes(), nil
-}
 func standardEqualsFunction(a interface{}, b interface{}) bool {
 	return standardHashFunction(a) == standardHashFunction(b)
 }
 func (this *Set) length() int {
 	return len(this.data)
 }
@@ -130,7 +137,7 @@ func (this *Set) add(value interface{}) interface{} {
 func (this *Set) contains(value interface{}) bool {
 	hash := this.hashFunction(value)
-	key := hashCode(hash)
+	key := "hash_" + hashCode(hash)
 	values := this.data[key]
@@ -157,7 +164,16 @@ func (this *Set) values() []interface{} {
 }
 func (this *Set) String() string {
-	return fmt.Sprint(this.data)
+	s := ""
+	for _, av := range this.data {
+		for _, v := range av {
+			s += fmt.Sprint(v)
+		}
+	}
+	return s
 }
 type BitSet struct {
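With gob encoding gone, standardHashFunction now dispatches on the new Hasher interface and falls back to fmt.Sprint, and Set keeps a slice of values per hash key so colliding-but-unequal values can coexist. A condensed, hypothetical sketch of that bucket scheme (equality here is hash equality, as in standardEqualsFunction):

package main

import (
	"fmt"
	"hash/fnv"
)

type Hasher interface{ Hash() string }

func standardHash(a interface{}) string {
	if h, ok := a.(Hasher); ok {
		return h.Hash()
	}
	return fmt.Sprint(a) // fallback, as in the new standardHashFunction
}

func hashCode(s string) string {
	h := fnv.New32a()
	h.Write([]byte(s))
	return fmt.Sprint(h.Sum32())
}

// bucketSet keeps a slice per hash key so two values whose hashes collide
// but which are not equal could both be stored.
type bucketSet struct{ data map[string][]interface{} }

func (s *bucketSet) add(v interface{}) interface{} {
	key := "hash_" + hashCode(standardHash(v)) // same key prefix as above
	for _, existing := range s.data[key] {
		if standardHash(existing) == standardHash(v) {
			return existing // get-or-insert: hand back the stored value
		}
	}
	s.data[key] = append(s.data[key], v)
	return v
}

func main() {
	s := &bucketSet{data: map[string][]interface{}{}}
	fmt.Println(s.add("x"), s.add("x"), len(s.data)) // x x 1
}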

View File

@@ -150,9 +150,7 @@ BufferedTokenStream.prototype.sync = function(i) {
 	var n = i - this.tokens.length + 1; // how many more elements we need?
 	if (n > 0) {
 		var fetched = this.fetch(n);
-		var e = new Error();
 		console.log("sync done")
-		console.log(e.stack)
 		return fetched >= n;
 	}

View File

@@ -132,7 +132,6 @@ LexerATNSimulator.prototype.match = function(input, mode) {
 		return this.matchATN(input);
 	} else {
 		console.log("execATN")
-		console.log((new Error()).stack)
 		var res = this.execATN(input, dfa.s0);
 		return res;
 	}
@@ -235,9 +234,7 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
 	}
 	console.log("Done with execATN loop")
-	var res = this.failOrAccept(this.prevAccept, input, s.configs, t);
-	console.log("Done with failOrAccept", res)
-	return res;
+	return this.failOrAccept(this.prevAccept, input, s.configs, t);
 };
 // Get an existing target state for an edge in the DFA. If the target state

View File

@@ -1254,6 +1254,7 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
 	} else {
 		// we have no context info, just chase follow links (if greedy)
 		if (this.debug) {
+			console.log("DEBUG 1")
 			console.log("FALLING off rule " + this.getRuleName(config.state.ruleIndex));
 		}
 		this.closure_(config, configs, closureBusy, collectPredicates,
@@ -1279,6 +1280,7 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
 	} else {
 		// else if we have no context info, just chase follow links (if greedy)
 		if (this.debug) {
+			console.log("DEBUG 2")
 			console.log("FALLING off rule " + this.getRuleName(config.state.ruleIndex));
 		}
 	}
@@ -1288,6 +1290,7 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
 // Do the actual work of walking epsilon edges//
 ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon) {
+	console.log("closure_")
 	var p = config.state;
 	// optimization
 	if (! p.epsilonOnlyTransitions) {
@@ -1300,12 +1303,15 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
 	var continueCollecting = collectPredicates && !(t instanceof ActionTransition);
 	var c = this.getEpsilonTarget(config, t, continueCollecting, depth === 0, fullCtx, treatEofAsEpsilon);
 	if (c!==null) {
+		console.log("DEBUG 1")
 		if (!t.isEpsilon && closureBusy.add(c)!==c){
 			// avoid infinite recursion for EOF* and EOF+
 			continue;
 		}
 		var newDepth = depth;
 		if ( config.state instanceof RuleStopState) {
+			console.log("DEBUG 2")
 			// target fell off end of rule; mark resulting c as having dipped into outer context
 			// We can't get here if incoming config was rule stop and we had context
 			// track how far we dip into outer context. Might
@@ -1313,11 +1319,16 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
 			// preds if this is > 0.
 			if (closureBusy.add(c)!==c) {
+				console.log("DEBUG 3")
 				// avoid infinite recursion for right-recursive rules
 				continue;
+			} else {
+				console.log(c.toString())
+				console.log(closureBusy.toString())
 			}
 			if (this._dfa !== null && this._dfa.precedenceDfa) {
+				console.log("DEBUG 4")
 				if (t.outermostPrecedenceReturn === this._dfa.atnStartState.ruleIndex) {
 					c.precedenceFilterSuppressed = true;
 				}
@@ -1327,6 +1338,7 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
 			configs.dipsIntoOuterContext = true; // TODO: can remove? only care when we add to set per middle of this method
 			newDepth -= 1;
 			if (this.debug) {
+				// console.log((new Error()).stack)
 				console.log("dips into outer ctx: " + c);
 			}
 		} else if (t instanceof RuleTransition) {