More bugs

Peter Boyer 2015-12-29 15:32:11 -06:00
parent 56e6e6c26f
commit 7798333f72
15 changed files with 452 additions and 492 deletions

View File

@ -15,6 +15,8 @@ import (
//
type IATNConfig interface {
Hasher
getPrecedenceFilterSuppressed() bool
setPrecedenceFilterSuppressed(bool)
@ -30,7 +32,7 @@ type IATNConfig interface {
String() string
-shortHashString() string
+shortHash() string
}
type ATNConfig struct {
@ -152,17 +154,17 @@ func (this *ATNConfig) equals(other interface{}) bool {
}
}
-func (this *ATNConfig) shortHashString() string {
+func (this *ATNConfig) shortHash() string {
return strconv.Itoa(this.state.GetStateNumber()) + "/" + strconv.Itoa(this.alt) + "/" + this.semanticContext.String()
}
-func (this *ATNConfig) hashString() string {
+func (this *ATNConfig) Hash() string {
var c string
if this.context == nil {
c = ""
} else {
-c = this.context.hashString()
+c = this.context.Hash()
}
return strconv.Itoa(this.state.GetStateNumber()) + "/" + strconv.Itoa(this.alt) + "/" + c + "/" + this.semanticContext.String()
@ -262,7 +264,7 @@ func NewLexerATNConfig1(state IATNState, alt int, context IPredictionContext) *L
return this
}
-func (this *LexerATNConfig) hashString() string {
+func (this *LexerATNConfig) Hash() string {
var f string
if this.passedThroughNonGreedyDecision {

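The renames in this file follow one pattern: the unexported hashString()/shortHashString() methods become exported Hash()/shortHash() so that configs satisfy the Hasher interface this commit introduces in utils.go (see below). A minimal, self-contained sketch of that pattern, using a toy type rather than the real ATNConfig:

package main

import "fmt"

// Hasher mirrors the single-method interface added in utils.go.
type Hasher interface {
	Hash() string
}

// config stands in for ATNConfig: its hash is a composite string key.
type config struct{ state, alt int }

func (c config) Hash() string {
	return fmt.Sprintf("%d/%d", c.state, c.alt)
}

func main() {
	var h Hasher = config{state: 12, alt: 1}
	fmt.Println(h.Hash()) // prints "12/1"
}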
View File

@ -11,7 +11,7 @@ import (
///
func hashATNConfig(c interface{}) string {
-return c.(IATNConfig).shortHashString()
+return c.(IATNConfig).shortHash()
}
func equalATNConfigs(a, b interface{}) bool {
@ -197,7 +197,7 @@ func (this *ATNConfigSet) equals(other interface{}) bool {
this.dipsIntoOuterContext == other2.dipsIntoOuterContext
}
-func (this *ATNConfigSet) hashString() string {
+func (this *ATNConfigSet) Hash() string {
if this.readOnly {
if this.cachedHashString == "-1" {
this.cachedHashString = this.hashConfigs()

View File

@ -1,396 +0,0 @@
// This implementation of {@link TokenStream} loads tokens from a
// {@link TokenSource} on-demand, and places the tokens in a buffer to provide
// access to any previous token by index.
//
// <p>
// This token stream ignores the value of {@link Token//getChannel}. If your
// parser requires the token stream to filter tokens to only those on a particular
// channel, such as {@link Token//DEFAULT_CHANNEL} or
// {@link Token//HIDDEN_CHANNEL}, use a filtering token stream such as
// {@link CommonTokenStream}.</p>
package antlr4
import (
"strconv"
"fmt"
)
// bt is just to keep meaningful parameter types to Parser
type BufferedTokenStream struct {
tokenSource TokenSource
tokens []IToken
index int
fetchedEOF bool
channel int
}
func NewBufferedTokenStream(tokenSource TokenSource) *BufferedTokenStream {
ts := new(BufferedTokenStream)
// The {@link TokenSource} from which tokens for this stream are fetched.
ts.tokenSource = tokenSource
// A collection of all tokens fetched from the token source. The list is
// considered a complete view of the input once {@link //fetchedEOF} is set
// to {@code true}.
ts.tokens = make([]IToken, 0)
// The index into {@link //tokens} of the current token (next token to
// {@link //consume}). {@link //tokens}{@code [}{@link //p}{@code ]} should
// be
// {@link //LT LT(1)}.
//
// <p>This field is set to -1 when the stream is first constructed or when
// {@link //SetTokenSource} is called, indicating that the first token has
// not yet been fetched from the token source. For additional information,
// see the documentation of {@link IntStream} for a description of
// Initializing Methods.</p>
ts.index = -1
// Indicates whether the {@link Token//EOF} token has been fetched from
// {@link //tokenSource} and added to {@link //tokens}. This field improves
// performance for the following cases:
//
// <ul>
// <li>{@link //consume}: The lookahead check in {@link //consume} to
// prevent
// consuming the EOF symbol is optimized by checking the values of
// {@link //fetchedEOF} and {@link //p} instead of calling {@link
// //LA}.</li>
// <li>{@link //fetch}: The check to prevent adding multiple EOF symbols
// into
// {@link //tokens} is trivial with this field.</li>
// </ul>
ts.fetchedEOF = false
return ts
}
func (bt *BufferedTokenStream) Mark() int {
return 0
}
func (bt *BufferedTokenStream) Release(marker int) {
// no resources to release
}
func (bt *BufferedTokenStream) reset() {
bt.Seek(0)
}
func (bt *BufferedTokenStream) Seek(index int) {
bt.lazyInit()
bt.index = bt.adjustSeekIndex(index)
}
func (bt *BufferedTokenStream) Get(index int) IToken {
bt.lazyInit()
return bt.tokens[index]
}
func (bt *BufferedTokenStream) Consume() {
var skipEofCheck = false
if bt.index >= 0 {
if bt.fetchedEOF {
// the last token in tokens is EOF. skip check if p indexes any
// fetched token except the last.
skipEofCheck = bt.index < len(bt.tokens)-1
} else {
// no EOF token in tokens. skip check if p indexes a fetched token.
skipEofCheck = bt.index < len(bt.tokens)
}
} else {
// not yet initialized
skipEofCheck = false
}
fmt.Println("Consume 1")
if !skipEofCheck && bt.LA(1) == TokenEOF {
panic("cannot consume EOF")
}
if bt.Sync(bt.index + 1) {
fmt.Println("Consume 2")
bt.index = bt.adjustSeekIndex(bt.index + 1)
}
}
// Make sure index {@code i} in tokens has a token.
//
// @return {@code true} if a token is located at index {@code i}, otherwise
// {@code false}.
// @see //Get(int i)
// /
func (bt *BufferedTokenStream) Sync(i int) bool {
var n = i - len(bt.tokens) + 1 // how many more elements we need?
if n > 0 {
var fetched = bt.fetch(n)
fmt.Println("Sync done")
return fetched >= n
}
return true
}
// Add {@code n} elements to buffer.
//
// @return The actual number of elements added to the buffer.
// /
func (bt *BufferedTokenStream) fetch(n int) int {
if bt.fetchedEOF {
return 0
}
for i := 0; i < n; i++ {
var t IToken = bt.tokenSource.nextToken()
fmt.Println("fetch loop")
t.SetTokenIndex(len(bt.tokens))
bt.tokens = append(bt.tokens, t)
if t.GetTokenType() == TokenEOF {
bt.fetchedEOF = true
return i + 1
}
}
fmt.Println("fetch done")
return n
}
// Get all tokens from start..stop inclusively///
func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet) []IToken {
if start < 0 || stop < 0 {
return nil
}
bt.lazyInit()
var subset = make([]IToken, 0)
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
for i := start; i < stop+1; i++ {
var t = bt.tokens[i]
if t.GetTokenType() == TokenEOF {
break
}
if types == nil || types.contains(t.GetTokenType()) {
subset = append(subset, t)
}
}
return subset
}
func (bt *BufferedTokenStream) LA(i int) int {
return bt.LT(i).GetTokenType()
}
func (bt *BufferedTokenStream) LB(k int) IToken {
if bt.index-k < 0 {
return nil
}
return bt.tokens[bt.index-k]
}
func (bt *BufferedTokenStream) LT(k int) IToken {
bt.lazyInit()
if k == 0 {
return nil
}
if k < 0 {
return bt.LB(-k)
}
var i = bt.index + k - 1
bt.Sync(i)
if i >= len(bt.tokens) { // return EOF token
// EOF must be last token
return bt.tokens[len(bt.tokens)-1]
}
return bt.tokens[i]
}
// Allowed derived classes to modify the behavior of operations which change
// the current stream position by adjusting the target token index of a seek
// operation. The default implementation simply returns {@code i}. If a
// panic occurs in this method, the current stream index should not be
// changed.
//
// <p>For example, {@link CommonTokenStream} overrides this method to ensure
// that
// the seek target is always an on-channel token.</p>
//
// @param i The target token index.
// @return The adjusted target token index.
func (bt *BufferedTokenStream) adjustSeekIndex(i int) int {
return i
}
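The comment above says {@link CommonTokenStream} overrides this hook so that seeks always land on an on-channel token. That override is not shown in this diff; a plausible sketch of what it would look like, stated as an assumption rather than the runtime's actual code:

// Assumed override, not part of this commit: redirect any seek target to
// the next token on the stream's channel.
func (ts *CommonTokenStream) adjustSeekIndex(i int) int {
	return ts.nextTokenOnChannel(i, ts.channel)
}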
func (bt *BufferedTokenStream) lazyInit() {
if bt.index == -1 {
bt.setup()
}
}
func (bt *BufferedTokenStream) setup() {
bt.Sync(0)
bt.index = bt.adjustSeekIndex(0)
}
func (bt *BufferedTokenStream) GetTokenSource() TokenSource {
return bt.tokenSource
}
// Reset this token stream by setting its token source.///
func (bt *BufferedTokenStream) SetTokenSource(tokenSource TokenSource) {
bt.tokenSource = tokenSource
bt.tokens = make([]IToken, 0)
bt.index = -1
}
// Given a starting index, return the index of the next token on channel.
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and EOF.
// /
func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel int) int {
bt.Sync(i)
if i >= len(bt.tokens) {
return -1
}
var token = bt.tokens[i]
for token.GetChannel() != channel {
if token.GetTokenType() == TokenEOF {
return -1
}
i += 1
bt.Sync(i)
token = bt.tokens[i]
}
return i
}
// Given a starting index, return the index of the previous token on channel.
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and 0.
func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel int) int {
for i >= 0 && bt.tokens[i].GetChannel() != channel {
i -= 1
}
return i
}
// Collect all tokens on specified channel to the right of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
// EOF. If channel is -1, find any non default channel token.
func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
}
var nextOnChannel = bt.nextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
var from_ = tokenIndex + 1
// if none on-channel to right, nextOnChannel=-1, so set to = last token
var to int
if nextOnChannel == -1 {
to = len(bt.tokens) - 1
} else {
to = nextOnChannel
}
return bt.filterForChannel(from_, to, channel)
}
// Collect all tokens on specified channel to the left of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
// If channel is -1, find any non default channel token.
func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
}
var prevOnChannel = bt.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
if prevOnChannel == tokenIndex-1 {
return nil
}
// if none on channel to left, prevOnChannel=-1 then from=0
var from_ = prevOnChannel + 1
var to = tokenIndex - 1
return bt.filterForChannel(from_, to, channel)
}
func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) []IToken {
var hidden = make([]IToken, 0)
for i := left; i < right+1; i++ {
var t = bt.tokens[i]
if channel == -1 {
if t.GetChannel() != LexerDefaultTokenChannel {
hidden = append(hidden, t)
}
} else if t.GetChannel() == channel {
hidden = append(hidden, t)
}
}
if len(hidden) == 0 {
return nil
}
return hidden
}
func (bt *BufferedTokenStream) GetSourceName() string {
return bt.tokenSource.GetSourceName()
}
func (bt *BufferedTokenStream) Size() int {
return len(bt.tokens)
}
func (bt *BufferedTokenStream) Index() int {
return bt.index
}
func (bt *BufferedTokenStream) GetAllText() string {
return bt.GetTextFromInterval(nil)
}
func (bt *BufferedTokenStream) GetTextFromTokens(start, end IToken) string {
return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
}
func (bt *BufferedTokenStream) GetTextFromRuleContext(interval IRuleContext) string {
return bt.GetTextFromInterval(interval.GetSourceInterval())
}
func (bt *BufferedTokenStream) GetTextFromInterval(interval *Interval) string {
bt.lazyInit()
bt.fill()
if interval == nil {
interval = NewInterval(0, len(bt.tokens)-1)
}
var start = interval.start
var stop = interval.stop
if start < 0 || stop < 0 {
return ""
}
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
var s = ""
for i := start; i < stop+1; i++ {
var t = bt.tokens[i]
if t.GetTokenType() == TokenEOF {
break
}
s += t.GetText()
}
return s
}
// Get all tokens from lexer until EOF///
func (bt *BufferedTokenStream) fill() {
bt.lazyInit()
for bt.fetch(1000) == 1000 {
continue
}
}
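fill's termination test leans on fetch's contract: fetch(n) reports how many tokens it actually buffered, so any return value short of 1000 means EOF arrived mid-batch. The same batch-until-short-read idiom in isolation (illustrative helper, not part of the runtime):

// drain requests blocks of n until the producer comes up short, mirroring
// the `for bt.fetch(1000) == 1000` loop above.
func drain(fetch func(n int) int) {
	const n = 1000
	for fetch(n) == n {
	}
}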

View File

@ -1,38 +1,357 @@
//
-// This class extends {@link BufferedTokenStream} with functionality to filter
-// token streams to tokens on a particular channel (tokens where
-// {@link Token//getChannel} returns a particular value).
+// This implementation of {@link TokenStream} loads tokens from a
+// {@link TokenSource} on-demand, and places the tokens in a buffer to provide
+// access to any previous token by index.
//
-// <p>
-// This token stream provides access to all tokens by index or when calling
-// methods like {@link //GetText}. The channel filtering is only used for code
-// accessing tokens via the lookahead methods {@link //LA}, {@link //LT}, and
-// {@link //LB}.</p>
-//
-// <p>
-// By default, tokens are placed on the default channel
-// ({@link Token//DEFAULT_CHANNEL}), but may be reassigned by using the
-// {@code ->channel(HIDDEN)} lexer command, or by using an embedded action to
-// call {@link Lexer//setChannel}.
-// </p>
-//
-// <p>
-// Note: lexer rules which use the {@code ->skip} lexer command or call
-// {@link Lexer//skip} do not produce tokens at all, so input text matched by
-// such a rule will not be available as part of the token stream, regardless of
-// channel.</p>
-///
+// <p>
+// This token stream ignores the value of {@link Token//getChannel}. If your
+// parser requires the token stream to filter tokens to only those on a particular
+// channel, such as {@link Token//DEFAULT_CHANNEL} or
+// {@link Token//HIDDEN_CHANNEL}, use a filtering token stream such as
+// {@link CommonTokenStream}.</p>
package antlr4
import (
"strconv"
"fmt"
)
func (bt *CommonTokenStream) Mark() int {
return 0
}
func (bt *CommonTokenStream) Release(marker int) {
// no resources to release
}
func (bt *CommonTokenStream) reset() {
bt.Seek(0)
}
func (bt *CommonTokenStream) Seek(index int) {
bt.lazyInit()
bt.index = bt.adjustSeekIndex(index)
}
func (bt *CommonTokenStream) Get(index int) IToken {
bt.lazyInit()
return bt.tokens[index]
}
func (bt *CommonTokenStream) Consume() {
var skipEofCheck = false
if bt.index >= 0 {
if bt.fetchedEOF {
// the last token in tokens is EOF. skip check if p indexes any
// fetched token except the last.
skipEofCheck = bt.index < len(bt.tokens)-1
} else {
// no EOF token in tokens. skip check if p indexes a fetched token.
skipEofCheck = bt.index < len(bt.tokens)
}
} else {
// not yet initialized
skipEofCheck = false
}
fmt.Println("Consume 1")
if !skipEofCheck && bt.LA(1) == TokenEOF {
panic("cannot consume EOF")
}
if bt.Sync(bt.index + 1) {
fmt.Println("Consume 2")
bt.index = bt.adjustSeekIndex(bt.index + 1)
}
}
// Make sure index {@code i} in tokens has a token.
//
// @return {@code true} if a token is located at index {@code i}, otherwise
// {@code false}.
// @see //Get(int i)
// /
func (bt *CommonTokenStream) Sync(i int) bool {
var n = i - len(bt.tokens) + 1 // how many more elements we need?
if n > 0 {
var fetched = bt.fetch(n)
fmt.Println("Sync done")
return fetched >= n
}
return true
}
// Add {@code n} elements to buffer.
//
// @return The actual number of elements added to the buffer.
// /
func (bt *CommonTokenStream) fetch(n int) int {
if bt.fetchedEOF {
return 0
}
for i := 0; i < n; i++ {
var t IToken = bt.tokenSource.nextToken()
fmt.Println("fetch loop")
t.SetTokenIndex(len(bt.tokens))
bt.tokens = append(bt.tokens, t)
if t.GetTokenType() == TokenEOF {
bt.fetchedEOF = true
return i + 1
}
}
fmt.Println("fetch done")
return n
}
// Get all tokens from start..stop inclusively///
func (bt *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []IToken {
if start < 0 || stop < 0 {
return nil
}
bt.lazyInit()
var subset = make([]IToken, 0)
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
for i := start; i < stop+1; i++ {
var t = bt.tokens[i]
if t.GetTokenType() == TokenEOF {
break
}
if types == nil || types.contains(t.GetTokenType()) {
subset = append(subset, t)
}
}
return subset
}
func (bt *CommonTokenStream) LA(i int) int {
return bt.LT(i).GetTokenType()
}
func (bt *CommonTokenStream) lazyInit() {
if bt.index == -1 {
bt.setup()
}
}
func (bt *CommonTokenStream) setup() {
bt.Sync(0)
bt.index = bt.adjustSeekIndex(0)
}
func (bt *CommonTokenStream) GetTokenSource() TokenSource {
return bt.tokenSource
}
// Reset this token stream by setting its token source.///
func (bt *CommonTokenStream) SetTokenSource(tokenSource TokenSource) {
bt.tokenSource = tokenSource
bt.tokens = make([]IToken, 0)
bt.index = -1
}
// Given a starting index, return the index of the next token on channel.
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and EOF.
// /
func (bt *CommonTokenStream) nextTokenOnChannel(i, channel int) int {
bt.Sync(i)
if i >= len(bt.tokens) {
return -1
}
var token = bt.tokens[i]
for token.GetChannel() != channel {
if token.GetTokenType() == TokenEOF {
return -1
}
i += 1
bt.Sync(i)
token = bt.tokens[i]
}
return i
}
// Given a starting index, return the index of the previous token on channel.
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and 0.
func (bt *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
for i >= 0 && bt.tokens[i].GetChannel() != channel {
i -= 1
}
return i
}
// Collect all tokens on specified channel to the right of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
// EOF. If channel is -1, find any non default channel token.
func (bt *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
}
var nextOnChannel = bt.nextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
var from_ = tokenIndex + 1
// if none on-channel to right, nextOnChannel=-1, so set to = last token
var to int
if nextOnChannel == -1 {
to = len(bt.tokens) - 1
} else {
to = nextOnChannel
}
return bt.filterForChannel(from_, to, channel)
}
// Collect all tokens on specified channel to the left of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
// If channel is -1, find any non default channel token.
func (bt *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
}
var prevOnChannel = bt.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
if prevOnChannel == tokenIndex-1 {
return nil
}
// if none on channel to left, prevOnChannel=-1 then from=0
var from_ = prevOnChannel + 1
var to = tokenIndex - 1
return bt.filterForChannel(from_, to, channel)
}
func (bt *CommonTokenStream) filterForChannel(left, right, channel int) []IToken {
var hidden = make([]IToken, 0)
for i := left; i < right+1; i++ {
var t = bt.tokens[i]
if channel == -1 {
if t.GetChannel() != LexerDefaultTokenChannel {
hidden = append(hidden, t)
}
} else if t.GetChannel() == channel {
hidden = append(hidden, t)
}
}
if len(hidden) == 0 {
return nil
}
return hidden
}
func (bt *CommonTokenStream) GetSourceName() string {
return bt.tokenSource.GetSourceName()
}
func (bt *CommonTokenStream) Size() int {
return len(bt.tokens)
}
func (bt *CommonTokenStream) Index() int {
return bt.index
}
func (bt *CommonTokenStream) GetAllText() string {
return bt.GetTextFromInterval(nil)
}
func (bt *CommonTokenStream) GetTextFromTokens(start, end IToken) string {
return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
}
func (bt *CommonTokenStream) GetTextFromRuleContext(interval IRuleContext) string {
return bt.GetTextFromInterval(interval.GetSourceInterval())
}
func (bt *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
bt.lazyInit()
bt.fill()
if interval == nil {
interval = NewInterval(0, len(bt.tokens)-1)
}
var start = interval.start
var stop = interval.stop
if start < 0 || stop < 0 {
return ""
}
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
var s = ""
for i := start; i < stop+1; i++ {
var t = bt.tokens[i]
if t.GetTokenType() == TokenEOF {
break
}
s += t.GetText()
}
return s
}
// Get all tokens from lexer until EOF///
func (bt *CommonTokenStream) fill() {
bt.lazyInit()
for bt.fetch(1000) == 1000 {
continue
}
}
type CommonTokenStream struct {
*BufferedTokenStream
tokenSource TokenSource
tokens []IToken
index int
fetchedEOF bool
channel int
}
func NewCommonTokenStream(lexer ILexer, channel int) *CommonTokenStream {
ts := new(CommonTokenStream)
ts.BufferedTokenStream = NewBufferedTokenStream(lexer)
// The {@link TokenSource} from which tokens for this stream are fetched.
ts.tokenSource = lexer
// A collection of all tokens fetched from the token source. The list is
// considered a complete view of the input once {@link //fetchedEOF} is set
// to {@code true}.
ts.tokens = make([]IToken, 0)
// The index into {@link //tokens} of the current token (next token to
// {@link //consume}). {@link //tokens}{@code [}{@link //p}{@code ]} should
// be
// {@link //LT LT(1)}.
//
// <p>This field is set to -1 when the stream is first constructed or when
// {@link //SetTokenSource} is called, indicating that the first token has
// not yet been fetched from the token source. For additional information,
// see the documentation of {@link IntStream} for a description of
// Initializing Methods.</p>
ts.index = -1
// Indicates whether the {@link Token//EOF} token has been fetched from
// {@link //tokenSource} and added to {@link //tokens}. This field improves
// performance for the following cases:
//
// <ul>
// <li>{@link //consume}: The lookahead check in {@link //consume} to
// prevent
// consuming the EOF symbol is optimized by checking the values of
// {@link //fetchedEOF} and {@link //p} instead of calling {@link
// //LA}.</li>
// <li>{@link //fetch}: The check to prevent adding multiple EOF symbols
// into
// {@link //tokens} is trivial with this field.</li>
// </ul>
ts.fetchedEOF = false
ts.channel = channel
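Taken together, the constructor wires a lexer into a buffered, channel-filtered stream. A hypothetical end-to-end sketch: NewInputStream and NewMyLexer are assumed runtime/generated names not shown in this diff, while NewCommonTokenStream, fill, and LexerDefaultTokenChannel appear above.

func tokensExample() {
	input := NewInputStream("a + b") // assumed runtime constructor
	lexer := NewMyLexer(input)       // hypothetical generated lexer
	tokens := NewCommonTokenStream(lexer, LexerDefaultTokenChannel)
	tokens.fill() // pull the whole input through the lexer up to EOF
	for _, t := range tokens.tokens {
		fmt.Println(t.GetText())
	}
}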

View File

@ -145,10 +145,10 @@ func (this *DFAState) equals(other interface{}) bool {
}
func (this *DFAState) String() string {
-return strconv.Itoa(this.stateNumber) + ":" + this.hashString()
+return strconv.Itoa(this.stateNumber) + ":" + this.Hash()
}
-func (this *DFAState) hashString() string {
+func (this *DFAState) Hash() string {
var s string
if (this.isAcceptState) {

View File

@ -625,7 +625,7 @@ func (this *LexerATNSimulator) addDFAState(configs *ATNConfigSet) *DFAState {
proposed.lexerActionExecutor = firstConfigWithRuleStopState.(*LexerATNConfig).lexerActionExecutor
proposed.prediction = this.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()]
}
-var hash = proposed.hashString()
+var hash = proposed.Hash()
var dfa = this.decisionToDFA[this.mode]
var existing = dfa.GetStates()[hash]
if existing != nil {

View File

@ -17,7 +17,7 @@ type ILexerAction interface {
getActionType() int
getIsPositionDependent() bool
execute(lexer ILexer)
-hashString() string
+Hash() string
equals(other ILexerAction) bool
}
@ -48,7 +48,7 @@ func (this *LexerAction) getIsPositionDependent() bool {
return this.isPositionDependent
}
-func (this *LexerAction) hashString() string {
+func (this *LexerAction) Hash() string {
return strconv.Itoa(this.actionType)
}
@ -101,7 +101,7 @@ func (this *LexerTypeAction) execute(lexer ILexer) {
lexer.setType(this._type)
}
-func (this *LexerTypeAction) hashString() string {
+func (this *LexerTypeAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this._type)
}
@ -142,7 +142,7 @@ func (this *LexerPushModeAction) execute(lexer ILexer) {
lexer.pushMode(this.mode)
}
-func (this *LexerPushModeAction) hashString() string {
+func (this *LexerPushModeAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.mode)
}
@ -236,7 +236,7 @@ func (this *LexerModeAction) execute(lexer ILexer) {
lexer.mode(this.mode)
}
-func (this *LexerModeAction) hashString() string {
+func (this *LexerModeAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.mode)
}
@ -291,7 +291,7 @@ func (this *LexerCustomAction) execute(lexer ILexer) {
lexer.Action(nil, this.ruleIndex, this.actionIndex)
}
-func (this *LexerCustomAction) hashString() string {
+func (this *LexerCustomAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.ruleIndex) + strconv.Itoa(this.actionIndex)
}
@ -328,7 +328,7 @@ func (this *LexerChannelAction) execute(lexer ILexer) {
lexer.setChannel(this.channel)
}
-func (this *LexerChannelAction) hashString() string {
+func (this *LexerChannelAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.channel)
}
@ -393,8 +393,8 @@ func (this *LexerIndexedCustomAction) execute(lexer ILexer) {
this.lexerAction.execute(lexer)
}
-func (this *LexerIndexedCustomAction) hashString() string {
-return strconv.Itoa(this.actionType) + strconv.Itoa(this.offset) + this.lexerAction.hashString()
+func (this *LexerIndexedCustomAction) Hash() string {
+return strconv.Itoa(this.actionType) + strconv.Itoa(this.offset) + this.lexerAction.Hash()
}
func (this *LexerIndexedCustomAction) equals(other ILexerAction) bool {

View File

@ -27,7 +27,7 @@ func NewLexerActionExecutor(lexerActions []ILexerAction) *LexerActionExecutor {
var s string
for _, a := range lexerActions {
-s += a.hashString()
+s += a.Hash()
}
this.cachedHashString = s // "".join([str(la) for la in lexerActions])
@ -153,7 +153,7 @@ func (this *LexerActionExecutor) execute(lexer ILexer, input CharStream, startIn
}
}
-func (this *LexerActionExecutor) hashString() string {
+func (this *LexerActionExecutor) Hash() string {
return this.cachedHashString
}

View File

@ -998,6 +998,7 @@ func (this *ParserATNSimulator) closureCheckingStopState(config IATNConfig, conf
} else {
// we have no context info, just chase follow links (if greedy)
if ParserATNSimulatorDebug {
fmt.Println("DEBUG 1")
fmt.Println("FALLING off rule " + this.getRuleName(config.GetState().GetRuleIndex()))
}
this.closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon)
@ -1022,6 +1023,7 @@ func (this *ParserATNSimulator) closureCheckingStopState(config IATNConfig, conf
} else {
// else if we have no context info, just chase follow links (if greedy)
if ParserATNSimulatorDebug {
fmt.Println("DEBUG 2")
fmt.Println("FALLING off rule " + this.getRuleName(config.GetState().GetRuleIndex()))
}
}
@ -1031,6 +1033,7 @@ func (this *ParserATNSimulator) closureCheckingStopState(config IATNConfig, conf
// Do the actual work of walking epsilon edges//
func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx bool, depth int, treatEofAsEpsilon bool) {
fmt.Println("closure_")
var p = config.GetState()
// optimization
if !p.GetEpsilonOnlyTransitions() {
@ -1044,6 +1047,7 @@ func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSe
var continueCollecting = collectPredicates && !ok
var c = this.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEofAsEpsilon)
if c != nil {
fmt.Println("DEBUG 1")
if !t.getIsEpsilon() && closureBusy.add(c) != c {
// avoid infinite recursion for EOF* and EOF+
continue
@ -1051,6 +1055,8 @@ func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSe
var newDepth = depth
if _, ok := config.GetState().(*RuleStopState); ok {
fmt.Println("DEBUG 2")
// target fell off end of rule; mark resulting c as having dipped into outer context
// We can't get here if incoming config was rule stop and we had context
// track how far we dip into outer context. Might
@ -1058,11 +1064,17 @@ func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSe
// preds if this is > 0.
if closureBusy.add(c) != c {
fmt.Println("DEBUG 3")
// avoid infinite recursion for right-recursive rules
continue
+} else {
+fmt.Println(c)
+fmt.Println(closureBusy)
}
if this._dfa != nil && this._dfa.precedenceDfa {
fmt.Println("DEBUG 4")
if t.(*EpsilonTransition).outermostPrecedenceReturn == this._dfa.atnStartState.GetRuleIndex() {
c.precedenceFilterSuppressed = true
}
@ -1420,7 +1432,7 @@ func (this *ParserATNSimulator) addDFAState(dfa *DFA, D *DFAState) *DFAState {
if D == ATNSimulatorERROR {
return D
}
-var hash = D.hashString()
+var hash = D.Hash()
var existing, ok = dfa.GetStates()[hash]
if ok {
return existing

View File

@ -6,7 +6,7 @@ import (
)
type IPredictionContext interface {
-hashString() string
+Hash() string
GetParent(int) IPredictionContext
getReturnState(int) int
equals(IPredictionContext) bool
@ -72,7 +72,7 @@ func (this *PredictionContext) isEmpty() bool {
return false
}
-func (this *PredictionContext) hashString() string {
+func (this *PredictionContext) Hash() string {
return this.cachedHashString
}
@ -180,7 +180,7 @@ func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
return true
} else if _, ok := other.(*SingletonPredictionContext); !ok {
return false
-} else if this.hashString() != other.hashString() {
+} else if this.Hash() != other.Hash() {
return false // can't be same if hash is different
} else {
@ -196,7 +196,7 @@ func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
}
}
-func (this *SingletonPredictionContext) hashString() string {
+func (this *SingletonPredictionContext) Hash() string {
return this.cachedHashString
}
@ -310,7 +310,7 @@ func (this *ArrayPredictionContext) getReturnState(index int) int {
func (this *ArrayPredictionContext) equals(other IPredictionContext) bool {
if _, ok := other.(*ArrayPredictionContext); !ok {
return false
-} else if this.cachedHashString != other.hashString() {
+} else if this.cachedHashString != other.Hash() {
return false // can't be same if hash is different
} else {
otherP := other.(*ArrayPredictionContext)
@ -441,11 +441,11 @@ func merge(a, b IPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict)
// /
func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
if mergeCache != nil {
-var previous = mergeCache.Get(a.hashString(), b.hashString())
+var previous = mergeCache.Get(a.Hash(), b.Hash())
if previous != nil {
return previous.(IPredictionContext)
}
-previous = mergeCache.Get(b.hashString(), a.hashString())
+previous = mergeCache.Get(b.Hash(), a.Hash())
if previous != nil {
return previous.(IPredictionContext)
}
@ -454,7 +454,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
var rootMerge = mergeRoot(a, b, rootIsWildcard)
if rootMerge != nil {
if mergeCache != nil {
-mergeCache.set(a.hashString(), b.hashString(), rootMerge)
+mergeCache.set(a.Hash(), b.Hash(), rootMerge)
}
return rootMerge
}
@ -474,7 +474,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
// Newjoined parent so create Newsingleton pointing to it, a'
var spc = SingletonPredictionContextCreate(parent, a.returnState)
if mergeCache != nil {
-mergeCache.set(a.hashString(), b.hashString(), spc)
+mergeCache.set(a.Hash(), b.Hash(), spc)
}
return spc
} else { // a != b payloads differ
@ -495,7 +495,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
var parents = []IPredictionContext{singleParent, singleParent}
var apc = NewArrayPredictionContext(parents, payloads)
if mergeCache != nil {
-mergeCache.set(a.hashString(), b.hashString(), apc)
+mergeCache.set(a.Hash(), b.Hash(), apc)
}
return apc
}
@ -511,7 +511,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
}
var a_ = NewArrayPredictionContext(parents, payloads)
if mergeCache != nil {
-mergeCache.set(a.hashString(), b.hashString(), a_)
+mergeCache.set(a.Hash(), b.Hash(), a_)
}
return a_
}
@ -601,11 +601,11 @@ func mergeRoot(a, b ISingletonPredictionContext, rootIsWildcard bool) IPredictio
// /
func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
if mergeCache != nil {
-var previous = mergeCache.Get(a.hashString(), b.hashString())
+var previous = mergeCache.Get(a.Hash(), b.Hash())
if previous != nil {
return previous.(IPredictionContext)
}
-previous = mergeCache.Get(b.hashString(), a.hashString())
+previous = mergeCache.Get(b.Hash(), a.Hash())
if previous != nil {
return previous.(IPredictionContext)
}
@ -669,7 +669,7 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
if k == 1 { // for just one merged element, return singleton top
var a_ = SingletonPredictionContextCreate(mergedParents[0], mergedReturnStates[0])
if mergeCache != nil {
-mergeCache.set(a.hashString(), b.hashString(), a_)
+mergeCache.set(a.Hash(), b.Hash(), a_)
}
return a_
}
@ -683,20 +683,20 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
// TODO: track whether this is possible above during merge sort for speed
if M == a {
if mergeCache != nil {
-mergeCache.set(a.hashString(), b.hashString(), a)
+mergeCache.set(a.Hash(), b.Hash(), a)
}
return a
}
if M == b {
if mergeCache != nil {
-mergeCache.set(a.hashString(), b.hashString(), b)
+mergeCache.set(a.Hash(), b.Hash(), b)
}
return b
}
combineCommonParents(mergedParents)
if mergeCache != nil {
-mergeCache.set(a.hashString(), b.hashString(), M)
+mergeCache.set(a.Hash(), b.Hash(), M)
}
return M
}
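mergeCache above is a DoubleDict keyed by the two operands' Hash() strings, probed in both orders. DoubleDict itself is not shown in this diff; a minimal sketch consistent with the Get/set calls used here (the body is an assumption):

// Two-level map keyed by a pair of strings, matching the
// mergeCache.Get(a.Hash(), b.Hash()) and mergeCache.set(...) calls above.
type DoubleDict struct {
	data map[string]map[string]interface{}
}

func NewDoubleDict() *DoubleDict {
	return &DoubleDict{data: make(map[string]map[string]interface{})}
}

func (d *DoubleDict) Get(a, b string) interface{} {
	if inner, ok := d.data[a]; ok {
		return inner[b]
	}
	return nil
}

func (d *DoubleDict) set(a, b string, v interface{}) {
	inner, ok := d.data[a]
	if !ok {
		inner = make(map[string]interface{})
		d.data[a] = inner
	}
	inner[b] = v
}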

View File

@ -88,7 +88,7 @@ func (this *Predicate) evaluate(parser IRecognizer, outerContext IRuleContext) b
return parser.Sempred(localctx, this.ruleIndex, this.predIndex)
}
-func (this *Predicate) hashString() string {
+func (this *Predicate) Hash() string {
return strconv.Itoa(this.ruleIndex) + "/" + strconv.Itoa(this.predIndex) + "/" + fmt.Sprint(this.isCtxDependent)
}
@ -136,7 +136,7 @@ func (this *PrecedencePredicate) compareTo(other *PrecedencePredicate) int {
return this.precedence - other.precedence
}
-func (this *PrecedencePredicate) hashString() string {
+func (this *PrecedencePredicate) Hash() string {
return "31"
}
@ -232,7 +232,7 @@ func (this *AND) equals(other interface{}) bool {
}
}
-func (this *AND) hashString() string {
+func (this *AND) Hash() string {
return fmt.Sprint(this.opnds) + "/AND"
}
@ -371,7 +371,7 @@ func (this *OR) equals(other interface{}) bool {
}
}
-func (this *OR) hashString() string {
+func (this *OR) Hash() string {
return fmt.Sprint(this.opnds) + "/OR"
}

View File

@ -6,8 +6,8 @@ import (
"hash/fnv"
"strings"
// "regexp"
"bytes"
"encoding/gob"
// "bytes"
// "encoding/gob"
)
func intMin(a, b int) int {
@ -44,11 +44,6 @@ func (s *IntStack) Push(e int) {
*s = append(*s, e)
}
-func hashCode(s string) string {
-h := fnv.New32a()
-h.Write([]byte((s)))
-return fmt.Sprint(h.Sum32())
-}
type Set struct {
data map[string][]interface{}
@ -78,28 +73,40 @@ func NewSet(hashFunction func(interface{}) string, equalsFunction func(interface
}
func standardHashFunction(a interface{}) string {
h, ok := a.(Hasher)
if ok {
return h.Hash()
}
return fmt.Sprint(a)
}
//func getBytes(key interface{}) ([]byte, error) {
// var buf bytes.Buffer
// enc := gob.NewEncoder(&buf)
// err := enc.Encode(key)
// if err != nil {
// return nil, err
// }
// return buf.Bytes(), nil
//}
type Hasher interface {
Hash() string
}
func hashCode(s string) string {
h := fnv.New32a()
-v, _ := getBytes(a)
-h.Write(v)
+h.Write([]byte((s)))
return fmt.Sprint(h.Sum32())
}
-func getBytes(key interface{}) ([]byte, error) {
-var buf bytes.Buffer
-enc := gob.NewEncoder(&buf)
-err := enc.Encode(key)
-if err != nil {
-return nil, err
-}
-return buf.Bytes(), nil
-}
func standardEqualsFunction(a interface{}, b interface{}) bool {
return standardHashFunction(a) == standardHashFunction(b)
}
func (this *Set) length() int {
return len(this.data)
}
@ -130,7 +137,7 @@ func (this *Set) add(value interface{}) interface{} {
func (this *Set) contains(value interface{}) bool {
hash := this.hashFunction(value)
-key := hashCode(hash)
+key := "hash_" + hashCode(hash)
values := this.data[key]
@ -157,7 +164,16 @@ func (this *Set) values() []interface{} {
}
func (this *Set) String() string {
-return fmt.Sprint(this.data)
+s := ""
+for _, av := range this.data {
+for _, v := range av {
+s += fmt.Sprint(v)
+}
+}
+return s
}
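With standardHashFunction now preferring the Hasher interface, any value that implements Hash() gets a stable bucket key via hashCode (FNV-1a over the hash string). An illustrative Set client, assuming NewSet falls back to the standard hash/equals functions when passed nil, and that fmt and strconv are imported:

type point struct{ x, y int }

// point implements Hasher, so standardHashFunction will use this composite
// key instead of fmt.Sprint's default formatting.
func (p point) Hash() string {
	return strconv.Itoa(p.x) + "/" + strconv.Itoa(p.y)
}

func setExample() {
	s := NewSet(nil, nil) // assumption: nil selects the standard functions
	s.add(point{1, 2})
	fmt.Println(s.contains(point{1, 2})) // true
	fmt.Println(s.contains(point{3, 4})) // false
}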
type BitSet struct {

View File

@ -150,9 +150,7 @@ BufferedTokenStream.prototype.sync = function(i) {
var n = i - this.tokens.length + 1; // how many more elements we need?
if (n > 0) {
var fetched = this.fetch(n);
-var e = new Error();
console.log("sync done")
-console.log(e.stack)
return fetched >= n;
}

View File

@ -132,7 +132,6 @@ LexerATNSimulator.prototype.match = function(input, mode) {
return this.matchATN(input);
} else {
console.log("execATN")
-console.log((new Error()).stack)
var res = this.execATN(input, dfa.s0);
return res;
}
@ -235,9 +234,7 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
}
console.log("Done with execATN loop")
-var res = this.failOrAccept(this.prevAccept, input, s.configs, t);
-console.log("Done with failOrAccept", res)
-return res;
+return this.failOrAccept(this.prevAccept, input, s.configs, t);
};
// Get an existing target state for an edge in the DFA. If the target state

View File

@ -1254,6 +1254,7 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
} else {
// we have no context info, just chase follow links (if greedy)
if (this.debug) {
console.log("DEBUG 1")
console.log("FALLING off rule " + this.getRuleName(config.state.ruleIndex));
}
this.closure_(config, configs, closureBusy, collectPredicates,
@ -1279,6 +1280,7 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
} else {
// else if we have no context info, just chase follow links (if greedy)
if (this.debug) {
console.log("DEBUG 2")
console.log("FALLING off rule " + this.getRuleName(config.state.ruleIndex));
}
}
@ -1288,6 +1290,7 @@ ParserATNSimulator.prototype.closureCheckingStopState = function(config, configs
// Do the actual work of walking epsilon edges//
ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon) {
console.log("closure_")
var p = config.state;
// optimization
if (! p.epsilonOnlyTransitions) {
@ -1300,12 +1303,15 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
var continueCollecting = collectPredicates && !(t instanceof ActionTransition);
var c = this.getEpsilonTarget(config, t, continueCollecting, depth === 0, fullCtx, treatEofAsEpsilon);
if (c!==null) {
console.log("DEBUG 1")
if (!t.isEpsilon && closureBusy.add(c)!==c){
// avoid infinite recursion for EOF* and EOF+
continue;
}
var newDepth = depth;
if ( config.state instanceof RuleStopState) {
console.log("DEBUG 2")
// target fell off end of rule; mark resulting c as having dipped into outer context
// We can't get here if incoming config was rule stop and we had context
// track how far we dip into outer context. Might
@ -1313,11 +1319,16 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
// preds if this is > 0.
if (closureBusy.add(c)!==c) {
console.log("DEBUG 3")
// avoid infinite recursion for right-recursive rules
continue;
+} else {
+console.log(c.toString())
+console.log(closureBusy.toString())
}
if (this._dfa !== null && this._dfa.precedenceDfa) {
console.log("DEBUG 4")
if (t.outermostPrecedenceReturn === this._dfa.atnStartState.ruleIndex) {
c.precedenceFilterSuppressed = true;
}
@ -1327,6 +1338,7 @@ ParserATNSimulator.prototype.closure_ = function(config, configs, closureBusy, c
configs.dipsIntoOuterContext = true; // TODO: can remove? only care when we add to set per middle of this method
newDepth -= 1;
if (this.debug) {
+// console.log((new Error()).stack)
console.log("dips into outer ctx: " + c);
}
} else if (t instanceof RuleTransition) {