More bug fixing and debugging

Peter Boyer 2015-12-28 20:31:56 -06:00
parent 4ffb3f81b6
commit c08a19233c
46 changed files with 603 additions and 295 deletions

View File

@ -0,0 +1 @@
1 + 2 = 3 + 5

View File

@ -15,8 +15,12 @@ func main() {
p := parser.NewArithmeticParser(s)
p.BuildParseTrees = true
p.Equation()
}

View File

@ -1,4 +1,5 @@
package antlr4
import "fmt"
var ATNINVALID_ALT_NUMBER = 0
@ -55,7 +56,8 @@ func NewATN(grammarType int, maxTokenType int) *ATN {
// restricted to tokens reachable staying within {@code s}'s rule.
func (this *ATN) nextTokensInContext(s IATNState, ctx IRuleContext) *IntervalSet {
var anal = NewLL1Analyzer(this)
return anal.LOOK(s, nil, ctx)
var res = anal.LOOK(s, nil, ctx)
return res
}
// Compute the set of valid tokens that can occur starting in {@code s} and
@ -63,8 +65,11 @@ func (this *ATN) nextTokensInContext(s IATNState, ctx IRuleContext) *IntervalSet
// rule.
func (this *ATN) nextTokensNoContext(s IATNState) *IntervalSet {
if s.GetNextTokenWithinRule() != nil {
fmt.Println("DEBUG 1")
return s.GetNextTokenWithinRule()
}
fmt.Println("DEBUG 2")
fmt.Println(this.nextTokensInContext(s, nil))
s.SetNextTokenWithinRule(this.nextTokensInContext(s, nil))
s.GetNextTokenWithinRule().readOnly = true
return s.GetNextTokenWithinRule()
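
Aside: the hunk above is a memoization pattern — compute the follow set once, cache it via SetNextTokenWithinRule, and mark it readOnly so every caller shares one frozen instance. A minimal, self-contained sketch of that caching pattern (all names below are hypothetical stand-ins, not runtime types):

package main

import "fmt"

type intervalSet struct {
	values   []int
	readOnly bool
}

type atnState struct {
	cached *intervalSet // nil until first computed
}

func (s *atnState) nextTokens(compute func() *intervalSet) *intervalSet {
	if s.cached != nil {
		return s.cached // fast path: reuse the frozen result
	}
	s.cached = compute()
	s.cached.readOnly = true // freeze it, as the ATN does above
	return s.cached
}

func main() {
	s := &atnState{}
	set := s.nextTokens(func() *intervalSet { return &intervalSet{values: []int{1, 2}} })
	fmt.Println(set.values, set.readOnly) // [1 2] true
}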

View File

@ -29,6 +29,8 @@ type IATNConfig interface {
SetReachesIntoOuterContext(int)
String() string
shortHashString() string
}
type ATNConfig struct {

View File

@ -11,20 +11,18 @@ import (
///
func hashATNConfig(c interface{}) string {
return c.(*ATNConfig).shortHashString()
return c.(IATNConfig).shortHashString()
}
func equalATNConfigs(a, b interface{}) bool {
fmt.Println("compare")
fmt.Println(a)
if a == nil || b == nil {
return false
}
if a == b {
return true
}
if a == nil || b == nil {
return false
}
ai,ok := a.(IATNConfig)
bi,ok1 := b.(IATNConfig)
@ -107,8 +105,6 @@ func NewATNConfigSet(fullCtx bool) *ATNConfigSet {
// /
func (this *ATNConfigSet) add(config IATNConfig, mergeCache *DoubleDict) bool {
// fmt.Println("DEBUG = Adding config : " + config.String())
if this.readOnly {
panic("This set is readonly")
}
@ -136,6 +132,7 @@ func (this *ATNConfigSet) add(config IATNConfig, mergeCache *DoubleDict) bool {
existing.setPrecedenceFilterSuppressed(true)
}
existing.SetContext(merged) // replace context no need to alt mapping
return true
}
@ -288,6 +285,9 @@ func (this *ATNConfigSet) String() string {
return s
}
type OrderedATNConfigSet struct {
*ATNConfigSet
}
@ -297,7 +297,7 @@ func NewOrderedATNConfigSet() *OrderedATNConfigSet {
this := new(OrderedATNConfigSet)
this.ATNConfigSet = NewATNConfigSet(false)
this.configLookup = NewSet(nil, nil)
// this.configLookup = NewSet(nil, nil) // TODO not sure why this would be overriden
return this
}

View File

@ -11,13 +11,15 @@
package antlr4
import "strconv"
import (
"strconv"
)
// bt is just to keep meaningful parameter types to Parser
type BufferedTokenStream struct {
tokenSource TokenSource
tokens []*Token
tokens []IToken
index int
fetchedEOF bool
channel int
@ -33,7 +35,7 @@ func NewBufferedTokenStream(tokenSource TokenSource) *BufferedTokenStream {
// A collection of all tokens fetched from the token source. The list is
// considered a complete view of the input once {@link //fetchedEOF} is set
// to {@code true}.
ts.tokens = make([]*Token, 0)
ts.tokens = make([]IToken, 0)
// The index into {@link //tokens} of the current token (next token to
// {@link //consume}). {@link //tokens}{@code [}{@link //p}{@code ]} should
@ -83,7 +85,7 @@ func (bt *BufferedTokenStream) Seek(index int) {
bt.index = bt.adjustSeekIndex(index)
}
func (bt *BufferedTokenStream) Get(index int) *Token {
func (bt *BufferedTokenStream) Get(index int) IToken {
bt.lazyInit()
return bt.tokens[index]
}
@ -136,10 +138,10 @@ func (bt *BufferedTokenStream) fetch(n int) int {
}
for i := 0; i < n; i++ {
var t *Token = bt.tokenSource.nextToken()
t.tokenIndex = len(bt.tokens)
var t IToken = bt.tokenSource.nextToken()
t.SetTokenIndex( len(bt.tokens) )
bt.tokens = append(bt.tokens, t)
if t.tokenType == TokenEOF {
if t.GetTokenType() == TokenEOF {
bt.fetchedEOF = true
return i + 1
}
@ -148,22 +150,22 @@ func (bt *BufferedTokenStream) fetch(n int) int {
}
// Get all tokens from start..stop inclusively///
func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet) []*Token {
func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet) []IToken {
if start < 0 || stop < 0 {
return nil
}
bt.lazyInit()
var subset = make([]*Token, 0)
var subset = make([]IToken, 0)
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
for i := start; i < stop; i++ {
var t = bt.tokens[i]
if t.tokenType == TokenEOF {
if t.GetTokenType() == TokenEOF {
break
}
if types == nil || types.contains(t.tokenType) {
if types == nil || types.contains(t.GetTokenType()) {
subset = append(subset, t)
}
}
@ -171,17 +173,17 @@ func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet
}
func (bt *BufferedTokenStream) LA(i int) int {
return bt.LT(i).tokenType
return bt.LT(i).GetTokenType()
}
func (bt *BufferedTokenStream) LB(k int) *Token {
func (bt *BufferedTokenStream) LB(k int) IToken {
if bt.index-k < 0 {
return nil
}
return bt.tokens[bt.index-k]
}
func (bt *BufferedTokenStream) LT(k int) *Token {
func (bt *BufferedTokenStream) LT(k int) IToken {
bt.lazyInit()
if k == 0 {
return nil
@ -233,7 +235,7 @@ func (bt *BufferedTokenStream) GetTokenSource() TokenSource {
// Reset this token stream by setting its token source.///
func (bt *BufferedTokenStream) SetTokenSource(tokenSource TokenSource) {
bt.tokenSource = tokenSource
bt.tokens = make([]*Token, 0)
bt.tokens = make([]IToken, 0)
bt.index = -1
}
@ -247,8 +249,8 @@ func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel int) int {
return -1
}
var token = bt.tokens[i]
for token.channel != bt.channel {
if token.tokenType == TokenEOF {
for token.GetChannel() != bt.channel {
if token.GetTokenType() == TokenEOF {
return -1
}
i += 1
@ -262,7 +264,7 @@ func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel int) int {
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and 0.
func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel int) int {
for i >= 0 && bt.tokens[i].channel != channel {
for i >= 0 && bt.tokens[i].GetChannel() != channel {
i -= 1
}
return i
@ -271,7 +273,7 @@ func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel int) int {
// Collect all tokens on specified channel to the right of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
// EOF. If channel is -1, find any non default channel token.
func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []*Token {
func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
@ -291,7 +293,7 @@ func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) [
// Collect all tokens on specified channel to the left of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
// If channel is -1, find any non default channel token.
func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []*Token {
func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
@ -306,15 +308,15 @@ func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []
return bt.filterForChannel(from_, to, channel)
}
func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) []*Token {
var hidden = make([]*Token, 0)
func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) []IToken {
var hidden = make([]IToken, 0)
for i := left; i < right+1; i++ {
var t = bt.tokens[i]
if channel == -1 {
if t.channel != LexerDefaultTokenChannel {
if t.GetChannel() != LexerDefaultTokenChannel {
hidden = append(hidden, t)
}
} else if t.channel == channel {
} else if t.GetChannel() == channel {
hidden = append(hidden, t)
}
}
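
The channel filter above is the core of hidden-token retrieval: channel == -1 collects every off-default token (comments, whitespace), otherwise only tokens on the requested channel. A self-contained model of the same loop (the tok type is a stand-in, not the runtime's):

package main

import "fmt"

const defaultChannel = 0

type tok struct {
	text    string
	channel int
}

func filterForChannel(tokens []tok, left, right, channel int) []tok {
	var hidden []tok
	for i := left; i <= right; i++ {
		t := tokens[i]
		if channel == -1 {
			if t.channel != defaultChannel {
				hidden = append(hidden, t)
			}
		} else if t.channel == channel {
			hidden = append(hidden, t)
		}
	}
	return hidden
}

func main() {
	toks := []tok{{"a", 0}, {"/*c*/", 1}, {" ", 2}, {"b", 0}}
	fmt.Println(filterForChannel(toks, 0, 3, -1)) // collects /*c*/ and the space
}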
@ -340,7 +342,7 @@ func (bt *BufferedTokenStream) GetAllText() string {
return bt.GetTextFromInterval(nil)
}
func (bt *BufferedTokenStream) GetTextFromTokens(start, end *Token) string {
func (bt *BufferedTokenStream) GetTextFromTokens(start, end IToken) string {
return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
}
@ -349,11 +351,13 @@ func (bt *BufferedTokenStream) GetTextFromRuleContext(interval IRuleContext) str
}
func (bt *BufferedTokenStream) GetTextFromInterval(interval *Interval) string {
bt.lazyInit()
bt.fill()
if interval == nil {
interval = NewInterval(0, len(bt.tokens)-1)
}
var start = interval.start
var stop = interval.stop
if start < 0 || stop < 0 {
@ -362,14 +366,16 @@ func (bt *BufferedTokenStream) GetTextFromInterval(interval *Interval) string {
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
var s = ""
for i := start; i < stop+1; i++ {
var t = bt.tokens[i]
if t.tokenType == TokenEOF {
if t.GetTokenType() == TokenEOF {
break
}
s += t.text()
s += t.GetText()
}
return s
}
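
GetTextFromInterval clamps the interval, walks the buffered tokens, and concatenates their text, stopping at EOF. A minimal stand-alone model of that loop (hypothetical types):

package main

import "fmt"

type tok struct {
	text string
	eof  bool
}

// Mirrors the loop above: join token texts over [start, stop], halting at EOF.
func textFromInterval(tokens []tok, start, stop int) string {
	if stop >= len(tokens) {
		stop = len(tokens) - 1
	}
	s := ""
	for i := start; i <= stop; i++ {
		if tokens[i].eof {
			break
		}
		s += tokens[i].text
	}
	return s
}

func main() {
	toks := []tok{{"1", false}, {"+", false}, {"2", false}, {"", true}}
	fmt.Println(textFromInterval(toks, 0, 3)) // 1+2
}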

View File

@ -4,9 +4,10 @@
//
package antlr4
import "fmt"
type TokenFactory interface {
Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token
Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) IToken
}
type CommonTokenFactory struct {
@ -45,22 +46,27 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
//
var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token {
func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) IToken {
fmt.Println("Token factory creating: " + text)
var t = NewCommonToken(source, ttype, channel, start, stop)
t.line = line
t.column = column
if text != "" {
t.setText(text)
t.SetText(text)
} else if this.copyText && source.charStream != nil {
t.setText(source.charStream.GetTextFromInterval(NewInterval(start, stop)))
t.SetText(source.charStream.GetTextFromInterval(NewInterval(start, stop)))
}
return t.Token
}
func (this *CommonTokenFactory) createThin(ttype int, text string) *Token {
func (this *CommonTokenFactory) createThin(ttype int, text string) IToken {
fmt.Println("Token factory creating: " + text)
var t = NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
t.setText(text)
t.SetText(text)
return t.Token
}
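
As a usage sketch (not part of this commit): with the Create signature above, a detached token can be conjured the same way createThin does, passing a nil source pair and -1 start/stop. The helper name and the token type are arbitrary placeholders:

// Hypothetical helper in the same package:
func makeDetachedToken(ttype int, text string) IToken {
	return CommonTokenFactoryDEFAULT.Create(nil, ttype, text,
		TokenDefaultChannel, -1, -1, 1, 0)
}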

View File

@ -43,7 +43,7 @@ func (ts *CommonTokenStream) adjustSeekIndex(i int) int {
return ts.nextTokenOnChannel(i, ts.channel)
}
func (ts *CommonTokenStream) LB(k int) *Token {
func (ts *CommonTokenStream) LB(k int) IToken {
if k == 0 || ts.index-k < 0 {
return nil
}
@ -61,7 +61,7 @@ func (ts *CommonTokenStream) LB(k int) *Token {
return ts.tokens[i]
}
func (ts *CommonTokenStream) LT(k int) *Token {
func (ts *CommonTokenStream) LT(k int) IToken {
ts.lazyInit()
if k == 0 {
return nil
@ -88,10 +88,10 @@ func (ts *CommonTokenStream) getNumberOfOnChannelTokens() int {
ts.fill()
for i := 0; i < len(ts.tokens); i++ {
var t = ts.tokens[i]
if t.channel == ts.channel {
if t.GetChannel() == ts.channel {
n += 1
}
if t.tokenType == TokenEOF {
if t.GetTokenType() == TokenEOF {
break
}
}

View File

@ -5,7 +5,12 @@ import (
"strconv"
)
// A DFA walker that knows how to dump them to serialized strings.#/
// A DFA walker that knows how to dump them to serialized strings.
type IDFASerializer interface {
}
type DFASerializer struct {
dfa *DFA
@ -66,14 +71,12 @@ func (this *DFASerializer) String() string {
func (this *DFASerializer) getEdgeLabel(i int) string {
if i == 0 {
return "EOF"
} else if this.literalNames != nil || this.symbolicNames != nil {
if this.literalNames[i-1] == "" {
} else if this.literalNames != nil && i - 1 < len(this.literalNames) {
return this.literalNames[i-1]
} else {
} else if this.symbolicNames != nil && i - 1 < len(this.symbolicNames) {
return this.symbolicNames[i-1]
}
} else {
return string(i - 1)
return strconv.Itoa(i-1)
}
}
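
The change above fixes a classic Go pitfall: converting an int with string(i) yields the UTF-8 encoding of code point i, not its decimal digits; strconv.Itoa produces the text an edge label needs. A stand-alone demonstration (modern Go requires the explicit rune conversion that the old code omitted):

package main

import (
	"fmt"
	"strconv"
)

func main() {
	i := 65
	fmt.Println(string(rune(i))) // "A": code point 65, what string(i) produced
	fmt.Println(strconv.Itoa(i)) // "65": the decimal text the label needs
}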
@ -101,6 +104,8 @@ func (this *DFASerializer) GetStateString(s *DFAState) string {
}
}
type LexerDFASerializer struct {
*DFASerializer
}
@ -117,3 +122,35 @@ func NewLexerDFASerializer(dfa *DFA) *LexerDFASerializer {
func (this *LexerDFASerializer) getEdgeLabel(i int) string {
return "'" + string(i) + "'"
}
func (this *LexerDFASerializer) String() string {
if this.dfa.s0 == nil {
return ""
}
var buf = ""
var states = this.dfa.sortedStates()
for i := 0; i < len(states); i++ {
var s = states[i]
if s.edges != nil {
var n = len(s.edges)
for j := 0; j < n; j++ {
var t = s.edges[j]
if t != nil && t.stateNumber != 0x7FFFFFFF {
buf += this.GetStateString(s)
buf += "-"
buf += this.getEdgeLabel(j)
buf += "->"
buf += this.GetStateString(t)
buf += "\n"
}
}
}
}
if len(buf) == 0 {
return ""
}
return buf
}

View File

@ -132,6 +132,7 @@ func (this *DFAState) GetAltSet() *Set {
// {@link ParserATNSimulator//addDFAState} we need to know if any other state
// exists that has this exact set of ATN configurations. The
// {@link //stateNumber} is irrelevant.</p>
func (this *DFAState) equals(other interface{}) bool {
if this == other {
@ -151,7 +152,7 @@ func (this *DFAState) hashString() string {
var s string
if (this.isAcceptState) {
if (this.predicates == nil) {
if (this.predicates != nil) {
s = "=>" + fmt.Sprint(this.predicates)
} else {
s = "=>" + fmt.Sprint(this.prediction)

View File

@ -24,15 +24,19 @@ func NewErrorListener() *DefaultErrorListener {
}
func (this *DefaultErrorListener) SyntaxError(recognizer IRecognizer, offendingSymbol interface{}, line, column int, msg string, e IRecognitionException) {
fmt.Println("SyntaxError!")
}
func (this *DefaultErrorListener) ReportAmbiguity(recognizer IParser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
fmt.Println("ReportAmbiguity!")
}
func (this *DefaultErrorListener) ReportAttemptingFullContext(recognizer IParser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet) {
fmt.Println("ReportAttemptingFullContext!")
}
func (this *DefaultErrorListener) ReportContextSensitivity(recognizer IParser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet) {
fmt.Println("ReportContextSensitivity!")
}
type ConsoleErrorListener struct {

View File

@ -9,7 +9,7 @@ import (
type IErrorStrategy interface {
reset(IParser)
RecoverInline(IParser) *Token
RecoverInline(IParser) IToken
Recover(IParser, IRecognitionException)
Sync(IParser)
inErrorRecoveryMode(IParser) bool
@ -208,20 +208,35 @@ func (this *DefaultErrorStrategy) Sync(recognizer IParser) {
if this.inErrorRecoveryMode(recognizer) {
return
}
fmt.Println("STATE" + strconv.Itoa(recognizer.GetState()))
var s = recognizer.GetInterpreter().atn.states[recognizer.GetState()]
var la = recognizer.GetTokenStream().LA(1)
fmt.Println("LA" + strconv.Itoa(la))
// try cheaper subset first might get lucky. seems to shave a wee bit off
if la == TokenEOF || recognizer.GetATN().nextTokens(s, nil).contains(la) {
fmt.Println("OK1")
return
}
// Return but don't end recovery. only do that upon valid token Match
if recognizer.isExpectedToken(la) {
fmt.Println("OK2")
return
}
fmt.Println("LA" + strconv.Itoa(la))
fmt.Println(recognizer.GetATN().nextTokens(s, nil))
switch s.GetStateType() {
case ATNStateBLOCK_START:
fallthrough
case ATNStateSTAR_BLOCK_START:
fallthrough
case ATNStatePLUS_BLOCK_START:
fallthrough
case ATNStateSTAR_LOOP_ENTRY:
// Report error and recover if possible
if this.singleTokenDeletion(recognizer) != nil {
@ -229,15 +244,14 @@ func (this *DefaultErrorStrategy) Sync(recognizer IParser) {
} else {
panic(NewInputMisMatchException(recognizer))
}
break
case ATNStatePLUS_LOOP_BACK:
fallthrough
case ATNStateSTAR_LOOP_BACK:
this.ReportUnwantedToken(recognizer)
var expecting = NewIntervalSet()
expecting.addSet(recognizer.getExpectedTokens())
var whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer))
this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
break
default:
// do nothing if we can't identify the exact kind of ATN state
}
@ -255,7 +269,7 @@ func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer IParser,
var tokens = recognizer.GetTokenStream()
var input string
if tokens != nil {
if e.startToken.tokenType == TokenEOF {
if e.startToken.GetTokenType() == TokenEOF {
input = "<EOF>"
} else {
input = tokens.GetTextFromTokens(e.startToken, e.offendingToken)
@ -279,6 +293,7 @@ func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer IParser,
func (this *DefaultErrorStrategy) ReportInputMisMatch(recognizer IParser, e *InputMisMatchException) {
var msg = "misMatched input " + this.GetTokenErrorDisplay(e.offendingToken) +
" expecting " + e.getExpectedTokens().StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
panic(msg)
recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
}
@ -324,6 +339,7 @@ func (this *DefaultErrorStrategy) ReportUnwantedToken(recognizer IParser) {
var expecting = this.getExpectedTokens(recognizer)
var msg = "extraneous input " + tokenName + " expecting " +
expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
panic(msg)
recognizer.NotifyErrorListeners(msg, t, nil)
}
@ -404,7 +420,7 @@ func (this *DefaultErrorStrategy) ReportMissingToken(recognizer IParser) {
// is in the set of tokens that can follow the {@code ')'} token reference
// in rule {@code atom}. It can assume that you forgot the {@code ')'}.
//
func (this *DefaultErrorStrategy) RecoverInline(recognizer IParser) *Token {
func (this *DefaultErrorStrategy) RecoverInline(recognizer IParser) IToken {
// SINGLE TOKEN DELETION
var MatchedSymbol = this.singleTokenDeletion(recognizer)
if MatchedSymbol != nil {
@ -473,7 +489,7 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer IParser) bool
// deletion successfully recovers from the misMatched input, otherwise
// {@code nil}
//
func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer IParser) *Token {
func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer IParser) IToken {
var nextTokenType = recognizer.GetTokenStream().LA(2)
var expecting = this.getExpectedTokens(recognizer)
if expecting.contains(nextTokenType) {
@ -511,7 +527,7 @@ func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer IParser) *Token
// If you change what tokens must be created by the lexer,
// override this method to create the appropriate tokens.
//
func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) *Token {
func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) IToken {
var currentSymbol = recognizer.getCurrentToken()
var expecting = this.getExpectedTokens(recognizer)
var expectedTokenType = expecting.first()
@ -523,12 +539,12 @@ func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) *Token {
}
var current = currentSymbol
var lookback = recognizer.GetTokenStream().LT(-1)
if current.tokenType == TokenEOF && lookback != nil {
if current.GetTokenType() == TokenEOF && lookback != nil {
current = lookback
}
tf := recognizer.GetTokenFactory()
return tf.Create(current.source, expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.line, current.column)
return tf.Create( current.GetSource(), expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.GetLine(), current.GetColumn())
}
func (this *DefaultErrorStrategy) getExpectedTokens(recognizer IParser) *IntervalSet {
@ -543,16 +559,16 @@ func (this *DefaultErrorStrategy) getExpectedTokens(recognizer IParser) *Interva
// your token objects because you don't have to go modify your lexer
// so that it creates a new Java type.
//
func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t *Token) string {
func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t IToken) string {
if t == nil {
return "<no token>"
}
var s = t.text()
var s = t.GetText()
if s == "" {
if t.tokenType == TokenEOF {
if t.GetTokenType() == TokenEOF {
s = "<EOF>"
} else {
s = "<" + strconv.Itoa(t.tokenType) + ">"
s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
}
}
return this.escapeWSAndQuote(s)
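
The recovery routines above hinge on one test: singleTokenDeletion asks whether the token after the offending one (LA(2)) is in the expected set; if so, deleting the offender resyncs the parser, otherwise getMissingSymbol conjures the expected token instead. A toy model of that decision (hypothetical names):

package main

import "fmt"

func canDeleteOffender(expected map[int]bool, la2 int) bool {
	return expected[la2] // LA(2) matches: drop the extra token and continue
}

func main() {
	expected := map[int]bool{42: true} // say 42 is the ')' token type
	fmt.Println(canDeleteOffender(expected, 42)) // true: single-token deletion
	fmt.Println(canDeleteOffender(expected, 7))  // false: fall back to insertion
}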

View File

@ -9,7 +9,7 @@ import ()
// and what kind of problem occurred.
type IRecognitionException interface {
GetOffendingToken() *Token
GetOffendingToken() IToken
GetMessage() string
GetInputStream() IntStream
}
@ -17,7 +17,7 @@ type IRecognitionException interface {
type RecognitionException struct {
message string
recognizer IRecognizer
offendingToken *Token
offendingToken IToken
offendingState int
ctx IRuleContext
input IntStream
@ -62,7 +62,7 @@ func (this *RecognitionException) GetMessage() string {
return this.message
}
func (this *RecognitionException) GetOffendingToken() *Token {
func (this *RecognitionException) GetOffendingToken() IToken {
return this.offendingToken
}
@ -124,8 +124,8 @@ func (this *LexerNoViableAltException) String() string {
type NoViableAltException struct {
*RecognitionException
startToken *Token
offendingToken *Token
startToken IToken
offendingToken IToken
ctx IParserRuleContext
deadEndConfigs *ATNConfigSet
}
@ -135,7 +135,7 @@ type NoViableAltException struct {
// of the offending input and also knows where the parser was
// in the various paths when the error occurred. Reported by ReportNoViableAlternative()
//
func NewNoViableAltException(recognizer IParser, input TokenStream, startToken *Token, offendingToken *Token, deadEndConfigs *ATNConfigSet, ctx IParserRuleContext) *NoViableAltException {
func NewNoViableAltException(recognizer IParser, input TokenStream, startToken IToken, offendingToken IToken, deadEndConfigs *ATNConfigSet, ctx IParserRuleContext) *NoViableAltException {
if ctx == nil {
ctx = recognizer.GetParserRuleContext()

View File

@ -32,6 +32,7 @@ func (is *InputStream) Consume() {
}
func (is *InputStream) LA(offset int) int {
if offset == 0 {
return 0 // nil
}
@ -39,9 +40,11 @@ func (is *InputStream) LA(offset int) int {
offset += 1 // e.g., translate LA(-1) to use offset=0
}
var pos = is.index + offset - 1
if pos < 0 || pos >= is.size { // invalid
return TokenEOF
}
return int(is.data[pos])
}
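
LA uses 1-based lookahead with negative offsets for lookbehind (LA(-1) is the previous character); the bounds check added above maps any out-of-range read to EOF instead of indexing past the buffer. A self-contained model:

package main

import "fmt"

const eof = -1

func la(data []rune, index, offset int) int {
	if offset == 0 {
		return 0 // undefined, as above
	}
	if offset < 0 {
		offset++ // translate LA(-1) to offset 0
	}
	pos := index + offset - 1
	if pos < 0 || pos >= len(data) {
		return eof // the new bounds check
	}
	return int(data[pos])
}

func main() {
	data := []rune("ab")
	fmt.Println(la(data, 0, 1)) // 97 ('a')
	fmt.Println(la(data, 0, 3)) // -1 (EOF)
}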

View File

@ -3,6 +3,7 @@ package antlr4
import (
"strconv"
"strings"
"fmt"
)
type Interval struct {
@ -67,6 +68,7 @@ func (i *IntervalSet) addRange(l, h int) {
}
func (is *IntervalSet) addInterval(v *Interval) {
fmt.Println("addInterval" + v.String())
if is.intervals == nil {
is.intervals = make([]*Interval, 0)
is.intervals = append(is.intervals, v)
@ -94,7 +96,9 @@ func (is *IntervalSet) addInterval(v *Interval) {
}
func (i *IntervalSet) addSet(other *IntervalSet) *IntervalSet {
fmt.Println("addSet")
if other.intervals != nil {
fmt.Println(len(other.intervals))
for k := 0; k < len(other.intervals); k++ {
var i2 = other.intervals[k]
i.addInterval(NewInterval(i2.start, i2.stop))
@ -249,6 +253,7 @@ func (is *IntervalSet) toCharString() string {
}
func (is *IntervalSet) toIndexString() string {
var names = make([]string, 0)
for i := 0; i < len(is.intervals); i++ {
var v = is.intervals[i]
@ -256,10 +261,10 @@ func (is *IntervalSet) toIndexString() string {
if v.start == TokenEOF {
names = append(names, "<EOF>")
} else {
names = append(names, string(v.start))
names = append(names, strconv.Itoa(v.start))
}
} else {
names = append(names, string(v.start)+".."+string(v.stop-1))
names = append(names, strconv.Itoa(v.start)+".."+strconv.Itoa(v.stop-1))
}
}
if len(names) > 1 {

View File

@ -1,6 +1,8 @@
package antlr4
import ()
import (
"fmt"
)
type LL1Analyzer struct {
atn *ATN
@ -71,9 +73,18 @@ func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx IRuleContext) *IntervalS
var seeThruPreds = true // ignore preds get all lookahead
var lookContext IPredictionContext
if ctx != nil {
predictionContextFromRuleContext(s.GetATN(), ctx)
lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)
}
fmt.Println("DEBUG 5")
// fmt.Println("DEBUG" + lookContext.String())
fmt.Println(s)
fmt.Println(stopState)
fmt.Println(lookContext)
fmt.Println(r)
fmt.Println(seeThruPreds)
fmt.Println("=====")
la._LOOK(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
fmt.Println(r)
return r
}
@ -107,15 +118,36 @@ func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx IRuleContext) *IntervalS
// outermost context is reached. This parameter has no effect if {@code ctx}
// is {@code nil}.
func (la *LL1Analyzer) __LOOK(s, stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int){
returnState := la.atn.states[ctx.getReturnState(i)]
removed := calledRuleStack.contains(returnState.GetRuleIndex())
defer func() {
if removed {
calledRuleStack.add(returnState.GetRuleIndex())
}
}()
calledRuleStack.remove(returnState.GetRuleIndex())
la._LOOK(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
}
func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
c := NewATNConfig6(s, 0, ctx)
if lookBusy.add(c) == nil {
if lookBusy.contains(c) {
return
}
lookBusy.add(c)
if s == stopState {
fmt.Println("DEBUG 6")
if ctx == nil {
look.addOne(TokenEpsilon)
return
@ -137,24 +169,13 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
}
if ctx != PredictionContextEMPTY {
fmt.Println("DEBUG 7")
// run thru all possible stack tops in ctx
for i := 0; i < ctx.length(); i++ {
returnState := la.atn.states[ctx.getReturnState(i)]
// System.out.println("popping back to "+retState)
removed := calledRuleStack.contains(returnState.GetRuleIndex())
// TODO this is incorrect
defer func() {
if removed {
calledRuleStack.add(returnState.GetRuleIndex())
}
}()
calledRuleStack.clear(returnState.GetRuleIndex())
la._LOOK(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la.__LOOK(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
}
return
@ -167,6 +188,7 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
t := s.GetTransitions()[i]
if t1, ok := t.(*RuleTransition); ok {
fmt.Println("DEBUG 8")
if calledRuleStack.contains(t1.getTarget().GetRuleIndex()) {
continue
@ -174,24 +196,34 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
newContext := SingletonPredictionContextCreate(ctx, t1.followState.GetStateNumber())
defer func() {
calledRuleStack.remove(t1.getTarget().GetRuleIndex())
}()
la.___LOOK(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
calledRuleStack.add(t1.getTarget().GetRuleIndex())
la._LOOK(t.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else if t2, ok := t.(*AbstractPredicateTransition); ok {
fmt.Println(look)
//
// defer func() {
// calledRuleStack.remove(t1.getTarget().GetRuleIndex())
// }()
//
// calledRuleStack.add(t1.getTarget().GetRuleIndex())
// la._LOOK(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else if t2, ok := t.(IAbstractPredicateTransition); ok {
fmt.Println("DEBUG 9")
if seeThruPreds {
la._LOOK(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
look.addOne(LL1AnalyzerHIT_PRED)
}
} else if t.getIsEpsilon() {
fmt.Println("DEBUG 10")
la._LOOK(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else if _, ok := t.(*WildcardTransition); ok {
fmt.Println("DEBUG 11")
look.addRange(TokenMinUserTokenType, la.atn.maxTokenType)
} else {
fmt.Println("DEBUG 12")
set := t.getLabel()
fmt.Println(set)
if set != nil {
if _, ok := t.(*NotSetTransition); ok {
set = set.complement(TokenMinUserTokenType, la.atn.maxTokenType)
@ -201,3 +233,17 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
}
}
}
func (la *LL1Analyzer) ___LOOK(stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
newContext := SingletonPredictionContextCreate(ctx, t1.followState.GetStateNumber())
defer func() {
calledRuleStack.remove(t1.getTarget().GetRuleIndex())
}()
calledRuleStack.add(t1.getTarget().GetRuleIndex())
la._LOOK(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
}
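
The motivation for extracting __LOOK and ___LOOK: a defer placed inside a loop does not run at the end of each iteration, it runs when the enclosing function returns, so the old in-loop restores of calledRuleStack all piled up until _LOOK exited (the "TODO this is incorrect" above). Moving each iteration body into its own function makes the defer fire per call. A stand-alone demonstration:

package main

import "fmt"

func deferInLoop() {
	for i := 0; i < 3; i++ {
		defer fmt.Println("restore", i) // all three run only when deferInLoop returns
	}
	fmt.Println("loop done, nothing restored yet")
}

func perIteration(i int) {
	defer fmt.Println("restore", i) // runs as each call returns
	fmt.Println("work", i)
}

func main() {
	deferInLoop()
	for i := 0; i < 3; i++ {
		perIteration(i)
	}
}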

View File

@ -30,7 +30,7 @@ type Lexer struct {
_input CharStream
_factory TokenFactory
_tokenFactorySourcePair *TokenSourceCharStreamPair
_token *Token
_token IToken
_tokenStartCharIndex int
_tokenStartLine int
_tokenStartColumn int
@ -166,12 +166,11 @@ func (l *Lexer) safeMatch() (ret int) {
}
// Return a token from this source, i.e., Match a token on the char stream.
func (l *Lexer) nextToken() *Token {
func (l *Lexer) nextToken() IToken {
if l._input == nil {
panic("nextToken requires a non-nil input stream.")
}
// do this when done consuming
var tokenStartMarker = l._input.Mark()
// previously in finally block
@ -244,7 +243,7 @@ func (l *Lexer) mode(m int) {
}
func (l *Lexer) pushMode(m int) {
if LexerATNSimulatordebug {
if LexerATNSimulatorDebug {
fmt.Println("pushMode " + strconv.Itoa(m))
}
l._modeStack.Push(l._mode)
@ -255,7 +254,7 @@ func (l *Lexer) popMode() int {
if len(l._modeStack) == 0 {
panic("Empty Stack")
}
if LexerATNSimulatordebug {
if LexerATNSimulatorDebug {
fmt.Println("popMode back to " + fmt.Sprint(l._modeStack[0:len(l._modeStack)-1]))
}
i, _ := l._modeStack.Pop()
@ -280,7 +279,7 @@ func (l *Lexer) setInputStream(input CharStream) {
// and GetToken (to push tokens into a list and pull from that list
// rather than a single variable as this implementation does).
// /
func (l *Lexer) emitToken(token *Token) {
func (l *Lexer) emitToken(token IToken) {
l._token = token
}
@ -290,13 +289,13 @@ func (l *Lexer) emitToken(token *Token) {
// use that to set the token's text. Override this method to emit
// custom Token objects or provide a new factory.
// /
func (l *Lexer) emit() *Token {
func (l *Lexer) emit() IToken {
var t = l._factory.Create(l._tokenFactorySourcePair, l._type, l._text, l._channel, l._tokenStartCharIndex, l.getCharIndex()-1, l._tokenStartLine, l._tokenStartColumn)
l.emitToken(t)
return t
}
func (l *Lexer) emitEOF() *Token {
func (l *Lexer) emitEOF() IToken {
cpos := l.getCharPositionInLine()
lpos := l.getLine()
var eof = l._factory.Create(l._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, l._input.Index(), l._input.Index()-1, lpos, cpos)
@ -346,11 +345,13 @@ func (this *Lexer) GetATN() *ATN {
// Return a list of all Token objects in input char stream.
// Forces load of all tokens. Does not include EOF token.
// /
func (l *Lexer) getAllTokens() []*Token {
var tokens = make([]*Token, 0)
func (l *Lexer) getAllTokens() []IToken {
fmt.Println("getAllTokens")
var tokens = make([]IToken, 0)
var t = l.nextToken()
for t.tokenType != TokenEOF {
for t.GetTokenType() != TokenEOF {
tokens = append(tokens, t)
fmt.Println("getAllTokens")
t = l.nextToken()
}
return tokens

View File

@ -88,8 +88,8 @@ func NewLexerATNSimulator(recog ILexer, atn *ATN, decisionToDFA []*DFA, sharedCo
return this
}
var LexerATNSimulatordebug = false
var LexerATNSimulatordfa_debug = false
var LexerATNSimulatorDebug = true
var LexerATNSimulatorDFADebug = false
var LexerATNSimulatorMIN_DFA_EDGE = 0
var LexerATNSimulatorMAX_DFA_EDGE = 127 // forces unicode to stay in ATN
@ -105,6 +105,9 @@ func (this *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
fmt.Println("Match")
this.Match_calls += 1
this.mode = mode
var mark = input.Mark()
@ -134,7 +137,7 @@ func (this *LexerATNSimulator) reset() {
func (this *LexerATNSimulator) MatchATN(input CharStream) int {
var startState = this.atn.modeToStartState[this.mode]
if LexerATNSimulatordebug {
if LexerATNSimulatorDebug {
fmt.Println("MatchATN mode " + strconv.Itoa(this.mode) + " start: " + startState.String())
}
var old_mode = this.mode
@ -150,14 +153,23 @@ func (this *LexerATNSimulator) MatchATN(input CharStream) int {
var predict = this.execATN(input, next)
if LexerATNSimulatordebug {
if LexerATNSimulatorDebug {
fmt.Println("DFA after MatchATN: " + this.decisionToDFA[old_mode].toLexerString())
}
return predict
}
var countA = 0
func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
if LexerATNSimulatordebug {
countA += 1
if (countA == 2) {
panic("GAH")
}
if LexerATNSimulatorDebug {
fmt.Println("start state closure=" + ds0.configs.String())
}
if ds0.isAcceptState {
@ -168,7 +180,7 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
var s = ds0 // s is current/from DFA state
for true { // while more work
if LexerATNSimulatordebug {
if LexerATNSimulatorDebug {
fmt.Println("execATN loop starting closure: " + s.configs.String())
}
@ -215,6 +227,8 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
t = input.LA(1)
s = target // flip current DFA target becomes new src/from state
}
fmt.Println("OUT")
return this.failOrAccept(this.prevAccept, input, s.configs, t)
}
@ -236,7 +250,7 @@ func (this *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFASt
if target == nil {
target = nil
}
if LexerATNSimulatordebug && target != nil {
if LexerATNSimulatorDebug && target != nil {
fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
}
return target
@ -274,8 +288,9 @@ func (this *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState,
func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach *ATNConfigSet, t int) int {
if this.prevAccept.dfaState != nil {
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor
this.accept(input, lexerActionExecutor, this.startIndex,
prevAccept.index, prevAccept.line, prevAccept.column)
this.accept(input, lexerActionExecutor, this.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
fmt.Println(prevAccept.dfaState.prediction)
return prevAccept.dfaState.prediction
} else {
// if no accept and EOF is first char, return EOF
@ -299,7 +314,7 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *
if currentAltReachedAcceptState && cfg.(*LexerATNConfig).passedThroughNonGreedyDecision {
continue
}
if LexerATNSimulatordebug {
if LexerATNSimulatorDebug {
fmt.Printf("testing %s at %s\n", this.GetTokenName(t), cfg.String()) // this.recog, true))
}
for j := 0; j < len(cfg.GetState().GetTransitions()); j++ {
@ -324,8 +339,8 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *
}
func (this *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
if LexerATNSimulatordebug {
fmt.Println("ACTION %s\n", lexerActionExecutor)
if LexerATNSimulatorDebug {
fmt.Printf("ACTION %s\n", lexerActionExecutor)
}
// seek to after last char in token
input.Seek(index)
@ -346,12 +361,17 @@ func (this *LexerATNSimulator) getReachableTarget(trans ITransition, t int) IATN
func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState) *OrderedATNConfigSet {
fmt.Println("DEBUG" + strconv.Itoa(len(p.GetTransitions())))
var configs = NewOrderedATNConfigSet()
for i := 0; i < len(p.GetTransitions()); i++ {
var target = p.GetTransitions()[i].getTarget()
var cfg = NewLexerATNConfig6(target, i+1, PredictionContextEMPTY)
this.closure(input, cfg, configs.ATNConfigSet, false, false, false)
}
fmt.Println("DEBUG" + configs.String())
return configs
}
@ -366,19 +386,21 @@ func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState)
func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, configs *ATNConfigSet,
currentAltReachedAcceptState, speculative, treatEofAsEpsilon bool) bool {
if LexerATNSimulatordebug {
if LexerATNSimulatorDebug {
fmt.Println("closure(" + config.String() + ")") // config.String(this.recog, true) + ")")
}
_, ok := config.state.(*RuleStopState)
if ok {
if LexerATNSimulatordebug {
if LexerATNSimulatorDebug {
if this.recog != nil {
fmt.Println("closure at %s rule stop %s\n", this.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
fmt.Printf("closure at %s rule stop %s\n", this.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
} else {
fmt.Println("closure at rule stop %s\n", config)
fmt.Printf("closure at rule stop %s\n", config)
}
}
if config.context == nil || config.context.hasEmptyPath() {
if config.context == nil || config.context.isEmpty() {
configs.add(config, nil)
@ -452,7 +474,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
pt := trans.(*PredicateTransition)
if LexerATNSimulatordebug {
if LexerATNSimulatorDebug {
fmt.Println("EVAL rule " + strconv.Itoa(trans.(*PredicateTransition).ruleIndex) + ":" + strconv.Itoa(pt.predIndex))
}
configs.hasSemanticContext = true
@ -571,7 +593,7 @@ func (this *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState,
// Only track edges within the DFA bounds
return to
}
if LexerATNSimulatordebug {
if LexerATNSimulatorDebug {
fmt.Println("EDGE " + from_.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
}
if from_.edges == nil {
@ -643,6 +665,7 @@ func (this *LexerATNSimulator) consume(input CharStream) {
}
func (this *LexerATNSimulator) GetTokenName(tt int) string {
fmt.Println(tt)
if tt == -1 {
return "EOF"
} else {

View File

@ -1,4 +1,5 @@
package antlr4
import "fmt"
type IParser interface {
IRecognizer
@ -8,13 +9,13 @@ type IParser interface {
GetTokenStream() TokenStream
GetTokenFactory() TokenFactory
GetParserRuleContext() IParserRuleContext
Consume() *Token
Consume() IToken
GetParseListeners() []ParseTreeListener
GetInputStream() IntStream
getCurrentToken() *Token
getCurrentToken() IToken
getExpectedTokens() *IntervalSet
NotifyErrorListeners(msg string, offendingToken *Token, err IRecognitionException)
NotifyErrorListeners(msg string, offendingToken IToken, err IRecognitionException)
isExpectedToken(symbol int) bool
getPrecedence() int
getRuleInvocationStack(IParserRuleContext) []string
@ -24,16 +25,16 @@ type Parser struct {
*Recognizer
Interpreter *ParserATNSimulator
BuildParseTrees bool
_input TokenStream
_errHandler IErrorStrategy
_precedenceStack IntStack
_ctx IParserRuleContext
buildParseTrees bool
_tracer *TraceListener
_parseListeners []ParseTreeListener
_SyntaxErrors int
}
// p is all the parsing support code; essentially most of it is error
@ -42,7 +43,6 @@ func NewParser(input TokenStream) *Parser {
p := new(Parser)
p.Recognizer = NewRecognizer()
// The input stream.
@ -57,7 +57,7 @@ func NewParser(input TokenStream) *Parser {
p._ctx = nil
// Specifies whether or not the parser should construct a parse tree during
// the parsing process. The default value is {@code true}.
p.buildParseTrees = true
p.BuildParseTrees = true
// When {@link //setTrace}{@code (true)} is called, a reference to the
// {@link TraceListener} is stored here so it can be easily removed in a
// later call to {@link //setTrace}{@code (false)}. The listener itself is
@ -124,14 +124,17 @@ func (p *Parser) GetParseListeners() []ParseTreeListener {
// {@code ttype} and the error strategy could not recover from the
// misMatched symbol
func (p *Parser) Match(ttype int) *Token {
func (p *Parser) Match(ttype int) IToken {
var t = p.getCurrentToken()
if t.tokenType == ttype {
fmt.Println("TOKEN IS " + t.GetText())
if t.GetTokenType() == ttype {
p._errHandler.ReportMatch(p)
p.Consume()
} else {
t = p._errHandler.RecoverInline(p)
if p.buildParseTrees && t.tokenIndex == -1 {
if p.BuildParseTrees && t.GetTokenIndex() == -1 {
// we must have conjured up a new token during single token
// insertion
// if it's not the current symbol
@ -157,14 +160,14 @@ func (p *Parser) Match(ttype int) *Token {
// a wildcard and the error strategy could not recover from the misMatched
// symbol
func (p *Parser) MatchWildcard() *Token {
func (p *Parser) MatchWildcard() IToken {
var t = p.getCurrentToken()
if t.tokenType > 0 {
if t.GetTokenType() > 0 {
p._errHandler.ReportMatch(p)
p.Consume()
} else {
t = p._errHandler.RecoverInline(p)
if p.buildParseTrees && t.tokenIndex == -1 {
if p.BuildParseTrees && t.GetTokenIndex() == -1 {
// we must have conjured up a new token during single token
// insertion
// if it's not the current symbol
@ -232,24 +235,24 @@ func (p *Parser) addParseListener(listener ParseTreeListener) {
//
func (p *Parser) removeParseListener(listener ParseTreeListener) {
if (p._parseListeners != nil) {
if p._parseListeners != nil {
idx := -1
for i,v := range p._parseListeners {
for i, v := range p._parseListeners {
if v == listener {
idx = i
break;
break
}
}
if (idx == -1){
if idx == -1 {
return
}
// remove the listener from the slice
p._parseListeners = append( p._parseListeners[0:idx], p._parseListeners[idx+1:]... )
p._parseListeners = append(p._parseListeners[0:idx], p._parseListeners[idx+1:]...)
if (len(p._parseListeners) == 0) {
if len(p._parseListeners) == 0 {
p._parseListeners = nil
}
}
@ -385,28 +388,28 @@ func (p *Parser) setTokenStream(input TokenStream) {
// Match needs to return the current input symbol, which gets put
// into the label for the associated token ref e.g., x=ID.
//
func (p *Parser) getCurrentToken() *Token {
func (p *Parser) getCurrentToken() IToken {
return p._input.LT(1)
}
func (p *Parser) NotifyErrorListeners(msg string, offendingToken *Token, err IRecognitionException) {
func (p *Parser) NotifyErrorListeners(msg string, offendingToken IToken, err IRecognitionException) {
if offendingToken == nil {
offendingToken = p.getCurrentToken()
}
p._SyntaxErrors += 1
var line = offendingToken.line
var column = offendingToken.column
var line = offendingToken.GetLine()
var column = offendingToken.GetColumn()
listener := p.getErrorListenerDispatch()
listener.SyntaxError(p, offendingToken, line, column, msg, err)
}
func (p *Parser) Consume() *Token {
func (p *Parser) Consume() IToken {
var o = p.getCurrentToken()
if o.tokenType != TokenEOF {
if o.GetTokenType() != TokenEOF {
p.GetInputStream().Consume()
}
var hasListener = p._parseListeners != nil && len(p._parseListeners) > 0
if p.buildParseTrees || hasListener {
if p.BuildParseTrees || hasListener {
if p._errHandler.inErrorRecoveryMode(p) {
var node = p._ctx.addErrorNode(o)
if p._parseListeners != nil {
@ -437,10 +440,10 @@ func (p *Parser) addContextToParseTree() {
}
func (p *Parser) EnterRule(localctx IParserRuleContext, state, ruleIndex int) {
p.state = state
p.SetState(state)
p._ctx = localctx
p._ctx.setStart(p._input.LT(1))
if p.buildParseTrees {
if p.BuildParseTrees {
p.addContextToParseTree()
}
if p._parseListeners != nil {
@ -454,8 +457,8 @@ func (p *Parser) ExitRule() {
if p._parseListeners != nil {
p.TriggerExitRuleEvent()
}
p.state = p._ctx.getInvokingState()
if (p._ctx.GetParent() != nil){
p.SetState(p._ctx.getInvokingState())
if p._ctx.GetParent() != nil {
p._ctx = p._ctx.GetParent().(IParserRuleContext)
} else {
p._ctx = nil
@ -465,7 +468,7 @@ func (p *Parser) ExitRule() {
func (p *Parser) EnterOuterAlt(localctx IParserRuleContext, altNum int) {
// if we have a new localctx, make sure we replace existing ctx
// that is previous child of parse tree
if p.buildParseTrees && p._ctx != localctx {
if p.BuildParseTrees && p._ctx != localctx {
if p._ctx.GetParent() != nil {
p._ctx.GetParent().(IParserRuleContext).removeLastChild()
p._ctx.GetParent().(IParserRuleContext).addChild(localctx)
@ -488,7 +491,7 @@ func (p *Parser) getPrecedence() int {
}
func (p *Parser) EnterRecursionRule(localctx IParserRuleContext, state, ruleIndex, precedence int) {
p.state = state
p.SetState(state)
p._precedenceStack.Push(precedence)
p._ctx = localctx
p._ctx.setStart(p._input.LT(1))
@ -509,7 +512,7 @@ func (p *Parser) PushNewRecursionContext(localctx IParserRuleContext, state, rul
p._ctx = localctx
p._ctx.setStart(previous.getStart())
if p.buildParseTrees {
if p.BuildParseTrees {
p._ctx.addChild(previous)
}
if p._parseListeners != nil {
@ -533,7 +536,7 @@ func (p *Parser) UnrollRecursionContexts(parentCtx IParserRuleContext) {
}
// hook into tree
retCtx.setParent(parentCtx)
if p.buildParseTrees && parentCtx != nil {
if p.BuildParseTrees && parentCtx != nil {
// add return ctx into invoking rule's tree
parentCtx.addChild(retCtx)
}

View File

@ -47,7 +47,7 @@ func NewParserATNSimulator(parser IParser, atn *ATN, decisionToDFA []*DFA, share
return this
}
var ParserATNSimulatorDebug = false
var ParserATNSimulatorDebug = true
var ParserATNSimulatorListATNDecisions = false
var ParserATNSimulatorDFADebug = false
var ParserATNSimulatorRetryDebug = false
@ -57,11 +57,14 @@ func (this *ParserATNSimulator) reset() {
func (this *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, outerContext IParserRuleContext) int {
fmt.Println("Adaptive preduct")
if ParserATNSimulatorDebug || ParserATNSimulatorListATNDecisions {
fmt.Println("AdaptivePredict decision " + strconv.Itoa(decision) +
" exec LA(1)==" + this.getLookaheadName(input) +
" line " + strconv.Itoa(input.LT(1).line) + ":" +
strconv.Itoa(input.LT(1).column))
" line " + strconv.Itoa(input.LT(1).GetLine()) + ":" +
strconv.Itoa(input.LT(1).GetColumn()))
}
this._input = input
@ -174,7 +177,7 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
if ParserATNSimulatorDebug || ParserATNSimulatorListATNDecisions {
fmt.Println("execATN decision " + strconv.Itoa(dfa.decision) +
" exec LA(1)==" + this.getLookaheadName(input) +
" line " + strconv.Itoa(input.LT(1).line) + ":" + strconv.Itoa(input.LT(1).column))
" line " + strconv.Itoa(input.LT(1).GetLine()) + ":" + strconv.Itoa(input.LT(1).GetColumn()))
}
var previousD = s0
@ -1278,18 +1281,22 @@ func (this *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs *ATNConfig
func (this *ParserATNSimulator) GetTokenName(t int) string {
fmt.Println("Get token name")
if t == TokenEOF {
return "EOF"
}
if this.parser != nil && this.parser.GetLiteralNames() != nil {
if t >= len(this.parser.GetLiteralNames()) {
fmt.Println(strconv.Itoa(t) + " ttype out of range: " + strings.Join(this.parser.GetLiteralNames(), ","))
fmt.Println(this.parser.GetInputStream().(TokenStream).GetAllText())
// fmt.Println(this.parser.GetInputStream().(TokenStream).GetAllText()) // this seems incorrect
} else {
return this.parser.GetLiteralNames()[t] + "<" + strconv.Itoa(t) + ">"
}
}
return "" + strconv.Itoa(t)
return strconv.Itoa(t)
}
func (this *ParserATNSimulator) getLookaheadName(input TokenStream) string {

View File

@ -8,16 +8,16 @@ type IParserRuleContext interface {
IRuleContext
SetException(IRecognitionException)
addTokenNode(token *Token) *TerminalNodeImpl
addErrorNode(badToken *Token) *ErrorNodeImpl
addTokenNode(token IToken) *TerminalNodeImpl
addErrorNode(badToken IToken) *ErrorNodeImpl
EnterRule(listener ParseTreeListener)
ExitRule(listener ParseTreeListener)
setStart(*Token)
getStart() *Token
setStart(IToken)
getStart() IToken
setStop(*Token)
getStop() *Token
setStop(IToken)
getStop() IToken
addChild(child IRuleContext) IRuleContext
removeLastChild()
@ -27,7 +27,7 @@ type ParserRuleContext struct {
*RuleContext
children []ParseTree
start, stop *Token
start, stop IToken
exception IRecognitionException
}
@ -117,7 +117,7 @@ func (prc *ParserRuleContext) removeLastChild() {
}
}
func (prc *ParserRuleContext) addTokenNode(token *Token) *TerminalNodeImpl {
func (prc *ParserRuleContext) addTokenNode(token IToken) *TerminalNodeImpl {
var node = NewTerminalNodeImpl(token)
prc.addTerminalNodeChild(node)
@ -126,7 +126,7 @@ func (prc *ParserRuleContext) addTokenNode(token *Token) *TerminalNodeImpl {
}
func (prc *ParserRuleContext) addErrorNode(badToken *Token) *ErrorNodeImpl {
func (prc *ParserRuleContext) addErrorNode(badToken IToken) *ErrorNodeImpl {
var node = NewErrorNodeImpl(badToken)
prc.addTerminalNodeChild(node)
node.parentCtx = prc
@ -159,19 +159,19 @@ func (prc *ParserRuleContext) getChildOfType(i int, childType reflect.Type) IRul
}
}
func (prc *ParserRuleContext) setStart(t *Token) {
func (prc *ParserRuleContext) setStart(t IToken) {
prc.start = t
}
func (prc *ParserRuleContext) getStart() *Token {
func (prc *ParserRuleContext) getStart() IToken {
return prc.start
}
func (prc *ParserRuleContext) setStop(t *Token) {
func (prc *ParserRuleContext) setStop(t IToken) {
prc.stop = t
}
func (prc *ParserRuleContext) getStop() *Token {
func (prc *ParserRuleContext) getStop() IToken {
return prc.stop
}
@ -180,7 +180,7 @@ func (prc *ParserRuleContext) GetToken(ttype int, i int) TerminalNode {
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
if c2, ok := child.(TerminalNode); ok {
if c2.getSymbol().tokenType == ttype {
if c2.getSymbol().GetTokenType() == ttype {
if i == 0 {
return c2
} else {
@ -200,7 +200,7 @@ func (prc *ParserRuleContext) GetTokens(ttype int) []TerminalNode {
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
if tchild, ok := child.(TerminalNode); ok {
if tchild.getSymbol().tokenType == ttype {
if tchild.getSymbol().GetTokenType() == ttype {
tokens = append(tokens, tchild)
}
}
@ -242,7 +242,7 @@ func (prc *ParserRuleContext) GetSourceInterval() *Interval {
if prc.start == nil || prc.stop == nil {
return TreeINVALID_INTERVAL
} else {
return NewInterval(prc.start.tokenIndex, prc.stop.tokenIndex)
return NewInterval(prc.start.GetTokenIndex(), prc.stop.GetTokenIndex())
}
}

View File

@ -72,10 +72,6 @@ func (this *PredictionContext) isEmpty() bool {
return false
}
func (this *PredictionContext) hasEmptyPath() bool {
return this.getReturnState(this.length()-1) == PredictionContextEMPTY_RETURN_STATE
}
func (this *PredictionContext) hashString() string {
return this.cachedHashString
}
@ -88,22 +84,6 @@ func calculateEmptyHashString() string {
return ""
}
func (this *PredictionContext) String() string {
panic("Not implemented")
}
func (this *PredictionContext) GetParent(index int) IPredictionContext {
panic("Not implemented")
}
func (this *PredictionContext) length() int {
panic("Not implemented")
}
func (this *PredictionContext) getReturnState(index int) int {
panic("Not implemented")
}
// Used to cache {@link PredictionContext} objects. It's used for the shared
// context cache associated with contexts in DFA states. This cache
// can be used for both lexers and parsers.
@ -191,6 +171,10 @@ func (this *SingletonPredictionContext) getReturnState(index int) int {
return this.returnState
}
func (this *SingletonPredictionContext) hasEmptyPath() bool {
return this.returnState == PredictionContextEMPTY_RETURN_STATE
}
func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
if this == other {
return true
@ -301,6 +285,10 @@ func (c *ArrayPredictionContext) GetReturnStates() []int {
return c.returnStates
}
func (this *ArrayPredictionContext) hasEmptyPath() bool {
return this.getReturnState(this.length()-1) == PredictionContextEMPTY_RETURN_STATE
}
func (this *ArrayPredictionContext) isEmpty() bool {
// since EMPTY_RETURN_STATE can only appear in the last position, we
// don't need to verify that size==1
@ -320,9 +308,7 @@ func (this *ArrayPredictionContext) getReturnState(index int) int {
}
func (this *ArrayPredictionContext) equals(other IPredictionContext) bool {
if this == other {
return true
} else if _, ok := other.(*ArrayPredictionContext); !ok {
if _, ok := other.(*ArrayPredictionContext); !ok {
return false
} else if this.cachedHashString != other.hashString() {
return false // can't be same if hash is different

View File

@ -83,6 +83,8 @@ func (this *Recognizer) GetState() int {
}
func (this *Recognizer) SetState(v int) {
fmt.Println("SETTING STATE " + strconv.Itoa(v) + " from " + strconv.Itoa(this.state))
this.state = v
}
@ -159,8 +161,8 @@ func (this *Recognizer) GetTokenType(tokenName string) int {
// What is the error header, normally line/character position information?//
func (this *Recognizer) getErrorHeader(e IRecognitionException) string {
var line = e.GetOffendingToken().line
var column = e.GetOffendingToken().column
var line = e.GetOffendingToken().GetLine()
var column = e.GetOffendingToken().GetColumn()
return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
}
@ -177,16 +179,16 @@ func (this *Recognizer) getErrorHeader(e IRecognitionException) string {
// feature when necessary. For example, see
// {@link DefaultErrorStrategy//GetTokenErrorDisplay}.
//
func (this *Recognizer) GetTokenErrorDisplay(t *Token) string {
func (this *Recognizer) GetTokenErrorDisplay(t IToken) string {
if t == nil {
return "<no token>"
}
var s = t.text()
var s = t.GetText()
if s == "" {
if t.tokenType == TokenEOF {
if t.GetTokenType() == TokenEOF {
s = "<EOF>"
} else {
s = "<" + strconv.Itoa(t.tokenType) + ">"
s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
}
}
s = strings.Replace(s, "\t", "\\t", -1)

View File

@ -5,8 +5,6 @@ import (
"strings"
)
type TokenSourceCharStreamPair struct {
tokenSource TokenSource
charStream CharStream
@ -16,6 +14,26 @@ type TokenSourceCharStreamPair struct {
// (so we can ignore tabs), token channel, index, and source from which
// we obtained this token.
type IToken interface {
GetSource() *TokenSourceCharStreamPair
GetTokenType() int
GetChannel() int
GetStart() int
GetStop() int
GetLine() int
GetColumn() int
GetText() string
SetText(s string)
GetTokenIndex() int
SetTokenIndex(v int)
GetTokenSource() TokenSource
GetInputStream() CharStream
}
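
This interface is the heart of the commit: every *Token in the streams, parser, and error strategy becomes IToken, so CommonToken and any custom token type can flow through the same APIs. A sketch of code written against the interface (describe is hypothetical; it would live in this package and needs only fmt):

func describe(t IToken) string {
	return fmt.Sprintf("type=%d %q at %d:%d",
		t.GetTokenType(), t.GetText(), t.GetLine(), t.GetColumn())
}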
type Token struct {
source *TokenSourceCharStreamPair
tokenType int // token type of the token
@ -52,19 +70,40 @@ const (
TokenHiddenChannel = 1
)
// Explicitly set the text for this token. If {code text} is not
// {@code nil}, then {@link //GetText} will return this value rather than
// extracting the text from the input.
//
// @param text The explicit text of the token, or {@code nil} if the text
// should be obtained from the input along with the start and stop indexes
// of the token.
func (this *Token) GetChannel() int {
return this.channel
}
func (this *Token) text() string {
func (this *Token) GetStart() int {
return this.start
}
func (this *Token) GetStop() int {
return this.stop
}
func (this *Token) GetLine() int {
return this.line
}
func (this *Token) GetColumn() int {
return this.column
}
func (this *Token) GetTokenType() int {
return this.tokenType
}
func (this *Token) GetSource() *TokenSourceCharStreamPair{
return this.source
}
func (this *Token) GetText() string {
return this._text
}
func (this *Token) setText(s string) {
func (this *Token) SetText(s string) {
this._text = s
}
@ -72,6 +111,10 @@ func (this *Token) GetTokenIndex() int {
return this.tokenIndex
}
func (this *Token) SetTokenIndex(v int) {
this.tokenIndex = v
}
func (this *Token) GetTokenSource() TokenSource {
return this.source.tokenSource
}
@ -91,7 +134,7 @@ func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start
t.Token = new(Token)
t.source = source
t.tokenType = -1
t.tokenType = tokenType
t.channel = channel
t.start = start
t.stop = stop
@ -123,16 +166,15 @@ func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start
// @param oldToken The token to copy.
//
func (ct *CommonToken) clone() *CommonToken {
var t = NewCommonToken(ct.source, ct.tokenType, ct.channel, ct.start,
ct.stop)
t.tokenIndex = ct.tokenIndex
t.line = ct.line
t.column = ct.column
t._text = ct.text()
var t = NewCommonToken(ct.source, ct.tokenType, ct.channel, ct.start, ct.stop)
t.tokenIndex = ct.GetTokenIndex()
t.line = ct.GetLine()
t.column = ct.GetColumn()
t._text = ct.GetText()
return t
}
func (this *CommonToken) text() string {
func (this *CommonToken) GetText() string {
if this._text != "" {
return this._text
}
@ -148,12 +190,12 @@ func (this *CommonToken) text() string {
}
}
func (this *CommonToken) setText(text string) {
func (this *CommonToken) SetText(text string) {
this._text = text
}
func (this *CommonToken) String() string {
var txt = this.text()
var txt = this.GetText()
if txt != "" {
txt = strings.Replace(txt, "\n", "", -1)
txt = strings.Replace(txt, "\r", "", -1)

View File

@ -1,7 +1,7 @@
package antlr4
type TokenSource interface {
nextToken() *Token
nextToken() IToken
skip()
more()
getLine() int

View File

@ -3,14 +3,14 @@ package antlr4
type TokenStream interface {
IntStream
LT(k int) *Token
LT(k int) IToken
Get(index int) *Token
Get(index int) IToken
GetTokenSource() TokenSource
SetTokenSource(TokenSource)
GetAllText() string
GetTextFromInterval(*Interval) string
GetTextFromRuleContext(IRuleContext) string
GetTextFromTokens(*Token, *Token) string
GetTextFromTokens(IToken, IToken) string
}
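
The same interface shift applies to lookahead: LT(k) and Get(index) now hand back IToken. A hedged sketch of non-consuming lookahead through the interface (peekTwo is illustrative, not part of the runtime):

// peekTwo returns the next two tokens without consuming them; LT(1) is the
// current lookahead token and LT(2) the one after it.
// Illustrative helper; assumes package antlr4.
func peekTwo(ts TokenStream) (IToken, IToken) {
	return ts.LT(1), ts.LT(2)
}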

View File

@ -16,7 +16,7 @@ func (this *TraceListener) VisitErrorNode(_ ErrorNode) {
}
func (this *TraceListener) EnterEveryRule(ctx IParserRuleContext) {
fmt.Println("enter " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).text())
fmt.Println("enter " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).GetText())
}
func (this *TraceListener) VisitTerminal(node TerminalNode) {
@ -24,5 +24,5 @@ func (this *TraceListener) VisitTerminal(node TerminalNode) {
}
func (this *TraceListener) ExitEveryRule(ctx IParserRuleContext) {
fmt.Println("exit " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).text())
fmt.Println("exit " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).GetText())
}

View File

@ -26,6 +26,7 @@ type ITransition interface {
type Transition struct {
target IATNState
isEpsilon bool
label_ int
label *IntervalSet
serializationType int
}
@ -124,8 +125,6 @@ var TransitionserializationNames = []string{
// TODO: make all transitions sets? no, should remove set edges
type AtomTransition struct {
*Transition
label_ int
label *IntervalSet
}
func NewAtomTransition(target IATNState, label int) *AtomTransition {
@ -236,6 +235,11 @@ func (t *RangeTransition) String() string {
return "'" + string(t.start) + "'..'" + string(t.stop) + "'"
}
type IAbstractPredicateTransition interface {
ITransition
IAbstractPredicateTransitionFoo()
}
type AbstractPredicateTransition struct {
*Transition
}
@ -248,8 +252,10 @@ func NewAbstractPredicateTransition(target IATNState) *AbstractPredicateTransiti
return t
}
func (a *AbstractPredicateTransition) IAbstractPredicateTransitionFoo() {}
type PredicateTransition struct {
*Transition
*AbstractPredicateTransition
isCtxDependent bool
ruleIndex, predIndex int
@ -258,7 +264,7 @@ type PredicateTransition struct {
func NewPredicateTransition(target IATNState, ruleIndex, predIndex int, isCtxDependent bool) *PredicateTransition {
t := new(PredicateTransition)
t.Transition = NewTransition(target)
t.AbstractPredicateTransition = NewAbstractPredicateTransition(target)
t.serializationType = TransitionPREDICATE
t.ruleIndex = ruleIndex
@ -381,7 +387,7 @@ func (t *WildcardTransition) String() string {
}
type PrecedencePredicateTransition struct {
*Transition
*AbstractPredicateTransition
precedence int
}
@ -389,7 +395,7 @@ type PrecedencePredicateTransition struct {
func NewPrecedencePredicateTransition(target IATNState, precedence int) *PrecedencePredicateTransition {
t := new(PrecedencePredicateTransition)
t.Transition = NewTransition(target)
t.AbstractPredicateTransition = NewAbstractPredicateTransition(target)
t.serializationType = TransitionPRECEDENCE
t.precedence = precedence
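
The empty IAbstractPredicateTransitionFoo method above is a marker-method idiom: Go has no abstract classes, so embedding *AbstractPredicateTransition plus a do-nothing method is what lets PredicateTransition and PrecedencePredicateTransition both satisfy IAbstractPredicateTransition and be caught by a single type assertion. A self-contained sketch of the idiom, with illustrative names not taken from this commit:

package main

import "fmt"

// IMarker can only be satisfied deliberately: isMarker does nothing and
// exists purely to group related concrete types under one interface.
type IMarker interface {
	isMarker()
}

type PredA struct{}
type PredB struct{}

func (PredA) isMarker() {}
func (PredB) isMarker() {}

func main() {
	for _, v := range []interface{}{PredA{}, PredB{}, 42} {
		_, ok := v.(IMarker) // one assertion covers every marked type
		fmt.Printf("%T marked: %v\n", v, ok)
	}
}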

View File

@ -41,7 +41,7 @@ type RuleNode interface {
type TerminalNode interface {
ParseTree
getSymbol() *Token
getSymbol() IToken
}
type ErrorNode interface {
@ -87,10 +87,10 @@ type ParseTreeListener interface {
type TerminalNodeImpl struct {
parentCtx IRuleContext
symbol *Token
symbol IToken
}
func NewTerminalNodeImpl(symbol *Token) *TerminalNodeImpl {
func NewTerminalNodeImpl(symbol IToken) *TerminalNodeImpl {
tn := new(TerminalNodeImpl)
tn.parentCtx = nil
@ -112,7 +112,7 @@ func (this *TerminalNodeImpl) setChildren(t []Tree) {
panic("Cannot set children on terminal node")
}
func (this *TerminalNodeImpl) getSymbol() *Token {
func (this *TerminalNodeImpl) getSymbol() IToken {
return this.symbol
}
@ -132,7 +132,7 @@ func (this *TerminalNodeImpl) GetSourceInterval() *Interval {
if this.symbol == nil {
return TreeINVALID_INTERVAL
}
var tokenIndex = this.symbol.tokenIndex
var tokenIndex = this.symbol.GetTokenIndex()
return NewInterval(tokenIndex, tokenIndex)
}
@ -145,14 +145,14 @@ func (this *TerminalNodeImpl) accept(Visitor ParseTreeVisitor) interface{} {
}
func (this *TerminalNodeImpl) GetText() string {
return this.symbol.text()
return this.symbol.GetText()
}
func (this *TerminalNodeImpl) String() string {
if this.symbol.tokenType == TokenEOF {
if this.symbol.GetTokenType() == TokenEOF {
return "<EOF>"
} else {
return this.symbol.text()
return this.symbol.GetText()
}
}
@ -166,7 +166,7 @@ type ErrorNodeImpl struct {
*TerminalNodeImpl
}
func NewErrorNodeImpl(token *Token) *ErrorNodeImpl {
func NewErrorNodeImpl(token IToken) *ErrorNodeImpl {
en := new(ErrorNodeImpl)
en.TerminalNodeImpl = NewTerminalNodeImpl(token)
return en

View File

@ -46,15 +46,15 @@ func TreesgetNodeText(t Tree, ruleNames []string, recog *Parser) string {
return fmt.Sprint(t2)
} else if t2, ok := t.(TerminalNode); ok {
if t2.getSymbol() != nil {
return t2.getSymbol().text()
return t2.getSymbol().GetText()
}
}
}
// no recog for rule names
var payload = t.getPayload()
if p2, ok := payload.(*Token); ok {
return p2.text()
if p2, ok := payload.(IToken); ok {
return p2.GetText()
}
return fmt.Sprint(t.getPayload())
@ -104,7 +104,7 @@ func Trees_findAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTr
t3, ok2 := t.(IParserRuleContext)
if findTokens && ok {
if t2.getSymbol().tokenType == index {
if t2.getSymbol().GetTokenType() == index {
nodes = append(nodes, t2)
}
} else if !findTokens && ok2 {
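
TreesgetNodeText and Trees_findAllNodes lean on Go's comma-ok type assertion; after this change they assert against the IToken interface rather than the concrete *Token, so any token implementation matches. A runnable sketch of the pattern with illustrative types (fakeToken and nodeText are not from this commit):

package main

import "fmt"

type fakeToken struct{ text string }

func (f fakeToken) GetText() string { return f.text }

// nodeText mirrors the comma-ok shape used above: if the payload exposes
// GetText, use it; otherwise fall back to fmt.Sprint.
func nodeText(payload interface{}) string {
	if t, ok := payload.(interface{ GetText() string }); ok {
		return t.GetText()
	}
	return fmt.Sprint(payload)
}

func main() {
	fmt.Println(nodeText(fakeToken{"+"})) // +
	fmt.Println(nodeText(42))             // 42
}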

View File

@ -44,10 +44,6 @@ func (s *IntStack) Push(e int) {
*s = append(*s, e)
}
func arrayString(a []interface{}) string {
return fmt.Sprint(a)
}
func hashCode(s string) string {
h := fnv.New32a()
h.Write([]byte((s)))
@ -81,8 +77,11 @@ func NewSet(hashFunction func(interface{}) string, equalsFunction func(interface
return s
}
func standardEqualsFunction(a interface{}, b interface{}) bool {
return standardHashFunction(a) == standardHashFunction(b)
func standardHashFunction(a interface{}) string {
h := fnv.New32a()
v, _ := getBytes(a)
h.Write(v)
return fmt.Sprint(h.Sum32())
}
func getBytes(key interface{}) ([]byte, error) {
@ -95,13 +94,12 @@ func getBytes(key interface{}) ([]byte, error) {
return buf.Bytes(), nil
}
func standardHashFunction(a interface{}) string {
h := fnv.New32a()
v, _ := getBytes(a)
h.Write(v)
return fmt.Sprint(h.Sum32())
func standardEqualsFunction(a interface{}, b interface{}) bool {
return standardHashFunction(a) == standardHashFunction(b)
}
func (this *Set) length() int {
return len(this.data)
}
@ -110,6 +108,7 @@ func (this *Set) add(value interface{}) interface{} {
var hash = this.hashFunction(value)
var key = "hash_" + hashCode(hash)
values := this.data[key]
if this.data[key] != nil {
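
With the reordering above, standardHashFunction (gob-encode the value, then FNV-1a) now precedes standardEqualsFunction, which simply compares the two hash strings. A hedged usage sketch, assuming add deduplicates values whose hashes agree, as the surrounding machinery suggests; exampleSetUsage is illustrative and assumes package antlr4 with "fmt" imported:

func exampleSetUsage() {
	s := NewSet(nil, nil) // nil, nil selects the standard gob/FNV functions
	s.add("alpha")
	s.add("alpha") // same bytes, same hash: should collapse to one entry
	s.add("beta")
	fmt.Println(s.length()) // expected: 2
}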

View File

@ -177,9 +177,11 @@ BufferedTokenStream.prototype.getTokens = function(start, stop, types) {
if (types === undefined) {
types = null;
}
if (start < 0 || stop < 0) {
return null;
}
this.lazyInit();
var subset = [];
if (stop >= this.tokens.length) {
@ -194,6 +196,7 @@ BufferedTokenStream.prototype.getTokens = function(start, stop, types) {
subset.push(t);
}
}
return subset;
};
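
The new guard makes getTokens reject negative indices before lazyInit instead of failing later. The Go port would plausibly want the same check; the Go GetTokens body is not shown in this diff, so the sketch below is an assumption, not the commit's code:

// GetTokensExample is illustrative: mirror the JS fix by refusing negative
// bounds up front, clamp stop to the buffer, then slice the buffered tokens.
func (bt *BufferedTokenStream) GetTokensExample(start, stop int) []IToken {
	if start < 0 || stop < 0 {
		return nil
	}
	if stop >= len(bt.tokens) {
		stop = len(bt.tokens) - 1
	}
	if start > stop {
		return nil
	}
	return bt.tokens[start : stop+1]
}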

View File

@ -73,6 +73,9 @@ CommonTokenFactory.prototype.constructor = CommonTokenFactory;
CommonTokenFactory.DEFAULT = new CommonTokenFactory();
CommonTokenFactory.prototype.create = function(source, type, text, channel, start, stop, line, column) {
console.log("Token factory creating: " + text)
var t = new CommonToken(source, type, channel, start, stop);
t.line = line;
t.column = column;
@ -85,6 +88,9 @@ CommonTokenFactory.prototype.create = function(source, type, text, channel, star
};
CommonTokenFactory.prototype.createThin = function(type, text) {
console.log("Token factory creating: " + text)
var t = new CommonToken(null, type);
t.text = text;
return t;

View File

@ -41,6 +41,8 @@ function FileStream(fileName) {
var data = fs.readFileSync(fileName, "utf8");
InputStream.call(this, data);
this.fileName = fileName;
console.log(data);
return this;
}

View File

@ -50,6 +50,7 @@ IntervalSet.prototype.addRange = function(l, h) {
};
IntervalSet.prototype.addInterval = function(v) {
console.log("addInterval" + v.toString())
if (this.intervals === null) {
this.intervals = [];
this.intervals.push(v);
@ -80,7 +81,9 @@ IntervalSet.prototype.addInterval = function(v) {
};
IntervalSet.prototype.addSet = function(other) {
console.log("addSet")
if (other.intervals !== null) {
console.log(other.intervals.length)
for (var k = 0; k < other.intervals.length; k++) {
var i = other.intervals[k];
this.addInterval(new Interval(i.start, i.stop));

View File

@ -110,7 +110,15 @@ LL1Analyzer.prototype.LOOK = function(s, stopState, ctx) {
var seeThruPreds = true; // ignore preds; get all lookahead
ctx = ctx || null;
var lookContext = ctx!==null ? predictionContextFromRuleContext(s.atn, ctx) : null;
console.log("DEBUG 5")
console.log(s.toString())
console.log(stopState)
console.log(lookContext)
console.log(r.toString())
console.log(seeThruPreds)
console.log("=====")
this._LOOK(s, stopState, lookContext, r, new Set(), new BitSet(), seeThruPreds, true);
console.log(r.toString())
return r;
};
@ -151,6 +159,7 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
}
lookBusy.add(c);
if (s === stopState) {
console.log("DEBUG 6")
if (ctx ===null) {
look.addOne(Token.EPSILON);
return;
@ -168,6 +177,7 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
return;
}
if (ctx !== PredictionContext.EMPTY) {
console.log("DEBUG 7")
// run thru all possible stack tops in ctx
for(var i=0; i<ctx.length; i++) {
var returnState = this.atn.states[ctx.getReturnState(i)];
@ -187,6 +197,9 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
for(var j=0; j<s.transitions.length; j++) {
var t = s.transitions[j];
if (t.constructor === RuleTransition) {
console.log("DEBUG 8")
if (calledRuleStack.contains(t.target.ruleIndex)) {
continue;
}
@ -197,18 +210,26 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
} finally {
calledRuleStack.remove(t.target.ruleIndex);
}
console.log(look.toString())
} else if (t instanceof AbstractPredicateTransition ) {
console.log("DEBUG 9")
if (seeThruPreds) {
this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
} else {
look.addOne(LL1Analyzer.HIT_PRED);
}
} else if( t.isEpsilon) {
console.log("DEBUG 10")
this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
} else if (t.constructor === WildcardTransition) {
console.log("DEBUG 11")
look.addRange( Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType );
} else {
console.log("DEBUG 12")
var set = t.label;
console.log(set.toString())
if (set !== null) {
if (t instanceof NotSetTransition) {
set = set.complement(Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType);
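
These DEBUG prints trace _LOOK's recursion, whose termination hinges on the lookBusy set: a configuration is recorded before expansion and skipped if seen again, so epsilon cycles in the ATN cannot recurse forever. A self-contained Go sketch of that cycle guard (types and names are illustrative, not the runtime's):

package main

import "fmt"

type state int

// look records each state in busy before expanding it and bails out on a
// repeat visit, so cyclic edges cannot cause unbounded recursion.
func look(s state, edges map[state][]state, busy map[state]bool, out map[state]bool) {
	if busy[s] {
		return // already being expanded somewhere up the stack
	}
	busy[s] = true
	out[s] = true
	for _, t := range edges[s] {
		look(t, edges, busy, out)
	}
}

func main() {
	edges := map[state][]state{0: {1}, 1: {2, 0}, 2: {}} // note the 1 -> 0 cycle
	out := map[state]bool{}
	look(0, edges, map[state]bool{}, out)
	fmt.Println(len(out)) // 3: terminates despite the cycle
}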

View File

@ -134,6 +134,8 @@ Parser.prototype.reset = function() {
Parser.prototype.match = function(ttype) {
var t = this.getCurrentToken();
console.log("TOKEN IS " + t.text)
if (t.type === ttype) {
this._errHandler.reportMatch(this);
this.consume();

View File

@ -160,9 +160,11 @@ Recognizer.prototype.precpred = function(localctx , precedence) {
Object.defineProperty(Recognizer.prototype, "state", {
get : function() {
return this._stateNumber;
},
set : function(state) {
console.log("SETTING STATE" + state + " from " + this._stateNumber )
this._stateNumber = state;
}
});

View File

@ -75,8 +75,11 @@ ATN.prototype.nextTokensInContext = function(s, ctx) {
// rule.
ATN.prototype.nextTokensNoContext = function(s) {
if (s.nextTokenWithinRule !== null ) {
console.log("DEBUG 1")
return s.nextTokenWithinRule;
}
console.log("DEBUG 2")
console.log(this.nextTokensInContext(s, null).toString())
s.nextTokenWithinRule = this.nextTokensInContext(s, null);
s.nextTokenWithinRule.readOnly = true;
return s.nextTokenWithinRule;

View File

@ -101,8 +101,8 @@ function LexerATNSimulator(recog, atn, decisionToDFA, sharedContextCache) {
LexerATNSimulator.prototype = Object.create(ATNSimulator.prototype);
LexerATNSimulator.prototype.constructor = LexerATNSimulator;
LexerATNSimulator.debug = false;
LexerATNSimulator.dfa_debug = false;
LexerATNSimulator.prototype.debug = true;
LexerATNSimulator.prototype.dfa_debug = false;
LexerATNSimulator.MIN_DFA_EDGE = 0;
LexerATNSimulator.MAX_DFA_EDGE = 127; // forces unicode to stay in ATN
@ -117,6 +117,9 @@ LexerATNSimulator.prototype.copyState = function(simulator) {
};
LexerATNSimulator.prototype.match = function(input, mode) {
console.log("MATCH")
this.match_calls += 1;
this.mode = mode;
var mark = input.mark();
@ -225,6 +228,8 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
t = input.LA(1);
s = target; // flip; current DFA target becomes new src/from state
}
console.log("OUT")
return this.failOrAccept(this.prevAccept, input, s.configs, t);
};
@ -286,6 +291,7 @@ LexerATNSimulator.prototype.failOrAccept = function(prevAccept, input, reach, t)
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor;
this.accept(input, lexerActionExecutor, this.startIndex,
prevAccept.index, prevAccept.line, prevAccept.column);
console.log(prevAccept.dfaState.prediction)
return prevAccept.dfaState.prediction;
} else {
// if no accept and EOF is first char, return EOF
@ -311,7 +317,7 @@ LexerATNSimulator.prototype.getReachableConfigSet = function(input, closure,
continue;
}
if (this.debug) {
console.log("testing %s at %s\n", this.getTokenName(t), cfg
console.log("testing %s at %s", this.getTokenName(t), cfg
.toString(this.recog, true));
}
for (var j = 0; j < cfg.state.transitions.length; j++) {
@ -338,7 +344,7 @@ LexerATNSimulator.prototype.getReachableConfigSet = function(input, closure,
LexerATNSimulator.prototype.accept = function(input, lexerActionExecutor,
startIndex, index, line, charPos) {
if (this.debug) {
console.log("ACTION %s\n", lexerActionExecutor);
console.log("ACTION %s", lexerActionExecutor);
}
// seek to after last char in token
input.seek(index);
@ -358,6 +364,7 @@ LexerATNSimulator.prototype.getReachableTarget = function(trans, t) {
};
LexerATNSimulator.prototype.computeStartState = function(input, p) {
var initialContext = PredictionContext.EMPTY;
var configs = new OrderedATNConfigSet();
for (var i = 0; i < p.transitions.length; i++) {
@ -365,6 +372,7 @@ LexerATNSimulator.prototype.computeStartState = function(input, p) {
var cfg = new LexerATNConfig({state:target, alt:i+1, context:initialContext}, null);
this.closure(input, cfg, configs, false, false, false);
}
return configs;
};
@ -382,14 +390,17 @@ LexerATNSimulator.prototype.closure = function(input, config, configs,
if (this.debug) {
console.log("closure(" + config.toString(this.recog, true) + ")");
}
if (config.state instanceof RuleStopState) {
if (this.debug) {
if (this.recog !== null) {
console.log("closure at %s rule stop %s\n", this.recog.getRuleNames()[config.state.ruleIndex], config);
if (this.recog !== null && this.recog.getRuleNames) {
console.log("closure at %s rule stop %s", this.recog.getRuleNames()[config.state.ruleIndex], config);
} else {
console.log("closure at rule stop %s\n", config);
console.log("closure at rule stop %s", config);
}
}
if (config.context === null || config.context.hasEmptyPath()) {
if (config.context === null || config.context.isEmpty()) {
configs.add(config);
@ -651,6 +662,7 @@ LexerATNSimulator.prototype.consume = function(input) {
};
LexerATNSimulator.prototype.getTokenName = function(tt) {
console.log(tt);
if (tt === -1) {
return "EOF";
} else {

View File

@ -313,7 +313,7 @@ function ParserATNSimulator(parser, atn, decisionToDFA, sharedContextCache) {
ParserATNSimulator.prototype = Object.create(ATNSimulator.prototype);
ParserATNSimulator.prototype.constructor = ParserATNSimulator;
ParserATNSimulator.prototype.debug = false;
ParserATNSimulator.prototype.debug = true;
ParserATNSimulator.prototype.debug_list_atn_decisions = false;
ParserATNSimulator.prototype.dfa_debug = false;
ParserATNSimulator.prototype.retry_debug = false;
@ -323,6 +323,9 @@ ParserATNSimulator.prototype.reset = function() {
};
ParserATNSimulator.prototype.adaptivePredict = function(input, decision, outerContext) {
console.log("adaptive predict")
if (this.debug || this.debug_list_atn_decisions) {
console.log("adaptivePredict decision " + decision +
" exec LA(1)==" + this.getLookaheadName(input) +
@ -1512,13 +1515,16 @@ ParserATNSimulator.prototype.getConflictingAltsOrUniqueAlt = function(configs) {
};
ParserATNSimulator.prototype.getTokenName = function( t) {
console.log("Get token name")
if (t===Token.EOF) {
return "EOF";
}
if( this.parser!==null && this.parser.literalNames!==null) {
if (t >= this.parser.literalNames.length) {
console.log("" + t + " ttype out of range: " + this.parser.literalNames);
console.log("" + this.parser.getInputStream().getTokens());
// console.log(this.parser.getInputStream().getTokens());
} else {
return this.parser.literalNames[t] + "<" + t + ">";
}
@ -1529,7 +1535,7 @@ ParserATNSimulator.prototype.getTokenName = function( t) {
ParserATNSimulator.prototype.getLookaheadName = function(input) {
return this.getTokenName(input.LA(1));
};
// Used for debugging in adaptivePredict around execATN but I cut
// it out for clarity now that alg. works well. We can leave this
// "dead" code for a bit.

View File

@ -74,7 +74,7 @@ ConsoleErrorListener.INSTANCE = new ConsoleErrorListener();
// </pre>
//
ConsoleErrorListener.prototype.syntaxError = function(recognizer, offendingSymbol, line, column, msg, e) {
console.error("line " + line + ":" + column + " " + msg);
console.log("line " + line + ":" + column + " " + msg);
};
function ProxyErrorListener(delegates) {
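
Since the change above routes the default listener's output through console.log, an application wanting stderr reporting can register its own listener. The Go sketch below only illustrates the idea; the Go port's actual ErrorListener signature is not shown in this diff, so the interface shape here is an assumption:

package main

import (
	"fmt"
	"os"
)

// errorListener is an illustrative stand-in for the runtime's listener
// interface; the real signature in the port may differ.
type errorListener interface {
	syntaxError(line, column int, msg string)
}

// stderrListener restores console.error-style reporting on stderr.
type stderrListener struct{}

func (stderrListener) syntaxError(line, column int, msg string) {
	fmt.Fprintf(os.Stderr, "line %d:%d %s\n", line, column, msg)
}

func main() {
	var l errorListener = stderrListener{}
	l.syntaxError(1, 4, "mismatched input")
}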

View File

@ -244,16 +244,28 @@ DefaultErrorStrategy.prototype.sync = function(recognizer) {
if (this.inErrorRecoveryMode(recognizer)) {
return;
}
console.log("STATE" + recognizer.state)
var s = recognizer._interp.atn.states[recognizer.state];
var la = recognizer.getTokenStream().LA(1);
console.log("LA" + la);
// try cheaper subset first; might get lucky. seems to shave a wee bit off
if (la===Token.EOF || recognizer.atn.nextTokens(s).contains(la)) {
console.log("OK1")
return;
}
// Return but don't end recovery. only do that upon valid token match
if(recognizer.isExpectedToken(la)) {
console.log("OK2")
return;
}
console.log("LA" + la)
// console.log(recognizer.GetATN().nextTokens(s, nil))
switch (s.stateType) {
case ATNState.BLOCK_START:
case ATNState.STAR_BLOCK_START:

View File

@ -0,0 +1 @@
1 + 2 = 3 + 5

View File

@ -1,11 +1,41 @@
var antlr4 = require("./antlr4/index"),
tree = antlr4.tree,
ArithmeticLexer = require("./ArithmeticLexer").ArithmeticLexer,
ArithmeticParser = require("./ArithmeticParser").ArithmeticParser;
ArithmeticParser = require("./ArithmeticParser").ArithmeticParser,
ArithmeticListener = require("./ArithmeticListener").ArithmeticListener;
var a = new antlr4.FileStream("foo.txt");
var l = new ArithmeticLexer(a);
var s = new antlr4.CommonTokenStream(l, 0);
var p = new ArithmeticParser(s);
p.buildParseTrees = true;
//KeyPrinter = function() {
// ArithmeticListener.call(this); // inherit default listener
// return this;
//};
//
//// inherit default listener
//KeyPrinter.prototype = Object.create(ArithmeticListener.prototype);
//KeyPrinter.prototype.constructor = KeyPrinter;
//
//// override default listener behavior
//KeyPrinter.prototype.exitAtom = function(ctx) {
//
// console.log("Oh, a atom!", ctx.start.source[1].strdata[ctx.start.start]);
//};
//
//KeyPrinter.prototype.exitExpression = function(ctx) {
//
// console.log("Oh, an expression!", ctx);
// throw new Error();
//};
var tree = p.equation();
//var printer = new KeyPrinter();
//antlr4.tree.ParseTreeWalker.DEFAULT.walk(printer, tree);
//console.log( tree.children[0].children[0].children[0].children );
p.equation();

View File

@ -820,15 +820,15 @@ function <lexer.name>(input) {
<rest(lexer.modes):{m| <lexer.name>.<m> = <i>;}; separator="\n">
<lexer.name>.modeNames = [ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> ];
<lexer.name>.prototype.modeNames = [ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> ];
<lexer.name>.literalNames = [ <lexer.literalNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];
<lexer.name>.prototype.literalNames = [ <lexer.literalNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];
<lexer.name>.symbolicNames = [ <lexer.symbolicNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];
<lexer.name>.prototype.symbolicNames = [ <lexer.symbolicNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];
<lexer.name>.ruleNames = [ <lexer.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor> ];
<lexer.name>.prototype.ruleNames = [ <lexer.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor> ];
<lexer.name>.grammarFileName = "<lexer.grammarFileName>";
<lexer.name>.prototype.grammarFileName = "<lexer.grammarFileName>";
<namedActions.members>