diff --git a/pb/src/parserTest/foo.txt b/pb/src/parserTest/foo.txt
index e69de29bb..067b749fd 100644
--- a/pb/src/parserTest/foo.txt
+++ b/pb/src/parserTest/foo.txt
@@ -0,0 +1 @@
+1 + 2 = 3 + 5
diff --git a/pb/src/parserTest/test.go b/pb/src/parserTest/test.go
index a3e60de7e..54179b96b 100644
--- a/pb/src/parserTest/test.go
+++ b/pb/src/parserTest/test.go
@@ -15,8 +15,12 @@ func main() {
p := parser.NewArithmeticParser(s)
+ p.BuildParseTrees = true
+
p.Equation()
+
+
}
diff --git a/runtime/Go/src/antlr4/ATN.go b/runtime/Go/src/antlr4/ATN.go
index f95fdb0e0..3dbd57afc 100644
--- a/runtime/Go/src/antlr4/ATN.go
+++ b/runtime/Go/src/antlr4/ATN.go
@@ -1,4 +1,5 @@
package antlr4
+import "fmt"
var ATNINVALID_ALT_NUMBER = 0
@@ -55,7 +56,8 @@ func NewATN(grammarType int, maxTokenType int) *ATN {
// restricted to tokens reachable staying within {@code s}'s rule.
func (this *ATN) nextTokensInContext(s IATNState, ctx IRuleContext) *IntervalSet {
var anal = NewLL1Analyzer(this)
- return anal.LOOK(s, nil, ctx)
+ var res = anal.LOOK(s, nil, ctx)
+ return res
}
// Compute the set of valid tokens that can occur starting in {@code s} and
@@ -63,8 +65,11 @@ func (this *ATN) nextTokensInContext(s IATNState, ctx IRuleContext) *IntervalSet
// rule.
func (this *ATN) nextTokensNoContext(s IATNState) *IntervalSet {
if s.GetNextTokenWithinRule() != nil {
+ fmt.Println("DEBUG 1")
return s.GetNextTokenWithinRule()
}
+ fmt.Println("DEBUG 2")
+ fmt.Println(this.nextTokensInContext(s, nil))
s.SetNextTokenWithinRule(this.nextTokensInContext(s, nil))
s.GetNextTokenWithinRule().readOnly = true
return s.GetNextTokenWithinRule()
diff --git a/runtime/Go/src/antlr4/ATNConfig.go b/runtime/Go/src/antlr4/ATNConfig.go
index 4889a3d1b..ae0958c1b 100644
--- a/runtime/Go/src/antlr4/ATNConfig.go
+++ b/runtime/Go/src/antlr4/ATNConfig.go
@@ -29,6 +29,8 @@ type IATNConfig interface {
SetReachesIntoOuterContext(int)
String() string
+
+ shortHashString() string
}
type ATNConfig struct {
diff --git a/runtime/Go/src/antlr4/ATNConfigSet.go b/runtime/Go/src/antlr4/ATNConfigSet.go
index 1df0a07e8..b081642de 100644
--- a/runtime/Go/src/antlr4/ATNConfigSet.go
+++ b/runtime/Go/src/antlr4/ATNConfigSet.go
@@ -11,20 +11,18 @@ import (
///
func hashATNConfig(c interface{}) string {
- return c.(*ATNConfig).shortHashString()
+ return c.(IATNConfig).shortHashString()
}
func equalATNConfigs(a, b interface{}) bool {
- fmt.Println("compare")
- fmt.Println(a)
+ if a == nil || b == nil {
+ return false
+ }
if a == b {
return true
}
- if a == nil || b == nil {
- return false
- }
ai,ok := a.(IATNConfig)
bi,ok1 := b.(IATNConfig)
@@ -34,8 +32,8 @@ func equalATNConfigs(a, b interface{}) bool {
}
return ai.GetState().GetStateNumber() == bi.GetState().GetStateNumber() &&
- ai.GetAlt() == bi.GetAlt() &&
- ai.GetSemanticContext().equals(bi.GetSemanticContext())
+ ai.GetAlt() == bi.GetAlt() &&
+ ai.GetSemanticContext().equals(bi.GetSemanticContext())
}
@@ -107,8 +105,6 @@ func NewATNConfigSet(fullCtx bool) *ATNConfigSet {
// /
func (this *ATNConfigSet) add(config IATNConfig, mergeCache *DoubleDict) bool {
-// fmt.Println("DEBUG = Adding config : " + config.String())
-
if this.readOnly {
panic("This set is readonly")
}
@@ -136,6 +132,7 @@ func (this *ATNConfigSet) add(config IATNConfig, mergeCache *DoubleDict) bool {
existing.setPrecedenceFilterSuppressed(true)
}
existing.SetContext(merged) // replace context no need to alt mapping
+
return true
}
@@ -288,6 +285,9 @@ func (this *ATNConfigSet) String() string {
return s
}
+
+
+
type OrderedATNConfigSet struct {
*ATNConfigSet
}
@@ -297,7 +297,7 @@ func NewOrderedATNConfigSet() *OrderedATNConfigSet {
this := new(OrderedATNConfigSet)
this.ATNConfigSet = NewATNConfigSet(false)
- this.configLookup = NewSet(nil, nil)
+// this.configLookup = NewSet(nil, nil) // TODO not sure why this would be overriden
return this
}
diff --git a/runtime/Go/src/antlr4/BufferedTokenStream.go b/runtime/Go/src/antlr4/BufferedTokenStream.go
index 49e860aef..7914a184e 100644
--- a/runtime/Go/src/antlr4/BufferedTokenStream.go
+++ b/runtime/Go/src/antlr4/BufferedTokenStream.go
@@ -11,13 +11,15 @@
package antlr4
-import "strconv"
+import (
+ "strconv"
+)
// bt is just to keep meaningful parameter types to Parser
type BufferedTokenStream struct {
tokenSource TokenSource
- tokens []*Token
+ tokens []IToken
index int
fetchedEOF bool
channel int
@@ -33,7 +35,7 @@ func NewBufferedTokenStream(tokenSource TokenSource) *BufferedTokenStream {
// A collection of all tokens fetched from the token source. The list is
// considered a complete view of the input once {@link //fetchedEOF} is set
// to {@code true}.
- ts.tokens = make([]*Token, 0)
+ ts.tokens = make([]IToken, 0)
// The index into {@link //tokens} of the current token (next token to
// {@link //consume}). {@link //tokens}{@code [}{@link //p}{@code ]} should
@@ -83,7 +85,7 @@ func (bt *BufferedTokenStream) Seek(index int) {
bt.index = bt.adjustSeekIndex(index)
}
-func (bt *BufferedTokenStream) Get(index int) *Token {
+func (bt *BufferedTokenStream) Get(index int) IToken {
bt.lazyInit()
return bt.tokens[index]
}
@@ -136,10 +138,10 @@ func (bt *BufferedTokenStream) fetch(n int) int {
}
for i := 0; i < n; i++ {
- var t *Token = bt.tokenSource.nextToken()
- t.tokenIndex = len(bt.tokens)
+ var t IToken = bt.tokenSource.nextToken()
+ t.SetTokenIndex( len(bt.tokens) )
bt.tokens = append(bt.tokens, t)
- if t.tokenType == TokenEOF {
+ if t.GetTokenType() == TokenEOF {
bt.fetchedEOF = true
return i + 1
}
@@ -148,22 +150,22 @@ func (bt *BufferedTokenStream) fetch(n int) int {
}
// Get all tokens from start..stop inclusively///
-func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet) []*Token {
+func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet) []IToken {
if start < 0 || stop < 0 {
return nil
}
bt.lazyInit()
- var subset = make([]*Token, 0)
+ var subset = make([]IToken, 0)
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
for i := start; i < stop; i++ {
var t = bt.tokens[i]
- if t.tokenType == TokenEOF {
+ if t.GetTokenType() == TokenEOF {
break
}
- if types == nil || types.contains(t.tokenType) {
+ if types == nil || types.contains(t.GetTokenType()) {
subset = append(subset, t)
}
}
@@ -171,17 +173,17 @@ func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet
}
func (bt *BufferedTokenStream) LA(i int) int {
- return bt.LT(i).tokenType
+ return bt.LT(i).GetTokenType()
}
-func (bt *BufferedTokenStream) LB(k int) *Token {
+func (bt *BufferedTokenStream) LB(k int) IToken {
if bt.index-k < 0 {
return nil
}
return bt.tokens[bt.index-k]
}
-func (bt *BufferedTokenStream) LT(k int) *Token {
+func (bt *BufferedTokenStream) LT(k int) IToken {
bt.lazyInit()
if k == 0 {
return nil
@@ -233,7 +235,7 @@ func (bt *BufferedTokenStream) GetTokenSource() TokenSource {
// Reset bt token stream by setting its token source.///
func (bt *BufferedTokenStream) SetTokenSource(tokenSource TokenSource) {
bt.tokenSource = tokenSource
- bt.tokens = make([]*Token, 0)
+ bt.tokens = make([]IToken, 0)
bt.index = -1
}
@@ -247,8 +249,8 @@ func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel int) int {
return -1
}
var token = bt.tokens[i]
- for token.channel != bt.channel {
- if token.tokenType == TokenEOF {
+ for token.GetChannel() != bt.channel {
+ if token.GetTokenType() == TokenEOF {
return -1
}
i += 1
@@ -262,7 +264,7 @@ func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel int) int {
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and 0.
func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel int) int {
- for i >= 0 && bt.tokens[i].channel != channel {
+ for i >= 0 && bt.tokens[i].GetChannel() != channel {
i -= 1
}
return i
@@ -271,7 +273,7 @@ func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel int) int {
// Collect all tokens on specified channel to the right of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
// EOF. If channel is -1, find any non default channel token.
-func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []*Token {
+func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
@@ -291,7 +293,7 @@ func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) [
// Collect all tokens on specified channel to the left of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
// If channel is -1, find any non default channel token.
-func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []*Token {
+func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []IToken {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
@@ -306,15 +308,15 @@ func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []
return bt.filterForChannel(from_, to, channel)
}
-func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) []*Token {
- var hidden = make([]*Token, 0)
+func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) []IToken {
+ var hidden = make([]IToken, 0)
for i := left; i < right+1; i++ {
var t = bt.tokens[i]
if channel == -1 {
- if t.channel != LexerDefaultTokenChannel {
+ if t.GetChannel() != LexerDefaultTokenChannel {
hidden = append(hidden, t)
}
- } else if t.channel == channel {
+ } else if t.GetChannel() == channel {
hidden = append(hidden, t)
}
}
@@ -340,7 +342,7 @@ func (bt *BufferedTokenStream) GetAllText() string {
return bt.GetTextFromInterval(nil)
}
-func (bt *BufferedTokenStream) GetTextFromTokens(start, end *Token) string {
+func (bt *BufferedTokenStream) GetTextFromTokens(start, end IToken) string {
return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
}
@@ -349,11 +351,13 @@ func (bt *BufferedTokenStream) GetTextFromRuleContext(interval IRuleContext) str
}
func (bt *BufferedTokenStream) GetTextFromInterval(interval *Interval) string {
+
bt.lazyInit()
bt.fill()
if interval == nil {
interval = NewInterval(0, len(bt.tokens)-1)
}
+
var start = interval.start
var stop = interval.stop
if start < 0 || stop < 0 {
@@ -362,14 +366,16 @@ func (bt *BufferedTokenStream) GetTextFromInterval(interval *Interval) string {
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
+
var s = ""
for i := start; i < stop+1; i++ {
var t = bt.tokens[i]
- if t.tokenType == TokenEOF {
+ if t.GetTokenType() == TokenEOF {
break
}
- s += t.text()
+ s += t.GetText()
}
+
return s
}
diff --git a/runtime/Go/src/antlr4/CommonTokenFactory.go b/runtime/Go/src/antlr4/CommonTokenFactory.go
index a0d95374e..85d9b187c 100644
--- a/runtime/Go/src/antlr4/CommonTokenFactory.go
+++ b/runtime/Go/src/antlr4/CommonTokenFactory.go
@@ -4,9 +4,10 @@
//
package antlr4
+import "fmt"
type TokenFactory interface {
- Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token
+ Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) IToken
}
type CommonTokenFactory struct {
@@ -45,22 +46,27 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
//
var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
-func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token {
+func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) IToken {
+
+ fmt.Println("Token factory creating: " + text)
var t = NewCommonToken(source, ttype, channel, start, stop)
t.line = line
t.column = column
if text != "" {
- t.setText(text)
+ t.SetText(text)
} else if this.copyText && source.charStream != nil {
- t.setText(source.charStream.GetTextFromInterval(NewInterval(start, stop)))
+ t.SetText(source.charStream.GetTextFromInterval(NewInterval(start, stop)))
}
return t.Token
}
-func (this *CommonTokenFactory) createThin(ttype int, text string) *Token {
+func (this *CommonTokenFactory) createThin(ttype int, text string) IToken {
+
+ fmt.Println("Token factory creating: " + text)
+
var t = NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
- t.setText(text)
+ t.SetText(text)
return t.Token
}
diff --git a/runtime/Go/src/antlr4/CommonTokenStream.go b/runtime/Go/src/antlr4/CommonTokenStream.go
index 422029113..a2bbf99eb 100644
--- a/runtime/Go/src/antlr4/CommonTokenStream.go
+++ b/runtime/Go/src/antlr4/CommonTokenStream.go
@@ -43,7 +43,7 @@ func (ts *CommonTokenStream) adjustSeekIndex(i int) int {
return ts.nextTokenOnChannel(i, ts.channel)
}
-func (ts *CommonTokenStream) LB(k int) *Token {
+func (ts *CommonTokenStream) LB(k int) IToken {
if k == 0 || ts.index-k < 0 {
return nil
}
@@ -61,7 +61,7 @@ func (ts *CommonTokenStream) LB(k int) *Token {
return ts.tokens[i]
}
-func (ts *CommonTokenStream) LT(k int) *Token {
+func (ts *CommonTokenStream) LT(k int) IToken {
ts.lazyInit()
if k == 0 {
return nil
@@ -88,10 +88,10 @@ func (ts *CommonTokenStream) getNumberOfOnChannelTokens() int {
ts.fill()
for i := 0; i < len(ts.tokens); i++ {
var t = ts.tokens[i]
- if t.channel == ts.channel {
+ if t.GetChannel() == ts.channel {
n += 1
}
- if t.tokenType == TokenEOF {
+ if t.GetTokenType() == TokenEOF {
break
}
}
diff --git a/runtime/Go/src/antlr4/DFASerializer.go b/runtime/Go/src/antlr4/DFASerializer.go
index 9bd163899..a0a6b1f5e 100644
--- a/runtime/Go/src/antlr4/DFASerializer.go
+++ b/runtime/Go/src/antlr4/DFASerializer.go
@@ -5,7 +5,12 @@ import (
"strconv"
)
-// A DFA walker that knows how to dump them to serialized strings.#/
+// A DFA walker that knows how to dump them to serialized strings.
+
+type IDFASerializer interface {
+
+}
+
type DFASerializer struct {
dfa *DFA
@@ -66,14 +71,12 @@ func (this *DFASerializer) String() string {
func (this *DFASerializer) getEdgeLabel(i int) string {
if i == 0 {
return "EOF"
- } else if this.literalNames != nil || this.symbolicNames != nil {
- if this.literalNames[i-1] == "" {
- return this.literalNames[i-1]
- } else {
- return this.symbolicNames[i-1]
- }
+ } else if this.literalNames != nil && i - 1 < len(this.literalNames) {
+ return this.literalNames[i-1]
+ } else if this.symbolicNames != nil && i - 1 < len(this.symbolicNames) {
+ return this.symbolicNames[i-1]
} else {
- return string(i - 1)
+ return strconv.Itoa(i-1)
}
}
@@ -101,6 +104,8 @@ func (this *DFASerializer) GetStateString(s *DFAState) string {
}
}
+
+
type LexerDFASerializer struct {
*DFASerializer
}
@@ -117,3 +122,35 @@ func NewLexerDFASerializer(dfa *DFA) *LexerDFASerializer {
func (this *LexerDFASerializer) getEdgeLabel(i int) string {
return "'" + string(i) + "'"
}
+
+func (this *LexerDFASerializer) String() string {
+
+ if this.dfa.s0 == nil {
+ return ""
+ }
+
+ var buf = ""
+ var states = this.dfa.sortedStates()
+ for i := 0; i < len(states); i++ {
+ var s = states[i]
+ if s.edges != nil {
+ var n = len(s.edges)
+ for j := 0; j < n; j++ {
+ var t = s.edges[j]
+ if t != nil && t.stateNumber != 0x7FFFFFFF {
+ buf += this.GetStateString(s)
+ buf += "-"
+ buf += this.getEdgeLabel(j)
+ buf += "->"
+ buf += this.GetStateString(t)
+ buf += "\n"
+ }
+ }
+ }
+ }
+ if len(buf) == 0 {
+ return ""
+ }
+
+ return buf
+}
diff --git a/runtime/Go/src/antlr4/DFAState.go b/runtime/Go/src/antlr4/DFAState.go
index c969a8d59..aa27a15d9 100644
--- a/runtime/Go/src/antlr4/DFAState.go
+++ b/runtime/Go/src/antlr4/DFAState.go
@@ -132,6 +132,7 @@ func (this *DFAState) GetAltSet() *Set {
// {@link ParserATNSimulator//addDFAState} we need to know if any other state
// exists that has this exact set of ATN configurations. The
// {@link //stateNumber} is irrelevant.
+
func (this *DFAState) equals(other interface{}) bool {
if this == other {
@@ -151,7 +152,7 @@ func (this *DFAState) hashString() string {
var s string
if (this.isAcceptState) {
- if (this.predicates == nil) {
+ if (this.predicates != nil) {
s = "=>" + fmt.Sprint(this.predicates)
} else {
s = "=>" + fmt.Sprint(this.prediction)
diff --git a/runtime/Go/src/antlr4/ErrorListener.go b/runtime/Go/src/antlr4/ErrorListener.go
index 675562aa9..6e7c71b6a 100644
--- a/runtime/Go/src/antlr4/ErrorListener.go
+++ b/runtime/Go/src/antlr4/ErrorListener.go
@@ -24,15 +24,19 @@ func NewErrorListener() *DefaultErrorListener {
}
func (this *DefaultErrorListener) SyntaxError(recognizer IRecognizer, offendingSymbol interface{}, line, column int, msg string, e IRecognitionException) {
+ fmt.Println("SyntaxError!")
}
func (this *DefaultErrorListener) ReportAmbiguity(recognizer IParser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
+ fmt.Println("ReportAmbiguity!")
}
func (this *DefaultErrorListener) ReportAttemptingFullContext(recognizer IParser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet) {
+ fmt.Println("ReportAttemptingFullContext!")
}
func (this *DefaultErrorListener) ReportContextSensitivity(recognizer IParser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet) {
+ fmt.Println("ReportContextSensitivity!")
}
type ConsoleErrorListener struct {
diff --git a/runtime/Go/src/antlr4/ErrorStrategy.go b/runtime/Go/src/antlr4/ErrorStrategy.go
index d72f04935..50b142257 100644
--- a/runtime/Go/src/antlr4/ErrorStrategy.go
+++ b/runtime/Go/src/antlr4/ErrorStrategy.go
@@ -9,7 +9,7 @@ import (
type IErrorStrategy interface {
reset(IParser)
- RecoverInline(IParser) *Token
+ RecoverInline(IParser) IToken
Recover(IParser, IRecognitionException)
Sync(IParser)
inErrorRecoveryMode(IParser) bool
@@ -208,20 +208,35 @@ func (this *DefaultErrorStrategy) Sync(recognizer IParser) {
if this.inErrorRecoveryMode(recognizer) {
return
}
+
+ fmt.Println("STATE" + strconv.Itoa(recognizer.GetState()))
+
var s = recognizer.GetInterpreter().atn.states[recognizer.GetState()]
var la = recognizer.GetTokenStream().LA(1)
+
+ fmt.Println("LA" + strconv.Itoa(la))
+
// try cheaper subset first might get lucky. seems to shave a wee bit off
if la == TokenEOF || recognizer.GetATN().nextTokens(s, nil).contains(la) {
+ fmt.Println("OK1")
return
}
// Return but don't end recovery. only do that upon valid token Match
if recognizer.isExpectedToken(la) {
+ fmt.Println("OK2")
return
}
+
+ fmt.Println("LA" + strconv.Itoa(la))
+ fmt.Println(recognizer.GetATN().nextTokens(s, nil))
+
switch s.GetStateType() {
case ATNStateBLOCK_START:
+ fallthrough
case ATNStateSTAR_BLOCK_START:
+ fallthrough
case ATNStatePLUS_BLOCK_START:
+ fallthrough
case ATNStateSTAR_LOOP_ENTRY:
// Report error and recover if possible
if this.singleTokenDeletion(recognizer) != nil {
@@ -229,15 +244,14 @@ func (this *DefaultErrorStrategy) Sync(recognizer IParser) {
} else {
panic(NewInputMisMatchException(recognizer))
}
- break
case ATNStatePLUS_LOOP_BACK:
+ fallthrough
case ATNStateSTAR_LOOP_BACK:
this.ReportUnwantedToken(recognizer)
var expecting = NewIntervalSet()
expecting.addSet(recognizer.getExpectedTokens())
var whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer))
this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
- break
default:
// do nothing if we can't identify the exact kind of ATN state
}
@@ -255,7 +269,7 @@ func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer IParser,
var tokens = recognizer.GetTokenStream()
var input string
if tokens != nil {
- if e.startToken.tokenType == TokenEOF {
+ if e.startToken.GetTokenType() == TokenEOF {
input = ""
} else {
input = tokens.GetTextFromTokens(e.startToken, e.offendingToken)
@@ -279,6 +293,7 @@ func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer IParser,
func (this *DefaultErrorStrategy) ReportInputMisMatch(recognizer IParser, e *InputMisMatchException) {
var msg = "misMatched input " + this.GetTokenErrorDisplay(e.offendingToken) +
" expecting " + e.getExpectedTokens().StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
+ panic(msg)
recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
}
@@ -324,6 +339,7 @@ func (this *DefaultErrorStrategy) ReportUnwantedToken(recognizer IParser) {
var expecting = this.getExpectedTokens(recognizer)
var msg = "extraneous input " + tokenName + " expecting " +
expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
+ panic(msg)
recognizer.NotifyErrorListeners(msg, t, nil)
}
@@ -404,7 +420,7 @@ func (this *DefaultErrorStrategy) ReportMissingToken(recognizer IParser) {
// is in the set of tokens that can follow the {@code ')'} token reference
// in rule {@code atom}. It can assume that you forgot the {@code ')'}.
//
-func (this *DefaultErrorStrategy) RecoverInline(recognizer IParser) *Token {
+func (this *DefaultErrorStrategy) RecoverInline(recognizer IParser) IToken {
// SINGLE TOKEN DELETION
var MatchedSymbol = this.singleTokenDeletion(recognizer)
if MatchedSymbol != nil {
@@ -473,7 +489,7 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer IParser) bool
// deletion successfully recovers from the misMatched input, otherwise
// {@code nil}
//
-func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer IParser) *Token {
+func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer IParser) IToken {
var nextTokenType = recognizer.GetTokenStream().LA(2)
var expecting = this.getExpectedTokens(recognizer)
if expecting.contains(nextTokenType) {
@@ -511,7 +527,7 @@ func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer IParser) *Token
// If you change what tokens must be created by the lexer,
// override this method to create the appropriate tokens.
//
-func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) *Token {
+func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) IToken {
var currentSymbol = recognizer.getCurrentToken()
var expecting = this.getExpectedTokens(recognizer)
var expectedTokenType = expecting.first()
@@ -523,12 +539,12 @@ func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) *Token {
}
var current = currentSymbol
var lookback = recognizer.GetTokenStream().LT(-1)
- if current.tokenType == TokenEOF && lookback != nil {
+ if current.GetTokenType() == TokenEOF && lookback != nil {
current = lookback
}
tf := recognizer.GetTokenFactory()
- return tf.Create(current.source, expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.line, current.column)
+ return tf.Create( current.GetSource(), expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.GetLine(), current.GetColumn())
}
func (this *DefaultErrorStrategy) getExpectedTokens(recognizer IParser) *IntervalSet {
@@ -543,16 +559,16 @@ func (this *DefaultErrorStrategy) getExpectedTokens(recognizer IParser) *Interva
// your token objects because you don't have to go modify your lexer
// so that it creates a NewJava type.
//
-func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t *Token) string {
+func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t IToken) string {
if t == nil {
return ""
}
- var s = t.text()
+ var s = t.GetText()
if s == "" {
- if t.tokenType == TokenEOF {
+ if t.GetTokenType() == TokenEOF {
s = ""
} else {
- s = "<" + strconv.Itoa(t.tokenType) + ">"
+ s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
}
}
return this.escapeWSAndQuote(s)
diff --git a/runtime/Go/src/antlr4/Errors.go b/runtime/Go/src/antlr4/Errors.go
index 8247ad01b..009bd6adf 100644
--- a/runtime/Go/src/antlr4/Errors.go
+++ b/runtime/Go/src/antlr4/Errors.go
@@ -9,7 +9,7 @@ import ()
// and what kind of problem occurred.
type IRecognitionException interface {
- GetOffendingToken() *Token
+ GetOffendingToken() IToken
GetMessage() string
GetInputStream() IntStream
}
@@ -17,7 +17,7 @@ type IRecognitionException interface {
type RecognitionException struct {
message string
recognizer IRecognizer
- offendingToken *Token
+ offendingToken IToken
offendingState int
ctx IRuleContext
input IntStream
@@ -62,7 +62,7 @@ func (this *RecognitionException) GetMessage() string {
return this.message
}
-func (this *RecognitionException) GetOffendingToken() *Token {
+func (this *RecognitionException) GetOffendingToken() IToken {
return this.offendingToken
}
@@ -124,8 +124,8 @@ func (this *LexerNoViableAltException) String() string {
type NoViableAltException struct {
*RecognitionException
- startToken *Token
- offendingToken *Token
+ startToken IToken
+ offendingToken IToken
ctx IParserRuleContext
deadEndConfigs *ATNConfigSet
}
@@ -135,7 +135,7 @@ type NoViableAltException struct {
// of the offending input and also knows where the parser was
// in the various paths when the error. Reported by ReportNoViableAlternative()
//
-func NewNoViableAltException(recognizer IParser, input TokenStream, startToken *Token, offendingToken *Token, deadEndConfigs *ATNConfigSet, ctx IParserRuleContext) *NoViableAltException {
+func NewNoViableAltException(recognizer IParser, input TokenStream, startToken IToken, offendingToken IToken, deadEndConfigs *ATNConfigSet, ctx IParserRuleContext) *NoViableAltException {
if ctx == nil {
ctx = recognizer.GetParserRuleContext()
diff --git a/runtime/Go/src/antlr4/InputStream.go b/runtime/Go/src/antlr4/InputStream.go
index e39aea0e3..c0bf179b7 100644
--- a/runtime/Go/src/antlr4/InputStream.go
+++ b/runtime/Go/src/antlr4/InputStream.go
@@ -32,6 +32,7 @@ func (is *InputStream) Consume() {
}
func (is *InputStream) LA(offset int) int {
+
if offset == 0 {
return 0 // nil
}
@@ -39,9 +40,11 @@ func (is *InputStream) LA(offset int) int {
offset += 1 // e.g., translate LA(-1) to use offset=0
}
var pos = is.index + offset - 1
+
if pos < 0 || pos >= is.size { // invalid
return TokenEOF
}
+
return int(is.data[pos])
}
diff --git a/runtime/Go/src/antlr4/IntervalSet.go b/runtime/Go/src/antlr4/IntervalSet.go
index 13c0a534f..5b30a2a21 100644
--- a/runtime/Go/src/antlr4/IntervalSet.go
+++ b/runtime/Go/src/antlr4/IntervalSet.go
@@ -3,6 +3,7 @@ package antlr4
import (
"strconv"
"strings"
+ "fmt"
)
type Interval struct {
@@ -67,6 +68,7 @@ func (i *IntervalSet) addRange(l, h int) {
}
func (is *IntervalSet) addInterval(v *Interval) {
+ fmt.Println("addInterval" + v.String())
if is.intervals == nil {
is.intervals = make([]*Interval, 0)
is.intervals = append(is.intervals, v)
@@ -94,7 +96,9 @@ func (is *IntervalSet) addInterval(v *Interval) {
}
func (i *IntervalSet) addSet(other *IntervalSet) *IntervalSet {
+ fmt.Println("addSet")
if other.intervals != nil {
+ fmt.Println(len(other.intervals))
for k := 0; k < len(other.intervals); k++ {
var i2 = other.intervals[k]
i.addInterval(NewInterval(i2.start, i2.stop))
@@ -249,6 +253,7 @@ func (is *IntervalSet) toCharString() string {
}
func (is *IntervalSet) toIndexString() string {
+
var names = make([]string, 0)
for i := 0; i < len(is.intervals); i++ {
var v = is.intervals[i]
@@ -256,10 +261,10 @@ func (is *IntervalSet) toIndexString() string {
if v.start == TokenEOF {
names = append(names, "")
} else {
- names = append(names, string(v.start))
+ names = append(names, strconv.Itoa(v.start))
}
} else {
- names = append(names, string(v.start)+".."+string(v.stop-1))
+ names = append(names, strconv.Itoa(v.start)+".."+strconv.Itoa(v.stop-1))
}
}
if len(names) > 1 {
diff --git a/runtime/Go/src/antlr4/LL1Analyzer.go b/runtime/Go/src/antlr4/LL1Analyzer.go
index 430ec2d47..588ef88e4 100644
--- a/runtime/Go/src/antlr4/LL1Analyzer.go
+++ b/runtime/Go/src/antlr4/LL1Analyzer.go
@@ -1,6 +1,8 @@
package antlr4
-import ()
+import (
+ "fmt"
+)
type LL1Analyzer struct {
atn *ATN
@@ -71,9 +73,18 @@ func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx IRuleContext) *IntervalS
var seeThruPreds = true // ignore preds get all lookahead
var lookContext IPredictionContext
if ctx != nil {
- predictionContextFromRuleContext(s.GetATN(), ctx)
+ lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)
}
+ fmt.Println("DEBUG 5")
+// fmt.Println("DEBUG" + lookContext.String())
+ fmt.Println(s)
+ fmt.Println(stopState)
+ fmt.Println(lookContext)
+ fmt.Println(r)
+ fmt.Println(seeThruPreds)
+ fmt.Println("=====")
la._LOOK(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
+ fmt.Println(r)
return r
}
@@ -107,15 +118,36 @@ func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx IRuleContext) *IntervalS
// outermost context is reached. This parameter has no effect if {@code ctx}
// is {@code nil}.
+
+func (la *LL1Analyzer) __LOOK(s, stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int){
+
+ returnState := la.atn.states[ctx.getReturnState(i)]
+
+ removed := calledRuleStack.contains(returnState.GetRuleIndex())
+
+ defer func() {
+ if removed {
+ calledRuleStack.add(returnState.GetRuleIndex())
+ }
+ }()
+
+ calledRuleStack.remove(returnState.GetRuleIndex())
+ la._LOOK(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+
+}
+
func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
c := NewATNConfig6(s, 0, ctx)
- if lookBusy.add(c) == nil {
+ if lookBusy.contains(c) {
return
}
+ lookBusy.add(c)
+
if s == stopState {
+ fmt.Println("DEBUG 6")
if ctx == nil {
look.addOne(TokenEpsilon)
return
@@ -137,24 +169,13 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
}
if ctx != PredictionContextEMPTY {
+ fmt.Println("DEBUG 7")
// run thru all possible stack tops in ctx
for i := 0; i < ctx.length(); i++ {
returnState := la.atn.states[ctx.getReturnState(i)]
- // System.out.println("popping back to "+retState)
-
- removed := calledRuleStack.contains(returnState.GetRuleIndex())
-
- // TODO this is incorrect
- defer func() {
- if removed {
- calledRuleStack.add(returnState.GetRuleIndex())
- }
- }()
-
- calledRuleStack.clear(returnState.GetRuleIndex())
- la._LOOK(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+ la.__LOOK(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
}
return
@@ -167,6 +188,7 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
t := s.GetTransitions()[i]
if t1, ok := t.(*RuleTransition); ok {
+ fmt.Println("DEBUG 8")
if calledRuleStack.contains(t1.getTarget().GetRuleIndex()) {
continue
@@ -174,24 +196,34 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
newContext := SingletonPredictionContextCreate(ctx, t1.followState.GetStateNumber())
- defer func() {
- calledRuleStack.remove(t1.getTarget().GetRuleIndex())
- }()
+ la.___LOOK(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
- calledRuleStack.add(t1.getTarget().GetRuleIndex())
- la._LOOK(t.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
- } else if t2, ok := t.(*AbstractPredicateTransition); ok {
+ fmt.Println(look)
+//
+// defer func() {
+// calledRuleStack.remove(t1.getTarget().GetRuleIndex())
+// }()
+//
+// calledRuleStack.add(t1.getTarget().GetRuleIndex())
+// la._LOOK(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+
+ } else if t2, ok := t.(IAbstractPredicateTransition); ok {
+ fmt.Println("DEBUG 9")
if seeThruPreds {
la._LOOK(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
look.addOne(LL1AnalyzerHIT_PRED)
}
} else if t.getIsEpsilon() {
+ fmt.Println("DEBUG 10")
la._LOOK(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else if _, ok := t.(*WildcardTransition); ok {
+ fmt.Println("DEBUG 11")
look.addRange(TokenMinUserTokenType, la.atn.maxTokenType)
} else {
+ fmt.Println("DEBUG 12")
set := t.getLabel()
+ fmt.Println(set)
if set != nil {
if _, ok := t.(*NotSetTransition); ok {
set = set.complement(TokenMinUserTokenType, la.atn.maxTokenType)
@@ -201,3 +233,17 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
}
}
}
+
+func (la *LL1Analyzer) ___LOOK(stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
+
+	// ctx is already the SingletonPredictionContext built by the caller; re-wrapping it here double-nested the follow context.
+
+	defer func() {
+		calledRuleStack.remove(t1.getTarget().GetRuleIndex())
+	}()
+
+	calledRuleStack.add(t1.getTarget().GetRuleIndex())
+	la._LOOK(t1.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+
+
+}
diff --git a/runtime/Go/src/antlr4/Lexer.go b/runtime/Go/src/antlr4/Lexer.go
index 57c8eb17e..17e66cccc 100644
--- a/runtime/Go/src/antlr4/Lexer.go
+++ b/runtime/Go/src/antlr4/Lexer.go
@@ -30,7 +30,7 @@ type Lexer struct {
_input CharStream
_factory TokenFactory
_tokenFactorySourcePair *TokenSourceCharStreamPair
- _token *Token
+ _token IToken
_tokenStartCharIndex int
_tokenStartLine int
_tokenStartColumn int
@@ -166,12 +166,11 @@ func (l *Lexer) safeMatch() (ret int) {
}
// Return a token from l source i.e., Match a token on the char stream.
-func (l *Lexer) nextToken() *Token {
+func (l *Lexer) nextToken() IToken {
if l._input == nil {
panic("nextToken requires a non-nil input stream.")
}
- // do this when done consuming
var tokenStartMarker = l._input.Mark()
// previously in finally block
@@ -244,7 +243,7 @@ func (l *Lexer) mode(m int) {
}
func (l *Lexer) pushMode(m int) {
- if LexerATNSimulatordebug {
+ if LexerATNSimulatorDebug {
fmt.Println("pushMode " + strconv.Itoa(m))
}
l._modeStack.Push(l._mode)
@@ -255,7 +254,7 @@ func (l *Lexer) popMode() int {
if len(l._modeStack) == 0 {
panic("Empty Stack")
}
- if LexerATNSimulatordebug {
+ if LexerATNSimulatorDebug {
fmt.Println("popMode back to " + fmt.Sprint(l._modeStack[0:len(l._modeStack)-1]))
}
i, _ := l._modeStack.Pop()
@@ -280,7 +279,7 @@ func (l *Lexer) setInputStream(input CharStream) {
// and GetToken (to push tokens into a list and pull from that list
// rather than a single variable as l implementation does).
// /
-func (l *Lexer) emitToken(token *Token) {
+func (l *Lexer) emitToken(token IToken) {
l._token = token
}
@@ -290,13 +289,13 @@ func (l *Lexer) emitToken(token *Token) {
// use that to set the token's text. Override l method to emit
// custom Token objects or provide a Newfactory.
// /
-func (l *Lexer) emit() *Token {
+func (l *Lexer) emit() IToken {
var t = l._factory.Create(l._tokenFactorySourcePair, l._type, l._text, l._channel, l._tokenStartCharIndex, l.getCharIndex()-1, l._tokenStartLine, l._tokenStartColumn)
l.emitToken(t)
return t
}
-func (l *Lexer) emitEOF() *Token {
+func (l *Lexer) emitEOF() IToken {
cpos := l.getCharPositionInLine()
lpos := l.getLine()
var eof = l._factory.Create(l._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, l._input.Index(), l._input.Index()-1, lpos, cpos)
@@ -346,11 +345,13 @@ func (this *Lexer) GetATN() *ATN {
// Return a list of all Token objects in input char stream.
// Forces load of all tokens. Does not include EOF token.
// /
-func (l *Lexer) getAllTokens() []*Token {
- var tokens = make([]*Token, 0)
+func (l *Lexer) getAllTokens() []IToken {
+ fmt.Println("getAllTokens")
+ var tokens = make([]IToken, 0)
var t = l.nextToken()
- for t.tokenType != TokenEOF {
+ for t.GetTokenType() != TokenEOF {
tokens = append(tokens, t)
+ fmt.Println("getAllTokens")
t = l.nextToken()
}
return tokens
diff --git a/runtime/Go/src/antlr4/LexerATNSimulator.go b/runtime/Go/src/antlr4/LexerATNSimulator.go
index 8b5d1def9..e2937c53c 100644
--- a/runtime/Go/src/antlr4/LexerATNSimulator.go
+++ b/runtime/Go/src/antlr4/LexerATNSimulator.go
@@ -88,8 +88,8 @@ func NewLexerATNSimulator(recog ILexer, atn *ATN, decisionToDFA []*DFA, sharedCo
return this
}
-var LexerATNSimulatordebug = false
-var LexerATNSimulatordfa_debug = false
+var LexerATNSimulatorDebug = true
+var LexerATNSimulatorDFADebug = false
var LexerATNSimulatorMIN_DFA_EDGE = 0
var LexerATNSimulatorMAX_DFA_EDGE = 127 // forces unicode to stay in ATN
@@ -105,6 +105,9 @@ func (this *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
+ fmt.Println("Match")
+
+
this.Match_calls += 1
this.mode = mode
var mark = input.Mark()
@@ -134,7 +137,7 @@ func (this *LexerATNSimulator) reset() {
func (this *LexerATNSimulator) MatchATN(input CharStream) int {
var startState = this.atn.modeToStartState[this.mode]
- if LexerATNSimulatordebug {
+ if LexerATNSimulatorDebug {
fmt.Println("MatchATN mode " + strconv.Itoa(this.mode) + " start: " + startState.String())
}
var old_mode = this.mode
@@ -150,14 +153,23 @@ func (this *LexerATNSimulator) MatchATN(input CharStream) int {
var predict = this.execATN(input, next)
- if LexerATNSimulatordebug {
+ if LexerATNSimulatorDebug {
fmt.Println("DFA after MatchATN: " + this.decisionToDFA[old_mode].toLexerString())
}
return predict
}
+var countA = 0
+
func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
- if LexerATNSimulatordebug {
+
+ countA += 1
+
+ if (countA == 2) {
+ panic("GAH")
+ }
+
+ if LexerATNSimulatorDebug {
fmt.Println("start state closure=" + ds0.configs.String())
}
if ds0.isAcceptState {
@@ -168,7 +180,7 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
var s = ds0 // s is current/from DFA state
for true { // while more work
- if LexerATNSimulatordebug {
+ if LexerATNSimulatorDebug {
fmt.Println("execATN loop starting closure: " + s.configs.String())
}
@@ -215,6 +227,8 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
t = input.LA(1)
s = target // flip current DFA target becomes Newsrc/from state
}
+
+ fmt.Println("OUT")
return this.failOrAccept(this.prevAccept, input, s.configs, t)
}
@@ -236,7 +250,7 @@ func (this *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFASt
if target == nil {
target = nil
}
- if LexerATNSimulatordebug && target != nil {
+ if LexerATNSimulatorDebug && target != nil {
fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
}
return target
@@ -274,8 +288,9 @@ func (this *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState,
func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach *ATNConfigSet, t int) int {
if this.prevAccept.dfaState != nil {
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor
- this.accept(input, lexerActionExecutor, this.startIndex,
- prevAccept.index, prevAccept.line, prevAccept.column)
+ this.accept(input, lexerActionExecutor, this.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
+
+ fmt.Println(prevAccept.dfaState.prediction)
return prevAccept.dfaState.prediction
} else {
// if no accept and EOF is first char, return EOF
@@ -299,7 +314,7 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *
if currentAltReachedAcceptState && cfg.(*LexerATNConfig).passedThroughNonGreedyDecision {
continue
}
- if LexerATNSimulatordebug {
+ if LexerATNSimulatorDebug {
fmt.Printf("testing %s at %s\n", this.GetTokenName(t), cfg.String()) // this.recog, true))
}
for j := 0; j < len(cfg.GetState().GetTransitions()); j++ {
@@ -324,8 +339,8 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *
}
func (this *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
- if LexerATNSimulatordebug {
- fmt.Println("ACTION %s\n", lexerActionExecutor)
+ if LexerATNSimulatorDebug {
+ fmt.Printf("ACTION %s\n", lexerActionExecutor)
}
// seek to after last char in token
input.Seek(index)
@@ -346,12 +361,17 @@ func (this *LexerATNSimulator) getReachableTarget(trans ITransition, t int) IATN
func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState) *OrderedATNConfigSet {
+	fmt.Println("DEBUG " + strconv.Itoa(len(p.GetTransitions())))
+
var configs = NewOrderedATNConfigSet()
for i := 0; i < len(p.GetTransitions()); i++ {
var target = p.GetTransitions()[i].getTarget()
var cfg = NewLexerATNConfig6(target, i+1, PredictionContextEMPTY)
this.closure(input, cfg, configs.ATNConfigSet, false, false, false)
}
+
+	fmt.Println("DEBUG " + configs.String())
+
return configs
}
@@ -366,19 +386,21 @@ func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState)
func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, configs *ATNConfigSet,
currentAltReachedAcceptState, speculative, treatEofAsEpsilon bool) bool {
- if LexerATNSimulatordebug {
+ if LexerATNSimulatorDebug {
fmt.Println("closure(" + config.String() + ")") // config.String(this.recog, true) + ")")
}
_, ok := config.state.(*RuleStopState)
if ok {
- if LexerATNSimulatordebug {
+
+ if LexerATNSimulatorDebug {
if this.recog != nil {
- fmt.Println("closure at %s rule stop %s\n", this.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
+ fmt.Printf("closure at %s rule stop %s\n", this.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
} else {
- fmt.Println("closure at rule stop %s\n", config)
+ fmt.Printf("closure at rule stop %s\n", config)
}
}
+
if config.context == nil || config.context.hasEmptyPath() {
if config.context == nil || config.context.isEmpty() {
configs.add(config, nil)
@@ -452,7 +474,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
pt := trans.(*PredicateTransition)
- if LexerATNSimulatordebug {
+ if LexerATNSimulatorDebug {
fmt.Println("EVAL rule " + strconv.Itoa(trans.(*PredicateTransition).ruleIndex) + ":" + strconv.Itoa(pt.predIndex))
}
configs.hasSemanticContext = true
@@ -571,7 +593,7 @@ func (this *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState,
// Only track edges within the DFA bounds
return to
}
- if LexerATNSimulatordebug {
+ if LexerATNSimulatorDebug {
fmt.Println("EDGE " + from_.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
}
if from_.edges == nil {
@@ -643,6 +665,7 @@ func (this *LexerATNSimulator) consume(input CharStream) {
}
func (this *LexerATNSimulator) GetTokenName(tt int) string {
+ fmt.Println(tt)
if tt == -1 {
return "EOF"
} else {
diff --git a/runtime/Go/src/antlr4/Parser.go b/runtime/Go/src/antlr4/Parser.go
index 22bfdaaa6..54549c0de 100644
--- a/runtime/Go/src/antlr4/Parser.go
+++ b/runtime/Go/src/antlr4/Parser.go
@@ -1,4 +1,5 @@
package antlr4
+import "fmt"
type IParser interface {
IRecognizer
@@ -8,13 +9,13 @@ type IParser interface {
GetTokenStream() TokenStream
GetTokenFactory() TokenFactory
GetParserRuleContext() IParserRuleContext
- Consume() *Token
+ Consume() IToken
GetParseListeners() []ParseTreeListener
GetInputStream() IntStream
- getCurrentToken() *Token
+ getCurrentToken() IToken
getExpectedTokens() *IntervalSet
- NotifyErrorListeners(msg string, offendingToken *Token, err IRecognitionException)
+ NotifyErrorListeners(msg string, offendingToken IToken, err IRecognitionException)
isExpectedToken(symbol int) bool
getPrecedence() int
getRuleInvocationStack(IParserRuleContext) []string
@@ -23,17 +24,17 @@ type IParser interface {
type Parser struct {
*Recognizer
- Interpreter *ParserATNSimulator
+ Interpreter *ParserATNSimulator
+ BuildParseTrees bool
_input TokenStream
_errHandler IErrorStrategy
_precedenceStack IntStack
_ctx IParserRuleContext
- buildParseTrees bool
- _tracer *TraceListener
- _parseListeners []ParseTreeListener
- _SyntaxErrors int
+ _tracer *TraceListener
+ _parseListeners []ParseTreeListener
+ _SyntaxErrors int
}
// p.is all the parsing support code essentially most of it is error
@@ -42,7 +43,6 @@ func NewParser(input TokenStream) *Parser {
p := new(Parser)
-
p.Recognizer = NewRecognizer()
// The input stream.
@@ -57,7 +57,7 @@ func NewParser(input TokenStream) *Parser {
p._ctx = nil
// Specifies whether or not the parser should construct a parse tree during
// the parsing process. The default value is {@code true}.
- p.buildParseTrees = true
+ p.BuildParseTrees = true
// When {@link //setTrace}{@code (true)} is called, a reference to the
// {@link TraceListener} is stored here so it can be easily removed in a
// later call to {@link //setTrace}{@code (false)}. The listener itself is
@@ -124,14 +124,17 @@ func (p *Parser) GetParseListeners() []ParseTreeListener {
// {@code ttype} and the error strategy could not recover from the
// misMatched symbol
-func (p *Parser) Match(ttype int) *Token {
+func (p *Parser) Match(ttype int) IToken {
var t = p.getCurrentToken()
- if t.tokenType == ttype {
+
+ fmt.Println("TOKEN IS " + t.GetText())
+
+ if t.GetTokenType() == ttype {
p._errHandler.ReportMatch(p)
p.Consume()
} else {
t = p._errHandler.RecoverInline(p)
- if p.buildParseTrees && t.tokenIndex == -1 {
+ if p.BuildParseTrees && t.GetTokenIndex() == -1 {
// we must have conjured up a Newtoken during single token
// insertion
// if it's not the current symbol
@@ -157,14 +160,14 @@ func (p *Parser) Match(ttype int) *Token {
// a wildcard and the error strategy could not recover from the misMatched
// symbol
-func (p *Parser) MatchWildcard() *Token {
+func (p *Parser) MatchWildcard() IToken {
var t = p.getCurrentToken()
- if t.tokenType > 0 {
+ if t.GetTokenType() > 0 {
p._errHandler.ReportMatch(p)
p.Consume()
} else {
t = p._errHandler.RecoverInline(p)
- if p.buildParseTrees && t.tokenIndex == -1 {
+ if p.BuildParseTrees && t.GetTokenIndex() == -1 {
// we must have conjured up a Newtoken during single token
// insertion
// if it's not the current symbol
@@ -232,24 +235,24 @@ func (p *Parser) addParseListener(listener ParseTreeListener) {
//
func (p *Parser) removeParseListener(listener ParseTreeListener) {
- if (p._parseListeners != nil) {
+ if p._parseListeners != nil {
idx := -1
- for i,v := range p._parseListeners {
+ for i, v := range p._parseListeners {
if v == listener {
idx = i
- break;
+ break
}
}
- if (idx == -1){
+ if idx == -1 {
return
}
// remove the listener from the slice
- p._parseListeners = append( p._parseListeners[0:idx], p._parseListeners[idx+1:]... )
+ p._parseListeners = append(p._parseListeners[0:idx], p._parseListeners[idx+1:]...)
- if (len(p._parseListeners) == 0) {
+ if len(p._parseListeners) == 0 {
p._parseListeners = nil
}
}
@@ -385,28 +388,28 @@ func (p *Parser) setTokenStream(input TokenStream) {
// Match needs to return the current input symbol, which gets put
// into the label for the associated token ref e.g., x=ID.
//
-func (p *Parser) getCurrentToken() *Token {
+func (p *Parser) getCurrentToken() IToken {
return p._input.LT(1)
}
-func (p *Parser) NotifyErrorListeners(msg string, offendingToken *Token, err IRecognitionException) {
+func (p *Parser) NotifyErrorListeners(msg string, offendingToken IToken, err IRecognitionException) {
if offendingToken == nil {
offendingToken = p.getCurrentToken()
}
p._SyntaxErrors += 1
- var line = offendingToken.line
- var column = offendingToken.column
+ var line = offendingToken.GetLine()
+ var column = offendingToken.GetColumn()
listener := p.getErrorListenerDispatch()
listener.SyntaxError(p, offendingToken, line, column, msg, err)
}
-func (p *Parser) Consume() *Token {
+func (p *Parser) Consume() IToken {
var o = p.getCurrentToken()
- if o.tokenType != TokenEOF {
+ if o.GetTokenType() != TokenEOF {
p.GetInputStream().Consume()
}
var hasListener = p._parseListeners != nil && len(p._parseListeners) > 0
- if p.buildParseTrees || hasListener {
+ if p.BuildParseTrees || hasListener {
if p._errHandler.inErrorRecoveryMode(p) {
var node = p._ctx.addErrorNode(o)
if p._parseListeners != nil {
@@ -437,10 +440,10 @@ func (p *Parser) addContextToParseTree() {
}
func (p *Parser) EnterRule(localctx IParserRuleContext, state, ruleIndex int) {
- p.state = state
+ p.SetState(state)
p._ctx = localctx
p._ctx.setStart(p._input.LT(1))
- if p.buildParseTrees {
+ if p.BuildParseTrees {
p.addContextToParseTree()
}
if p._parseListeners != nil {
@@ -454,8 +457,8 @@ func (p *Parser) ExitRule() {
if p._parseListeners != nil {
p.TriggerExitRuleEvent()
}
- p.state = p._ctx.getInvokingState()
- if (p._ctx.GetParent() != nil){
+ p.SetState(p._ctx.getInvokingState())
+ if p._ctx.GetParent() != nil {
p._ctx = p._ctx.GetParent().(IParserRuleContext)
} else {
p._ctx = nil
@@ -465,7 +468,7 @@ func (p *Parser) ExitRule() {
func (p *Parser) EnterOuterAlt(localctx IParserRuleContext, altNum int) {
// if we have Newlocalctx, make sure we replace existing ctx
// that is previous child of parse tree
- if p.buildParseTrees && p._ctx != localctx {
+ if p.BuildParseTrees && p._ctx != localctx {
if p._ctx.GetParent() != nil {
p._ctx.GetParent().(IParserRuleContext).removeLastChild()
p._ctx.GetParent().(IParserRuleContext).addChild(localctx)
@@ -488,7 +491,7 @@ func (p *Parser) getPrecedence() int {
}
func (p *Parser) EnterRecursionRule(localctx IParserRuleContext, state, ruleIndex, precedence int) {
- p.state = state
+ p.SetState(state)
p._precedenceStack.Push(precedence)
p._ctx = localctx
p._ctx.setStart(p._input.LT(1))
@@ -509,7 +512,7 @@ func (p *Parser) PushNewRecursionContext(localctx IParserRuleContext, state, rul
p._ctx = localctx
p._ctx.setStart(previous.getStart())
- if p.buildParseTrees {
+ if p.BuildParseTrees {
p._ctx.addChild(previous)
}
if p._parseListeners != nil {
@@ -533,7 +536,7 @@ func (p *Parser) UnrollRecursionContexts(parentCtx IParserRuleContext) {
}
// hook into tree
retCtx.setParent(parentCtx)
- if p.buildParseTrees && parentCtx != nil {
+ if p.BuildParseTrees && parentCtx != nil {
// add return ctx into invoking rule's tree
parentCtx.addChild(retCtx)
}
diff --git a/runtime/Go/src/antlr4/ParserATNSimulator.go b/runtime/Go/src/antlr4/ParserATNSimulator.go
index 8c67c5f44..f3bf3dfad 100644
--- a/runtime/Go/src/antlr4/ParserATNSimulator.go
+++ b/runtime/Go/src/antlr4/ParserATNSimulator.go
@@ -47,7 +47,7 @@ func NewParserATNSimulator(parser IParser, atn *ATN, decisionToDFA []*DFA, share
return this
}
-var ParserATNSimulatorDebug = false
+var ParserATNSimulatorDebug = true
var ParserATNSimulatorListATNDecisions = false
var ParserATNSimulatorDFADebug = false
var ParserATNSimulatorRetryDebug = false
@@ -57,11 +57,14 @@ func (this *ParserATNSimulator) reset() {
func (this *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, outerContext IParserRuleContext) int {
+	fmt.Println("AdaptivePredict")
+
if ParserATNSimulatorDebug || ParserATNSimulatorListATNDecisions {
+
fmt.Println("AdaptivePredict decision " + strconv.Itoa(decision) +
" exec LA(1)==" + this.getLookaheadName(input) +
- " line " + strconv.Itoa(input.LT(1).line) + ":" +
- strconv.Itoa(input.LT(1).column))
+ " line " + strconv.Itoa(input.LT(1).GetLine()) + ":" +
+ strconv.Itoa(input.LT(1).GetColumn()))
}
this._input = input
@@ -174,7 +177,7 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
if ParserATNSimulatorDebug || ParserATNSimulatorListATNDecisions {
fmt.Println("execATN decision " + strconv.Itoa(dfa.decision) +
" exec LA(1)==" + this.getLookaheadName(input) +
- " line " + strconv.Itoa(input.LT(1).line) + ":" + strconv.Itoa(input.LT(1).column))
+ " line " + strconv.Itoa(input.LT(1).GetLine()) + ":" + strconv.Itoa(input.LT(1).GetColumn()))
}
var previousD = s0
@@ -1278,18 +1281,22 @@ func (this *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs *ATNConfig
func (this *ParserATNSimulator) GetTokenName(t int) string {
+ fmt.Println("Get token name")
+
if t == TokenEOF {
return "EOF"
}
+
if this.parser != nil && this.parser.GetLiteralNames() != nil {
if t >= len(this.parser.GetLiteralNames()) {
fmt.Println(strconv.Itoa(t) + " ttype out of range: " + strings.Join(this.parser.GetLiteralNames(), ","))
- fmt.Println(this.parser.GetInputStream().(TokenStream).GetAllText())
+// fmt.Println(this.parser.GetInputStream().(TokenStream).GetAllText()) // this seems incorrect
} else {
return this.parser.GetLiteralNames()[t] + "<" + strconv.Itoa(t) + ">"
}
}
- return "" + strconv.Itoa(t)
+
+ return strconv.Itoa(t)
}
func (this *ParserATNSimulator) getLookaheadName(input TokenStream) string {
diff --git a/runtime/Go/src/antlr4/ParserRuleContext.go b/runtime/Go/src/antlr4/ParserRuleContext.go
index 131cfaed2..c3f846f22 100644
--- a/runtime/Go/src/antlr4/ParserRuleContext.go
+++ b/runtime/Go/src/antlr4/ParserRuleContext.go
@@ -8,16 +8,16 @@ type IParserRuleContext interface {
IRuleContext
SetException(IRecognitionException)
- addTokenNode(token *Token) *TerminalNodeImpl
- addErrorNode(badToken *Token) *ErrorNodeImpl
+ addTokenNode(token IToken) *TerminalNodeImpl
+ addErrorNode(badToken IToken) *ErrorNodeImpl
EnterRule(listener ParseTreeListener)
ExitRule(listener ParseTreeListener)
- setStart(*Token)
- getStart() *Token
+ setStart(IToken)
+ getStart() IToken
- setStop(*Token)
- getStop() *Token
+ setStop(IToken)
+ getStop() IToken
addChild(child IRuleContext) IRuleContext
removeLastChild()
@@ -27,7 +27,7 @@ type ParserRuleContext struct {
*RuleContext
children []ParseTree
- start, stop *Token
+ start, stop IToken
exception IRecognitionException
}
@@ -117,7 +117,7 @@ func (prc *ParserRuleContext) removeLastChild() {
}
}
-func (prc *ParserRuleContext) addTokenNode(token *Token) *TerminalNodeImpl {
+func (prc *ParserRuleContext) addTokenNode(token IToken) *TerminalNodeImpl {
var node = NewTerminalNodeImpl(token)
prc.addTerminalNodeChild(node)
@@ -126,7 +126,7 @@ func (prc *ParserRuleContext) addTokenNode(token *Token) *TerminalNodeImpl {
}
-func (prc *ParserRuleContext) addErrorNode(badToken *Token) *ErrorNodeImpl {
+func (prc *ParserRuleContext) addErrorNode(badToken IToken) *ErrorNodeImpl {
var node = NewErrorNodeImpl(badToken)
prc.addTerminalNodeChild(node)
node.parentCtx = prc
@@ -159,19 +159,19 @@ func (prc *ParserRuleContext) getChildOfType(i int, childType reflect.Type) IRul
}
}
-func (prc *ParserRuleContext) setStart(t *Token) {
+func (prc *ParserRuleContext) setStart(t IToken) {
prc.start = t
}
-func (prc *ParserRuleContext) getStart() *Token {
+func (prc *ParserRuleContext) getStart() IToken {
return prc.start
}
-func (prc *ParserRuleContext) setStop(t *Token) {
+func (prc *ParserRuleContext) setStop(t IToken) {
prc.stop = t
}
-func (prc *ParserRuleContext) getStop() *Token {
+func (prc *ParserRuleContext) getStop() IToken {
return prc.stop
}
@@ -180,7 +180,7 @@ func (prc *ParserRuleContext) GetToken(ttype int, i int) TerminalNode {
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
if c2, ok := child.(TerminalNode); ok {
- if c2.getSymbol().tokenType == ttype {
+ if c2.getSymbol().GetTokenType() == ttype {
if i == 0 {
return c2
} else {
@@ -200,7 +200,7 @@ func (prc *ParserRuleContext) GetTokens(ttype int) []TerminalNode {
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
if tchild, ok := child.(TerminalNode); ok {
- if tchild.getSymbol().tokenType == ttype {
+ if tchild.getSymbol().GetTokenType() == ttype {
tokens = append(tokens, tchild)
}
}
@@ -242,7 +242,7 @@ func (prc *ParserRuleContext) GetSourceInterval() *Interval {
if prc.start == nil || prc.stop == nil {
return TreeINVALID_INTERVAL
} else {
- return NewInterval(prc.start.tokenIndex, prc.stop.tokenIndex)
+ return NewInterval(prc.start.GetTokenIndex(), prc.stop.GetTokenIndex())
}
}
diff --git a/runtime/Go/src/antlr4/PredictionContext.go b/runtime/Go/src/antlr4/PredictionContext.go
index 493572416..58a0b78cf 100644
--- a/runtime/Go/src/antlr4/PredictionContext.go
+++ b/runtime/Go/src/antlr4/PredictionContext.go
@@ -72,10 +72,6 @@ func (this *PredictionContext) isEmpty() bool {
return false
}
-func (this *PredictionContext) hasEmptyPath() bool {
- return this.getReturnState(this.length()-1) == PredictionContextEMPTY_RETURN_STATE
-}
-
func (this *PredictionContext) hashString() string {
return this.cachedHashString
}
@@ -88,22 +84,6 @@ func calculateEmptyHashString() string {
return ""
}
-func (this *PredictionContext) String() string {
- panic("Not implemented")
-}
-
-func (this *PredictionContext) GetParent(index int) IPredictionContext {
- panic("Not implemented")
-}
-
-func (this *PredictionContext) length() int {
- panic("Not implemented")
-}
-
-func (this *PredictionContext) getReturnState(index int) int {
- panic("Not implemented")
-}
-
// Used to cache {@link PredictionContext} objects. Its used for the shared
// context cash associated with contexts in DFA states. This cache
// can be used for both lexers and parsers.
@@ -191,6 +171,10 @@ func (this *SingletonPredictionContext) getReturnState(index int) int {
return this.returnState
}
+func (this *SingletonPredictionContext) hasEmptyPath() bool {
+ return this.returnState == PredictionContextEMPTY_RETURN_STATE
+}
+
func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
if this == other {
return true
@@ -301,6 +285,10 @@ func (c *ArrayPredictionContext) GetReturnStates() []int {
return c.returnStates
}
+func (this *ArrayPredictionContext) hasEmptyPath() bool {
+ return this.getReturnState(this.length()-1) == PredictionContextEMPTY_RETURN_STATE
+}
+
func (this *ArrayPredictionContext) isEmpty() bool {
// since EMPTY_RETURN_STATE can only appear in the last position, we
// don't need to verify that size==1
@@ -320,9 +308,7 @@ func (this *ArrayPredictionContext) getReturnState(index int) int {
}
func (this *ArrayPredictionContext) equals(other IPredictionContext) bool {
- if this == other {
- return true
- } else if _, ok := other.(*ArrayPredictionContext); !ok {
+ if _, ok := other.(*ArrayPredictionContext); !ok {
return false
} else if this.cachedHashString != other.hashString() {
return false // can't be same if hash is different
diff --git a/runtime/Go/src/antlr4/Recognizer.go b/runtime/Go/src/antlr4/Recognizer.go
index ba93037b3..8e37e20c7 100644
--- a/runtime/Go/src/antlr4/Recognizer.go
+++ b/runtime/Go/src/antlr4/Recognizer.go
@@ -83,6 +83,8 @@ func (this *Recognizer) GetState() int {
}
func (this *Recognizer) SetState(v int) {
+ fmt.Println("SETTING STATE " + strconv.Itoa(v) + " from " + strconv.Itoa(this.state))
+
this.state = v
}
@@ -159,8 +161,8 @@ func (this *Recognizer) GetTokenType(tokenName string) int {
// What is the error header, normally line/character position information?//
func (this *Recognizer) getErrorHeader(e IRecognitionException) string {
- var line = e.GetOffendingToken().line
- var column = e.GetOffendingToken().column
+ var line = e.GetOffendingToken().GetLine()
+ var column = e.GetOffendingToken().GetColumn()
return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
}
@@ -177,16 +179,16 @@ func (this *Recognizer) getErrorHeader(e IRecognitionException) string {
// feature when necessary. For example, see
// {@link DefaultErrorStrategy//GetTokenErrorDisplay}.
//
-func (this *Recognizer) GetTokenErrorDisplay(t *Token) string {
+func (this *Recognizer) GetTokenErrorDisplay(t IToken) string {
if t == nil {
return ""
}
- var s = t.text()
+ var s = t.GetText()
if s == "" {
- if t.tokenType == TokenEOF {
+ if t.GetTokenType() == TokenEOF {
s = ""
} else {
- s = "<" + strconv.Itoa(t.tokenType) + ">"
+ s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
}
}
s = strings.Replace(s, "\t", "\\t", -1)
diff --git a/runtime/Go/src/antlr4/Token.go b/runtime/Go/src/antlr4/Token.go
index 2751fae09..094550639 100644
--- a/runtime/Go/src/antlr4/Token.go
+++ b/runtime/Go/src/antlr4/Token.go
@@ -5,8 +5,6 @@ import (
"strings"
)
-
-
type TokenSourceCharStreamPair struct {
tokenSource TokenSource
charStream CharStream
@@ -16,6 +14,26 @@ type TokenSourceCharStreamPair struct {
// (so we can ignore tabs), token channel, index, and source from which
// we obtained this token.
+
+type IToken interface {
+ GetSource() *TokenSourceCharStreamPair
+ GetTokenType() int
+ GetChannel() int
+ GetStart() int
+ GetStop() int
+ GetLine() int
+ GetColumn() int
+
+ GetText() string
+ SetText(s string)
+
+ GetTokenIndex() int
+ SetTokenIndex(v int)
+
+ GetTokenSource() TokenSource
+ GetInputStream() CharStream
+}
+
type Token struct {
source *TokenSourceCharStreamPair
tokenType int // token type of the token
@@ -52,19 +70,40 @@ const (
TokenHiddenChannel = 1
)
-// Explicitly set the text for this token. If {code text} is not
-// {@code nil}, then {@link //GetText} will return this value rather than
-// extracting the text from the input.
-//
-// @param text The explicit text of the token, or {@code nil} if the text
-// should be obtained from the input along with the start and stop indexes
-// of the token.
+func (this *Token) GetChannel() int {
+ return this.channel
+}
-func (this *Token) text() string {
+func (this *Token) GetStart() int {
+ return this.start
+}
+
+func (this *Token) GetStop() int {
+ return this.stop
+}
+
+func (this *Token) GetLine() int {
+ return this.line
+}
+
+func (this *Token) GetColumn() int {
+ return this.column
+}
+
+func (this *Token) GetTokenType() int {
+ return this.tokenType
+}
+
+func (this *Token) GetSource() *TokenSourceCharStreamPair {
+ return this.source
+}
+
+func (this *Token) GetText() string {
return this._text
}
-func (this *Token) setText(s string) {
+
+func (this *Token) SetText(s string) {
this._text = s
}
@@ -72,6 +111,10 @@ func (this *Token) GetTokenIndex() int {
return this.tokenIndex
}
+func (this *Token) SetTokenIndex(v int) {
+ this.tokenIndex = v
+}
+
func (this *Token) GetTokenSource() TokenSource {
return this.source.tokenSource
}
@@ -91,7 +134,7 @@ func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start
t.Token = new(Token)
t.source = source
- t.tokenType = -1
+ t.tokenType = tokenType
t.channel = channel
t.start = start
t.stop = stop
@@ -123,16 +166,15 @@ func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start
// @param oldToken The token to copy.
//
func (ct *CommonToken) clone() *CommonToken {
- var t = NewCommonToken(ct.source, ct.tokenType, ct.channel, ct.start,
- ct.stop)
- t.tokenIndex = ct.tokenIndex
- t.line = ct.line
- t.column = ct.column
- t._text = ct.text()
+ var t = NewCommonToken(ct.source, ct.tokenType, ct.channel, ct.start, ct.stop)
+ t.tokenIndex = ct.GetTokenIndex()
+ t.line = ct.GetLine()
+ t.column = ct.GetColumn()
+ t._text = ct.GetText()
return t
}
-func (this *CommonToken) text() string {
+func (this *CommonToken) GetText() string {
if this._text != "" {
return this._text
}
@@ -148,12 +190,12 @@ func (this *CommonToken) text() string {
}
}
-func (this *CommonToken) setText(text string) {
+func (this *CommonToken) SetText(text string) {
this._text = text
}
func (this *CommonToken) String() string {
- var txt = this.text()
+ var txt = this.GetText()
if txt != "" {
txt = strings.Replace(txt, "\n", "", -1)
txt = strings.Replace(txt, "\r", "", -1)
diff --git a/runtime/Go/src/antlr4/TokenSource.go b/runtime/Go/src/antlr4/TokenSource.go
index a883a2875..36bc53eff 100644
--- a/runtime/Go/src/antlr4/TokenSource.go
+++ b/runtime/Go/src/antlr4/TokenSource.go
@@ -1,7 +1,7 @@
package antlr4
type TokenSource interface {
- nextToken() *Token
+ nextToken() IToken
skip()
more()
getLine() int
diff --git a/runtime/Go/src/antlr4/TokenStream.go b/runtime/Go/src/antlr4/TokenStream.go
index 725adcecb..e42123574 100644
--- a/runtime/Go/src/antlr4/TokenStream.go
+++ b/runtime/Go/src/antlr4/TokenStream.go
@@ -3,14 +3,14 @@ package antlr4
type TokenStream interface {
IntStream
- LT(k int) *Token
+ LT(k int) IToken
- Get(index int) *Token
+ Get(index int) IToken
GetTokenSource() TokenSource
SetTokenSource(TokenSource)
GetAllText() string
GetTextFromInterval(*Interval) string
GetTextFromRuleContext(IRuleContext) string
- GetTextFromTokens(*Token, *Token) string
+ GetTextFromTokens(IToken, IToken) string
}
diff --git a/runtime/Go/src/antlr4/TraceListener.go b/runtime/Go/src/antlr4/TraceListener.go
index 9b5a6b00a..2d2c18e38 100644
--- a/runtime/Go/src/antlr4/TraceListener.go
+++ b/runtime/Go/src/antlr4/TraceListener.go
@@ -16,7 +16,7 @@ func (this *TraceListener) VisitErrorNode(_ ErrorNode) {
}
func (this *TraceListener) EnterEveryRule(ctx IParserRuleContext) {
- fmt.Println("enter " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).text())
+ fmt.Println("enter " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).GetText())
}
func (this *TraceListener) VisitTerminal(node TerminalNode) {
@@ -24,5 +24,5 @@ func (this *TraceListener) VisitTerminal(node TerminalNode) {
}
func (this *TraceListener) ExitEveryRule(ctx IParserRuleContext) {
- fmt.Println("exit " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).text())
+ fmt.Println("exit " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).GetText())
}
diff --git a/runtime/Go/src/antlr4/Transition.go b/runtime/Go/src/antlr4/Transition.go
index e24d654d0..55967c21a 100644
--- a/runtime/Go/src/antlr4/Transition.go
+++ b/runtime/Go/src/antlr4/Transition.go
@@ -26,7 +26,8 @@ type ITransition interface {
type Transition struct {
target IATNState
isEpsilon bool
- label *IntervalSet
+ label_ int
+ label *IntervalSet
serializationType int
}
@@ -124,8 +125,6 @@ var TransitionserializationNames = []string{
// TODO: make all transitions sets? no, should remove set edges
type AtomTransition struct {
*Transition
- label_ int
- label *IntervalSet
}
func NewAtomTransition(target IATNState, label int) *AtomTransition {
@@ -236,6 +235,11 @@ func (t *RangeTransition) String() string {
return "'" + string(t.start) + "'..'" + string(t.stop) + "'"
}
+type IAbstractPredicateTransition interface {
+ ITransition
+ IAbstractPredicateTransitionFoo()
+}
+
type AbstractPredicateTransition struct {
*Transition
}
@@ -248,8 +252,10 @@ func NewAbstractPredicateTransition(target IATNState) *AbstractPredicateTransiti
return t
}
+func (a *AbstractPredicateTransition) IAbstractPredicateTransitionFoo(){}
+
type PredicateTransition struct {
- *Transition
+ *AbstractPredicateTransition
isCtxDependent bool
ruleIndex, predIndex int
@@ -258,7 +264,7 @@ type PredicateTransition struct {
func NewPredicateTransition(target IATNState, ruleIndex, predIndex int, isCtxDependent bool) *PredicateTransition {
t := new(PredicateTransition)
- t.Transition = NewTransition(target)
+ t.AbstractPredicateTransition = NewAbstractPredicateTransition(target)
t.serializationType = TransitionPREDICATE
t.ruleIndex = ruleIndex
@@ -381,7 +387,7 @@ func (t *WildcardTransition) String() string {
}
type PrecedencePredicateTransition struct {
- *Transition
+ *AbstractPredicateTransition
precedence int
}
@@ -389,7 +395,7 @@ type PrecedencePredicateTransition struct {
func NewPrecedencePredicateTransition(target IATNState, precedence int) *PrecedencePredicateTransition {
t := new(PrecedencePredicateTransition)
- t.Transition = NewTransition(target)
+ t.AbstractPredicateTransition = NewAbstractPredicateTransition(target)
t.serializationType = TransitionPRECEDENCE
t.precedence = precedence
diff --git a/runtime/Go/src/antlr4/Tree.go b/runtime/Go/src/antlr4/Tree.go
index 5b8f15205..740d36fb1 100644
--- a/runtime/Go/src/antlr4/Tree.go
+++ b/runtime/Go/src/antlr4/Tree.go
@@ -41,7 +41,7 @@ type RuleNode interface {
type TerminalNode interface {
ParseTree
- getSymbol() *Token
+ getSymbol() IToken
}
type ErrorNode interface {
@@ -87,10 +87,10 @@ type ParseTreeListener interface {
type TerminalNodeImpl struct {
parentCtx IRuleContext
- symbol *Token
+ symbol IToken
}
-func NewTerminalNodeImpl(symbol *Token) *TerminalNodeImpl {
+func NewTerminalNodeImpl(symbol IToken) *TerminalNodeImpl {
tn := new(TerminalNodeImpl)
tn.parentCtx = nil
@@ -112,7 +112,7 @@ func (this *TerminalNodeImpl) setChildren(t []Tree) {
panic("Cannot set children on terminal node")
}
-func (this *TerminalNodeImpl) getSymbol() *Token {
+func (this *TerminalNodeImpl) getSymbol() IToken {
return this.symbol
}
@@ -132,7 +132,7 @@ func (this *TerminalNodeImpl) GetSourceInterval() *Interval {
if this.symbol == nil {
return TreeINVALID_INTERVAL
}
- var tokenIndex = this.symbol.tokenIndex
+ var tokenIndex = this.symbol.GetTokenIndex()
return NewInterval(tokenIndex, tokenIndex)
}
@@ -145,14 +145,14 @@ func (this *TerminalNodeImpl) accept(Visitor ParseTreeVisitor) interface{} {
}
func (this *TerminalNodeImpl) GetText() string {
- return this.symbol.text()
+ return this.symbol.GetText()
}
func (this *TerminalNodeImpl) String() string {
- if this.symbol.tokenType == TokenEOF {
+ if this.symbol.GetTokenType() == TokenEOF {
return ""
} else {
- return this.symbol.text()
+ return this.symbol.GetText()
}
}
@@ -166,7 +166,7 @@ type ErrorNodeImpl struct {
*TerminalNodeImpl
}
-func NewErrorNodeImpl(token *Token) *ErrorNodeImpl {
+func NewErrorNodeImpl(token IToken) *ErrorNodeImpl {
en := new(ErrorNodeImpl)
en.TerminalNodeImpl = NewTerminalNodeImpl(token)
return en
diff --git a/runtime/Go/src/antlr4/Trees.go b/runtime/Go/src/antlr4/Trees.go
index 540d956f3..41e71a80e 100644
--- a/runtime/Go/src/antlr4/Trees.go
+++ b/runtime/Go/src/antlr4/Trees.go
@@ -46,15 +46,15 @@ func TreesgetNodeText(t Tree, ruleNames []string, recog *Parser) string {
return fmt.Sprint(t2)
} else if t2, ok := t.(TerminalNode); ok {
if t2.getSymbol() != nil {
- return t2.getSymbol().text()
+ return t2.getSymbol().GetText()
}
}
}
// no recog for rule names
var payload = t.getPayload()
- if p2, ok := payload.(*Token); ok {
- return p2.text()
+ if p2, ok := payload.(IToken); ok {
+ return p2.GetText()
}
return fmt.Sprint(t.getPayload())
@@ -104,7 +104,7 @@ func Trees_findAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTr
t3, ok2 := t.(IParserRuleContext)
if findTokens && ok {
- if t2.getSymbol().tokenType == index {
+ if t2.getSymbol().GetTokenType() == index {
nodes = append(nodes, t2)
}
} else if !findTokens && ok2 {
diff --git a/runtime/Go/src/antlr4/Utils.go b/runtime/Go/src/antlr4/Utils.go
index ea0999f06..910930e21 100644
--- a/runtime/Go/src/antlr4/Utils.go
+++ b/runtime/Go/src/antlr4/Utils.go
@@ -44,10 +44,6 @@ func (s *IntStack) Push(e int) {
*s = append(*s, e)
}
-func arrayString(a []interface{}) string {
- return fmt.Sprint(a)
-}
-
func hashCode(s string) string {
h := fnv.New32a()
h.Write([]byte((s)))
@@ -81,8 +77,11 @@ func NewSet(hashFunction func(interface{}) string, equalsFunction func(interface
return s
}
-func standardEqualsFunction(a interface{}, b interface{}) bool {
- return standardHashFunction(a) == standardHashFunction(b)
+func standardHashFunction(a interface{}) string {
+ h := fnv.New32a()
+ v, _ := getBytes(a)
+ h.Write(v)
+ return fmt.Sprint(h.Sum32())
}
func getBytes(key interface{}) ([]byte, error) {
@@ -95,13 +94,12 @@ func getBytes(key interface{}) ([]byte, error) {
return buf.Bytes(), nil
}
-func standardHashFunction(a interface{}) string {
- h := fnv.New32a()
- v, _ := getBytes(a)
- h.Write(v)
- return fmt.Sprint(h.Sum32())
+
+func standardEqualsFunction(a interface{}, b interface{}) bool {
+ return standardHashFunction(a) == standardHashFunction(b)
}
+
func (this *Set) length() int {
return len(this.data)
}
@@ -110,6 +108,7 @@ func (this *Set) add(value interface{}) interface{} {
var hash = this.hashFunction(value)
var key = "hash_" + hashCode(hash)
+
values := this.data[key]
if this.data[key] != nil {
diff --git a/runtime/JavaScript/src/antlr4/BufferedTokenStream.js b/runtime/JavaScript/src/antlr4/BufferedTokenStream.js
index d8a6ba3c9..2438e714a 100644
--- a/runtime/JavaScript/src/antlr4/BufferedTokenStream.js
+++ b/runtime/JavaScript/src/antlr4/BufferedTokenStream.js
@@ -177,9 +177,11 @@ BufferedTokenStream.prototype.getTokens = function(start, stop, types) {
if (types === undefined) {
types = null;
}
+
if (start < 0 || stop < 0) {
return null;
}
+
this.lazyInit();
var subset = [];
if (stop >= this.tokens.length) {
@@ -194,6 +196,7 @@ BufferedTokenStream.prototype.getTokens = function(start, stop, types) {
subset.push(t);
}
}
+
return subset;
};
diff --git a/runtime/JavaScript/src/antlr4/CommonTokenFactory.js b/runtime/JavaScript/src/antlr4/CommonTokenFactory.js
index 49971190b..90d5e38aa 100644
--- a/runtime/JavaScript/src/antlr4/CommonTokenFactory.js
+++ b/runtime/JavaScript/src/antlr4/CommonTokenFactory.js
@@ -73,6 +73,9 @@ CommonTokenFactory.prototype.constructor = CommonTokenFactory;
CommonTokenFactory.DEFAULT = new CommonTokenFactory();
CommonTokenFactory.prototype.create = function(source, type, text, channel, start, stop, line, column) {
+
+ console.log("Token factory creating: " + text)
+
var t = new CommonToken(source, type, channel, start, stop);
t.line = line;
t.column = column;
@@ -85,6 +88,9 @@ CommonTokenFactory.prototype.create = function(source, type, text, channel, star
};
CommonTokenFactory.prototype.createThin = function(type, text) {
+
+ console.log("Token factory creating: " + text)
+
var t = new CommonToken(null, type);
t.text = text;
return t;
diff --git a/runtime/JavaScript/src/antlr4/FileStream.js b/runtime/JavaScript/src/antlr4/FileStream.js
index 664056155..b7d3d8d64 100644
--- a/runtime/JavaScript/src/antlr4/FileStream.js
+++ b/runtime/JavaScript/src/antlr4/FileStream.js
@@ -41,6 +41,8 @@ function FileStream(fileName) {
var data = fs.readFileSync(fileName, "utf8");
InputStream.call(this, data);
this.fileName = fileName;
+
+ console.log(data);
return this;
}
diff --git a/runtime/JavaScript/src/antlr4/IntervalSet.js b/runtime/JavaScript/src/antlr4/IntervalSet.js
index cfeff5f83..ffb60f96c 100644
--- a/runtime/JavaScript/src/antlr4/IntervalSet.js
+++ b/runtime/JavaScript/src/antlr4/IntervalSet.js
@@ -50,6 +50,7 @@ IntervalSet.prototype.addRange = function(l, h) {
};
IntervalSet.prototype.addInterval = function(v) {
+ console.log("addInterval" + v.toString())
if (this.intervals === null) {
this.intervals = [];
this.intervals.push(v);
@@ -80,7 +81,9 @@ IntervalSet.prototype.addInterval = function(v) {
};
IntervalSet.prototype.addSet = function(other) {
+ console.log("addSet")
if (other.intervals !== null) {
+ console.log(other.intervals.length)
for (var k = 0; k < other.intervals.length; k++) {
var i = other.intervals[k];
this.addInterval(new Interval(i.start, i.stop));
diff --git a/runtime/JavaScript/src/antlr4/LL1Analyzer.js b/runtime/JavaScript/src/antlr4/LL1Analyzer.js
index c0270bb55..e42678dc6 100644
--- a/runtime/JavaScript/src/antlr4/LL1Analyzer.js
+++ b/runtime/JavaScript/src/antlr4/LL1Analyzer.js
@@ -110,7 +110,15 @@ LL1Analyzer.prototype.LOOK = function(s, stopState, ctx) {
var seeThruPreds = true; // ignore preds; get all lookahead
ctx = ctx || null;
var lookContext = ctx!==null ? predictionContextFromRuleContext(s.atn, ctx) : null;
+ console.log("DEBUG 5")
+ console.log(s.toString())
+ console.log(stopState)
+ console.log(lookContext)
+ console.log(r.toString())
+ console.log(seeThruPreds)
+ console.log("=====")
this._LOOK(s, stopState, lookContext, r, new Set(), new BitSet(), seeThruPreds, true);
+ console.log(r.toString())
return r;
};
@@ -151,6 +159,7 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
}
lookBusy.add(c);
if (s === stopState) {
+ console.log("DEBUG 6")
if (ctx ===null) {
look.addOne(Token.EPSILON);
return;
@@ -168,6 +177,7 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
return;
}
if (ctx !== PredictionContext.EMPTY) {
+ console.log("DEBUG 7")
// run thru all possible stack tops in ctx
for(var i=0; i= this.parser.literalNames.length) {
console.log("" + t + " ttype out of range: " + this.parser.literalNames);
- console.log("" + this.parser.getInputStream().getTokens());
+// console.log(this.parser.getInputStream().getTokens());
} else {
return this.parser.literalNames[t] + "<" + t + ">";
}
@@ -1529,7 +1535,7 @@ ParserATNSimulator.prototype.getTokenName = function( t) {
ParserATNSimulator.prototype.getLookaheadName = function(input) {
return this.getTokenName(input.LA(1));
};
-
+``
// Used for debugging in adaptivePredict around execATN but I cut
// it out for clarity now that alg. works well. We can leave this
// "dead" code for a bit.
diff --git a/runtime/JavaScript/src/antlr4/error/ErrorListener.js b/runtime/JavaScript/src/antlr4/error/ErrorListener.js
index 7a1b512cf..dc8442a51 100644
--- a/runtime/JavaScript/src/antlr4/error/ErrorListener.js
+++ b/runtime/JavaScript/src/antlr4/error/ErrorListener.js
@@ -74,7 +74,7 @@ ConsoleErrorListener.INSTANCE = new ConsoleErrorListener();
//
//
ConsoleErrorListener.prototype.syntaxError = function(recognizer, offendingSymbol, line, column, msg, e) {
- console.error("line " + line + ":" + column + " " + msg);
+ console.log("line " + line + ":" + column + " " + msg);
};
function ProxyErrorListener(delegates) {
diff --git a/runtime/JavaScript/src/antlr4/error/ErrorStrategy.js b/runtime/JavaScript/src/antlr4/error/ErrorStrategy.js
index f2993e713..dfb4ecbd1 100644
--- a/runtime/JavaScript/src/antlr4/error/ErrorStrategy.js
+++ b/runtime/JavaScript/src/antlr4/error/ErrorStrategy.js
@@ -244,16 +244,28 @@ DefaultErrorStrategy.prototype.sync = function(recognizer) {
if (this.inErrorRecoveryMode(recognizer)) {
return;
}
+
+ console.log("STATE" + recognizer.state)
+
var s = recognizer._interp.atn.states[recognizer.state];
var la = recognizer.getTokenStream().LA(1);
+
+ console.log("LA" + la);
+
// try cheaper subset first; might get lucky. seems to shave a wee bit off
if (la===Token.EOF || recognizer.atn.nextTokens(s).contains(la)) {
+ console.log("OK1")
return;
}
// Return but don't end recovery. only do that upon valid token match
if(recognizer.isExpectedToken(la)) {
+ console.log("OK2")
return;
}
+
+ console.log("LA" + la)
+// console.log(recognizer.GetATN().nextTokens(s, nil))
+
switch (s.stateType) {
case ATNState.BLOCK_START:
case ATNState.STAR_BLOCK_START:
diff --git a/runtime/JavaScript/src/foo.txt b/runtime/JavaScript/src/foo.txt
index e69de29bb..067b749fd 100644
--- a/runtime/JavaScript/src/foo.txt
+++ b/runtime/JavaScript/src/foo.txt
@@ -0,0 +1 @@
+1 + 2 = 3 + 5
diff --git a/runtime/JavaScript/src/test.js b/runtime/JavaScript/src/test.js
index 214b0e226..c541ea14e 100644
--- a/runtime/JavaScript/src/test.js
+++ b/runtime/JavaScript/src/test.js
@@ -1,11 +1,41 @@
var antlr4 = require("./antlr4/index"),
+ tree = antlr4.tree
ArithmeticLexer = require("./ArithmeticLexer").ArithmeticLexer,
- ArithmeticParser = require("./ArithmeticParser").ArithmeticParser;
-
+ ArithmeticParser = require("./ArithmeticParser").ArithmeticParser,
+ ArithmeticListener = require("./ArithmeticListener").ArithmeticListener;
var a = new antlr4.FileStream("foo.txt");
var l = new ArithmeticLexer(a);
var s = new antlr4.CommonTokenStream(l, 0);
var p = new ArithmeticParser(s);
+p.buildParseTrees = true;
+
+//KeyPrinter = function() {
+// ArithmeticListener.call(this); // inherit default listener
+// return this;
+//};
+//
+//// inherit default listener
+//KeyPrinter.prototype = Object.create(ArithmeticListener.prototype);
+//KeyPrinter.prototype.constructor = KeyPrinter;
+//
+//// override default listener behavior
+//KeyPrinter.prototype.exitAtom = function(ctx) {
+//
+// console.log("Oh, a atom!", ctx.start.source[1].strdata[ctx.start.start]);
+//};
+//
+//KeyPrinter.prototype.exitExpression = function(ctx) {
+//
+// console.log("Oh, an expression!", ctx);
+// throw new Error();
+//};
+
+var tree = p.equation();
+
+//var printer = new KeyPrinter();
+//antlr4.tree.ParseTreeWalker.DEFAULT.walk(printer, tree);
+
+//console.log( tree.children[0].children[0].children[0].children );
+
-p.equation();
\ No newline at end of file
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/JavaScript/JavaScript.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/JavaScript/JavaScript.stg
index b015c0b5b..43aa67dd9 100644
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/JavaScript/JavaScript.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/JavaScript/JavaScript.stg
@@ -820,15 +820,15 @@ function (input) {
. = ;}; separator="\n">
-.modeNames = [ "}; separator=", ", wrap, anchor> ];
+.prototype.modeNames = [ "}; separator=", ", wrap, anchor> ];
-.literalNames = [ }; null="null", separator=", ", wrap, anchor> ];
+.prototype.literalNames = [ }; null="null", separator=", ", wrap, anchor> ];
-.symbolicNames = [ }; null="null", separator=", ", wrap, anchor> ];
+.prototype.symbolicNames = [ }; null="null", separator=", ", wrap, anchor> ];
-.ruleNames = [ "}; separator=", ", wrap, anchor> ];
+.prototype.ruleNames = [ "}; separator=", ", wrap, anchor> ];
-.grammarFileName = "";
+.prototype.grammarFileName = "";