More bug fixing and debugging

parent 4ffb3f81b6
commit c08a19233c
@@ -0,0 +1 @@
+1 + 2 = 3 + 5
@@ -15,8 +15,12 @@ func main() {
 
 	p := parser.NewArithmeticParser(s)
 
+	p.BuildParseTrees = true
+
 	p.Equation()
 
+
+
 }
 
 
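Note: the new `p.BuildParseTrees = true` line makes the generated parser record a parse tree while it recognizes the input; without it the parse succeeds but listeners and visitors have nothing to walk. A minimal driver showing the flag in context is sketched below. `NewArithmeticParser` and `Equation` come from this commit; the input-stream, lexer, and token-stream names are assumptions about the surrounding generated code, not confirmed API:

```go
package main

import (
	"antlr4"
	"parser" // hypothetical import path for the generated Arithmetic parser
)

func main() {
	// Assumed setup: a char stream feeding a generated lexer and a token stream.
	is := antlr4.NewInputStream("1 + 2 = 3 + 5")
	lex := parser.NewArithmeticLexer(is)     // assumed generated lexer name
	s := antlr4.NewCommonTokenStream(lex, 0) // tokens on the default channel

	p := parser.NewArithmeticParser(s)
	// Without this flag the parser still recognizes the input but builds no
	// ParserRuleContext tree for later tree walks.
	p.BuildParseTrees = true
	p.Equation()
}
```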
@@ -1,4 +1,5 @@
 package antlr4
+import "fmt"
 
 var ATNINVALID_ALT_NUMBER = 0
 
@@ -55,7 +56,8 @@ func NewATN(grammarType int, maxTokenType int) *ATN {
 // restricted to tokens reachable staying within {@code s}'s rule.
 func (this *ATN) nextTokensInContext(s IATNState, ctx IRuleContext) *IntervalSet {
 	var anal = NewLL1Analyzer(this)
-	return anal.LOOK(s, nil, ctx)
+	var res = anal.LOOK(s, nil, ctx)
+	return res
 }
 
 // Compute the set of valid tokens that can occur starting in {@code s} and
@@ -63,8 +65,11 @@ func (this *ATN) nextTokensInContext(s IATNState, ctx IRuleContext) *IntervalSet
 // rule.
 func (this *ATN) nextTokensNoContext(s IATNState) *IntervalSet {
 	if s.GetNextTokenWithinRule() != nil {
+		fmt.Println("DEBUG 1")
 		return s.GetNextTokenWithinRule()
 	}
+	fmt.Println("DEBUG 2")
+	fmt.Println(this.nextTokensInContext(s, nil))
 	s.SetNextTokenWithinRule(this.nextTokensInContext(s, nil))
 	s.GetNextTokenWithinRule().readOnly = true
 	return s.GetNextTokenWithinRule()
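Note: `nextTokensNoContext` computes the follow set once, caches it on the state, and freezes it so every caller shares one immutable set (the `DEBUG` prints are temporary instrumentation). A runnable sketch of that memoize-then-freeze idiom, with stand-in types rather than the runtime's real ones:

```go
package main

import "fmt"

// IntervalSet stands in for the runtime's type; only the freeze flag matters here.
type IntervalSet struct {
	readOnly bool
	tokens   []int
}

// atnState caches its follow set the first time it is computed.
type atnState struct {
	nextTokenWithinRule *IntervalSet
}

func (s *atnState) nextTokens(compute func() *IntervalSet) *IntervalSet {
	if s.nextTokenWithinRule != nil {
		return s.nextTokenWithinRule // fast path: reuse the cached set
	}
	s.nextTokenWithinRule = compute()
	s.nextTokenWithinRule.readOnly = true // freeze the shared cached set
	return s.nextTokenWithinRule
}

func main() {
	calls := 0
	s := &atnState{}
	compute := func() *IntervalSet { calls++; return &IntervalSet{tokens: []int{1, 2}} }
	s.nextTokens(compute)
	s.nextTokens(compute)
	fmt.Println(calls) // 1: the second call hit the cache
}
```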
@@ -29,6 +29,8 @@ type IATNConfig interface {
 	SetReachesIntoOuterContext(int)
 
 	String() string
+
+	shortHashString() string
 }
 
 type ATNConfig struct {
@@ -11,20 +11,18 @@ import (
 ///
 
 func hashATNConfig(c interface{}) string {
-	return c.(*ATNConfig).shortHashString()
+	return c.(IATNConfig).shortHashString()
 }
 
 func equalATNConfigs(a, b interface{}) bool {
 
-	fmt.Println("compare")
-	fmt.Println(a)
+	if a == nil || b == nil {
+		return false
+	}
 
 	if a == b {
 		return true
 	}
-	if a == nil || b == nil {
-		return false
-	}
 
 	ai,ok := a.(IATNConfig)
 	bi,ok1 := b.(IATNConfig)
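Note: the `hashATNConfig` fix swaps a concrete-type assertion for an interface assertion. In Go, `c.(*ATNConfig)` panics whenever the stored value is any other `IATNConfig` implementation, while `c.(IATNConfig)` accepts them all. A self-contained illustration (the types here are invented for the demo, not the runtime's):

```go
package main

import "fmt"

type Hasher interface{ hash() string }

type A struct{}

func (A) hash() string { return "A" }

type B struct{}

func (B) hash() string { return "B" }

func main() {
	var v interface{} = B{}

	// Concrete assertion: only matches values of exactly type A; with the
	// two-value form it reports failure instead of panicking.
	if a, ok := v.(A); ok {
		fmt.Println(a.hash())
	}

	// Interface assertion: matches anything implementing Hasher, A or B alike.
	fmt.Println(v.(Hasher).hash()) // prints "B"
}
```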
@@ -107,8 +105,6 @@ func NewATNConfigSet(fullCtx bool) *ATNConfigSet {
 // /
 func (this *ATNConfigSet) add(config IATNConfig, mergeCache *DoubleDict) bool {
 
-	// fmt.Println("DEBUG = Adding config : " + config.String())
-
 	if this.readOnly {
 		panic("This set is readonly")
 	}
@@ -136,6 +132,7 @@ func (this *ATNConfigSet) add(config IATNConfig, mergeCache *DoubleDict) bool {
 			existing.setPrecedenceFilterSuppressed(true)
 		}
 		existing.SetContext(merged) // replace context no need to alt mapping
+
 		return true
 	}
 
@@ -288,6 +285,9 @@ func (this *ATNConfigSet) String() string {
 	return s
 }
 
+
+
+
 type OrderedATNConfigSet struct {
 	*ATNConfigSet
 }
@@ -297,7 +297,7 @@ func NewOrderedATNConfigSet() *OrderedATNConfigSet {
 	this := new(OrderedATNConfigSet)
 
 	this.ATNConfigSet = NewATNConfigSet(false)
-	this.configLookup = NewSet(nil, nil)
+	// this.configLookup = NewSet(nil, nil) // TODO not sure why this would be overriden
 
 	return this
 }
@@ -11,13 +11,15 @@
 
 package antlr4
 
-import "strconv"
+import (
+	"strconv"
+)
 
 // bt is just to keep meaningful parameter types to Parser
 type BufferedTokenStream struct {
 	tokenSource TokenSource
 
-	tokens []*Token
+	tokens []IToken
 	index int
 	fetchedEOF bool
 	channel int
@@ -33,7 +35,7 @@ func NewBufferedTokenStream(tokenSource TokenSource) *BufferedTokenStream {
 	// A collection of all tokens fetched from the token source. The list is
 	// considered a complete view of the input once {@link //fetchedEOF} is set
 	// to {@code true}.
-	ts.tokens = make([]*Token, 0)
+	ts.tokens = make([]IToken, 0)
 
 	// The index into {@link //tokens} of the current token (next token to
 	// {@link //consume}). {@link //tokens}{@code [}{@link //p}{@code ]} should
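Note: from here on the commit systematically replaces the concrete `*Token` with an `IToken` interface, so streams and error handling work with any token implementation through accessors. Below is a sketch of the interface shape implied by the call sites in these hunks (`GetTokenType`, `GetChannel`, `GetText`, `SetTokenIndex`, ...); the real interface is larger, and the `CommonToken` shown is illustrative only:

```go
package sketch

// IToken as implied by this commit's call sites; the runtime's actual
// interface has more methods (e.g. GetSource), which are omitted here.
type IToken interface {
	GetTokenType() int
	GetChannel() int
	GetText() string
	SetText(string)
	GetTokenIndex() int
	SetTokenIndex(int)
	GetLine() int
	GetColumn() int
}

// Any concrete token type can then satisfy the interface.
type CommonToken struct {
	tokenType, channel, tokenIndex, line, column int
	text                                         string
}

func (t *CommonToken) GetTokenType() int   { return t.tokenType }
func (t *CommonToken) GetChannel() int     { return t.channel }
func (t *CommonToken) GetText() string     { return t.text }
func (t *CommonToken) SetText(s string)    { t.text = s }
func (t *CommonToken) GetTokenIndex() int  { return t.tokenIndex }
func (t *CommonToken) SetTokenIndex(i int) { t.tokenIndex = i }
func (t *CommonToken) GetLine() int        { return t.line }
func (t *CommonToken) GetColumn() int      { return t.column }
```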
@@ -83,7 +85,7 @@ func (bt *BufferedTokenStream) Seek(index int) {
 	bt.index = bt.adjustSeekIndex(index)
 }
 
-func (bt *BufferedTokenStream) Get(index int) *Token {
+func (bt *BufferedTokenStream) Get(index int) IToken {
 	bt.lazyInit()
 	return bt.tokens[index]
 }
@@ -136,10 +138,10 @@ func (bt *BufferedTokenStream) fetch(n int) int {
 	}
 
 	for i := 0; i < n; i++ {
-		var t *Token = bt.tokenSource.nextToken()
-		t.tokenIndex = len(bt.tokens)
+		var t IToken = bt.tokenSource.nextToken()
+		t.SetTokenIndex( len(bt.tokens) )
 		bt.tokens = append(bt.tokens, t)
-		if t.tokenType == TokenEOF {
+		if t.GetTokenType() == TokenEOF {
 			bt.fetchedEOF = true
 			return i + 1
 		}
@@ -148,22 +150,22 @@ func (bt *BufferedTokenStream) fetch(n int) int {
 }
 
 // Get all tokens from start..stop inclusively///
-func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet) []*Token {
+func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet) []IToken {
 
 	if start < 0 || stop < 0 {
 		return nil
 	}
 	bt.lazyInit()
-	var subset = make([]*Token, 0)
+	var subset = make([]IToken, 0)
 	if stop >= len(bt.tokens) {
 		stop = len(bt.tokens) - 1
 	}
 	for i := start; i < stop; i++ {
 		var t = bt.tokens[i]
-		if t.tokenType == TokenEOF {
+		if t.GetTokenType() == TokenEOF {
 			break
 		}
-		if types == nil || types.contains(t.tokenType) {
+		if types == nil || types.contains(t.GetTokenType()) {
 			subset = append(subset, t)
 		}
 	}
@@ -171,17 +173,17 @@ func (bt *BufferedTokenStream) GetTokens(start int, stop int, types *IntervalSet
 }
 
 func (bt *BufferedTokenStream) LA(i int) int {
-	return bt.LT(i).tokenType
+	return bt.LT(i).GetTokenType()
 }
 
-func (bt *BufferedTokenStream) LB(k int) *Token {
+func (bt *BufferedTokenStream) LB(k int) IToken {
 	if bt.index-k < 0 {
 		return nil
 	}
 	return bt.tokens[bt.index-k]
 }
 
-func (bt *BufferedTokenStream) LT(k int) *Token {
+func (bt *BufferedTokenStream) LT(k int) IToken {
 	bt.lazyInit()
 	if k == 0 {
 		return nil
@@ -233,7 +235,7 @@ func (bt *BufferedTokenStream) GetTokenSource() TokenSource {
 // Reset bt token stream by setting its token source.///
 func (bt *BufferedTokenStream) SetTokenSource(tokenSource TokenSource) {
 	bt.tokenSource = tokenSource
-	bt.tokens = make([]*Token, 0)
+	bt.tokens = make([]IToken, 0)
 	bt.index = -1
 }
 
@@ -247,8 +249,8 @@ func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel int) int {
 		return -1
 	}
 	var token = bt.tokens[i]
-	for token.channel != bt.channel {
-		if token.tokenType == TokenEOF {
+	for token.GetChannel() != bt.channel {
+		if token.GetTokenType() == TokenEOF {
 			return -1
 		}
 		i += 1
@@ -262,7 +264,7 @@ func (bt *BufferedTokenStream) nextTokenOnChannel(i, channel int) int {
 // Return i if tokens[i] is on channel. Return -1 if there are no tokens
 // on channel between i and 0.
 func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel int) int {
-	for i >= 0 && bt.tokens[i].channel != channel {
+	for i >= 0 && bt.tokens[i].GetChannel() != channel {
 		i -= 1
 	}
 	return i
@@ -271,7 +273,7 @@ func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel int) int {
 // Collect all tokens on specified channel to the right of
 // the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
 // EOF. If channel is -1, find any non default channel token.
-func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []*Token {
+func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []IToken {
 	bt.lazyInit()
 	if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
 		panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
@@ -291,7 +293,7 @@ func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) [
 // Collect all tokens on specified channel to the left of
 // the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
 // If channel is -1, find any non default channel token.
-func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []*Token {
+func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []IToken {
 	bt.lazyInit()
 	if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
 		panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
@@ -306,15 +308,15 @@ func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []
 	return bt.filterForChannel(from_, to, channel)
 }
 
-func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) []*Token {
-	var hidden = make([]*Token, 0)
+func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) []IToken {
+	var hidden = make([]IToken, 0)
 	for i := left; i < right+1; i++ {
 		var t = bt.tokens[i]
 		if channel == -1 {
-			if t.channel != LexerDefaultTokenChannel {
+			if t.GetChannel() != LexerDefaultTokenChannel {
 				hidden = append(hidden, t)
 			}
-		} else if t.channel == channel {
+		} else if t.GetChannel() == channel {
 			hidden = append(hidden, t)
 		}
 	}
@@ -340,7 +342,7 @@ func (bt *BufferedTokenStream) GetAllText() string {
 	return bt.GetTextFromInterval(nil)
 }
 
-func (bt *BufferedTokenStream) GetTextFromTokens(start, end *Token) string {
+func (bt *BufferedTokenStream) GetTextFromTokens(start, end IToken) string {
 	return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
 }
 
@@ -349,11 +351,13 @@ func (bt *BufferedTokenStream) GetTextFromRuleContext(interval IRuleContext) str
 }
 
 func (bt *BufferedTokenStream) GetTextFromInterval(interval *Interval) string {
+
 	bt.lazyInit()
 	bt.fill()
 	if interval == nil {
 		interval = NewInterval(0, len(bt.tokens)-1)
 	}
+
 	var start = interval.start
 	var stop = interval.stop
 	if start < 0 || stop < 0 {
@@ -362,14 +366,16 @@ func (bt *BufferedTokenStream) GetTextFromInterval(interval *Interval) string {
 	if stop >= len(bt.tokens) {
 		stop = len(bt.tokens) - 1
 	}
+
 	var s = ""
 	for i := start; i < stop+1; i++ {
 		var t = bt.tokens[i]
-		if t.tokenType == TokenEOF {
+		if t.GetTokenType() == TokenEOF {
 			break
 		}
-		s += t.text()
+		s += t.GetText()
 	}
+
 	return s
 }
 
@@ -4,9 +4,10 @@
 //
 
 package antlr4
+import "fmt"
 
 type TokenFactory interface {
-	Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token
+	Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) IToken
 }
 
 type CommonTokenFactory struct {
@@ -45,22 +46,27 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
 //
 var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
 
-func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token {
+func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) IToken {
+
+	fmt.Println("Token factory creating: " + text)
+
 	var t = NewCommonToken(source, ttype, channel, start, stop)
 	t.line = line
 	t.column = column
 	if text != "" {
-		t.setText(text)
+		t.SetText(text)
 	} else if this.copyText && source.charStream != nil {
-		t.setText(source.charStream.GetTextFromInterval(NewInterval(start, stop)))
+		t.SetText(source.charStream.GetTextFromInterval(NewInterval(start, stop)))
 	}
 	return t.Token
 
 }
 
-func (this *CommonTokenFactory) createThin(ttype int, text string) *Token {
+func (this *CommonTokenFactory) createThin(ttype int, text string) IToken {
+
+	fmt.Println("Token factory creating: " + text)
+
 	var t = NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
-	t.setText(text)
+	t.SetText(text)
 	return t.Token
 }
 
@@ -43,7 +43,7 @@ func (ts *CommonTokenStream) adjustSeekIndex(i int) int {
 	return ts.nextTokenOnChannel(i, ts.channel)
 }
 
-func (ts *CommonTokenStream) LB(k int) *Token {
+func (ts *CommonTokenStream) LB(k int) IToken {
 	if k == 0 || ts.index-k < 0 {
 		return nil
 	}
@@ -61,7 +61,7 @@ func (ts *CommonTokenStream) LB(k int) *Token {
 	return ts.tokens[i]
 }
 
-func (ts *CommonTokenStream) LT(k int) *Token {
+func (ts *CommonTokenStream) LT(k int) IToken {
 	ts.lazyInit()
 	if k == 0 {
 		return nil
@@ -88,10 +88,10 @@ func (ts *CommonTokenStream) getNumberOfOnChannelTokens() int {
 	ts.fill()
 	for i := 0; i < len(ts.tokens); i++ {
 		var t = ts.tokens[i]
-		if t.channel == ts.channel {
+		if t.GetChannel() == ts.channel {
 			n += 1
 		}
-		if t.tokenType == TokenEOF {
+		if t.GetTokenType() == TokenEOF {
 			break
 		}
 	}
@@ -5,7 +5,12 @@ import (
 	"strconv"
 )
 
-// A DFA walker that knows how to dump them to serialized strings.#/
+// A DFA walker that knows how to dump them to serialized strings.
 
+type IDFASerializer interface {
+
+}
+
+
 type DFASerializer struct {
 	dfa *DFA
@@ -66,14 +71,12 @@ func (this *DFASerializer) String() string {
 func (this *DFASerializer) getEdgeLabel(i int) string {
 	if i == 0 {
 		return "EOF"
-	} else if this.literalNames != nil || this.symbolicNames != nil {
-		if this.literalNames[i-1] == "" {
+	} else if this.literalNames != nil && i - 1 < len(this.literalNames) {
 		return this.literalNames[i-1]
-		} else {
+	} else if this.symbolicNames != nil && i - 1 < len(this.symbolicNames) {
 		return this.symbolicNames[i-1]
-		}
 	} else {
-		return string(i - 1)
+		return strconv.Itoa(i-1)
 	}
 }
 
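Note: two fixes in `getEdgeLabel`. Each name table is now bounds-checked before indexing (the old code indexed `literalNames` after a mere non-nil test), and the fallback uses `strconv.Itoa`. In Go, converting an integer with `string(...)` yields the UTF-8 character for that code point, not its decimal text, so numeric edge labels came out as stray glyphs or control characters. A quick runnable demonstration:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	i := 66
	fmt.Println(string(rune(i - 1))) // "A": the character with code point 65
	fmt.Println(strconv.Itoa(i - 1)) // "65": the decimal string actually wanted
}
```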
@@ -101,6 +104,8 @@ func (this *DFASerializer) GetStateString(s *DFAState) string {
 	}
 }
 
+
+
 type LexerDFASerializer struct {
 	*DFASerializer
 }
@@ -117,3 +122,35 @@ func NewLexerDFASerializer(dfa *DFA) *LexerDFASerializer {
 func (this *LexerDFASerializer) getEdgeLabel(i int) string {
 	return "'" + string(i) + "'"
 }
+
+func (this *LexerDFASerializer) String() string {
+
+	if this.dfa.s0 == nil {
+		return ""
+	}
+
+	var buf = ""
+	var states = this.dfa.sortedStates()
+	for i := 0; i < len(states); i++ {
+		var s = states[i]
+		if s.edges != nil {
+			var n = len(s.edges)
+			for j := 0; j < n; j++ {
+				var t = s.edges[j]
+				if t != nil && t.stateNumber != 0x7FFFFFFF {
+					buf += this.GetStateString(s)
+					buf += "-"
+					buf += this.getEdgeLabel(j)
+					buf += "->"
+					buf += this.GetStateString(t)
+					buf += "\n"
+				}
+			}
+		}
+	}
+	if len(buf) == 0 {
+		return ""
+	}
+
+	return buf
+}
@@ -132,6 +132,7 @@ func (this *DFAState) GetAltSet() *Set {
 // {@link ParserATNSimulator//addDFAState} we need to know if any other state
 // exists that has this exact set of ATN configurations. The
 // {@link //stateNumber} is irrelevant.</p>
+
 func (this *DFAState) equals(other interface{}) bool {
 
 	if this == other {
@@ -151,7 +152,7 @@ func (this *DFAState) hashString() string {
 
 	var s string
 	if (this.isAcceptState) {
-		if (this.predicates == nil) {
+		if (this.predicates != nil) {
 			s = "=>" + fmt.Sprint(this.predicates)
 		} else {
 			s = "=>" + fmt.Sprint(this.prediction)
@@ -24,15 +24,19 @@ func NewErrorListener() *DefaultErrorListener {
 }
 
 func (this *DefaultErrorListener) SyntaxError(recognizer IRecognizer, offendingSymbol interface{}, line, column int, msg string, e IRecognitionException) {
+	fmt.Println("SyntaxError!")
 }
 
 func (this *DefaultErrorListener) ReportAmbiguity(recognizer IParser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
+	fmt.Println("ReportAmbiguity!")
 }
 
 func (this *DefaultErrorListener) ReportAttemptingFullContext(recognizer IParser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet) {
+	fmt.Println("ReportAttemptingFullContext!")
 }
 
 func (this *DefaultErrorListener) ReportContextSensitivity(recognizer IParser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet) {
+	fmt.Println("ReportContextSensitivity!")
 }
 
 type ConsoleErrorListener struct {
@@ -9,7 +9,7 @@ import (
 
 type IErrorStrategy interface {
 	reset(IParser)
-	RecoverInline(IParser) *Token
+	RecoverInline(IParser) IToken
 	Recover(IParser, IRecognitionException)
 	Sync(IParser)
 	inErrorRecoveryMode(IParser) bool
@@ -208,20 +208,35 @@ func (this *DefaultErrorStrategy) Sync(recognizer IParser) {
 	if this.inErrorRecoveryMode(recognizer) {
 		return
 	}
 
+	fmt.Println("STATE" + strconv.Itoa(recognizer.GetState()))
+
 	var s = recognizer.GetInterpreter().atn.states[recognizer.GetState()]
 	var la = recognizer.GetTokenStream().LA(1)
 
+	fmt.Println("LA" + strconv.Itoa(la))
+
 	// try cheaper subset first might get lucky. seems to shave a wee bit off
 	if la == TokenEOF || recognizer.GetATN().nextTokens(s, nil).contains(la) {
+		fmt.Println("OK1")
 		return
 	}
 	// Return but don't end recovery. only do that upon valid token Match
 	if recognizer.isExpectedToken(la) {
+		fmt.Println("OK2")
 		return
 	}
 
+	fmt.Println("LA" + strconv.Itoa(la))
+	fmt.Println(recognizer.GetATN().nextTokens(s, nil))
+
 	switch s.GetStateType() {
 	case ATNStateBLOCK_START:
+		fallthrough
 	case ATNStateSTAR_BLOCK_START:
+		fallthrough
 	case ATNStatePLUS_BLOCK_START:
+		fallthrough
 	case ATNStateSTAR_LOOP_ENTRY:
 		// Report error and recover if possible
 		if this.singleTokenDeletion(recognizer) != nil {
@@ -229,15 +244,14 @@ func (this *DefaultErrorStrategy) Sync(recognizer IParser) {
 		} else {
 			panic(NewInputMisMatchException(recognizer))
 		}
-		break
 	case ATNStatePLUS_LOOP_BACK:
+		fallthrough
 	case ATNStateSTAR_LOOP_BACK:
 		this.ReportUnwantedToken(recognizer)
 		var expecting = NewIntervalSet()
 		expecting.addSet(recognizer.getExpectedTokens())
 		var whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer))
 		this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
-		break
 	default:
 		// do nothing if we can't identify the exact kind of ATN state
 	}
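Note: the `Sync` switch rewrite is a Java-to-Go porting fix. Go `switch` never falls through implicitly and needs no `break`, so the previously stacked empty cases matched a state and then did nothing; explicit `fallthrough` restores the intended case chaining, and the removed `break` statements were redundant. A compact runnable illustration:

```go
package main

import "fmt"

func classify(state int) string {
	res := "other"
	switch state {
	case 1:
		// In Go an empty case matches and stops; it does NOT fall through
		// to the next case the way Java does. fallthrough makes it explicit.
		fallthrough
	case 2:
		fallthrough
	case 3:
		res = "block start"
	case 4:
		res = "loop back" // no break needed: Go never falls through implicitly
	}
	return res
}

func main() {
	fmt.Println(classify(1)) // "block start"
	fmt.Println(classify(4)) // "loop back"
}
```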
@@ -255,7 +269,7 @@ func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer IParser,
 	var tokens = recognizer.GetTokenStream()
 	var input string
 	if tokens != nil {
-		if e.startToken.tokenType == TokenEOF {
+		if e.startToken.GetTokenType() == TokenEOF {
 			input = "<EOF>"
 		} else {
 			input = tokens.GetTextFromTokens(e.startToken, e.offendingToken)
@@ -279,6 +293,7 @@ func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer IParser,
 func (this *DefaultErrorStrategy) ReportInputMisMatch(recognizer IParser, e *InputMisMatchException) {
 	var msg = "misMatched input " + this.GetTokenErrorDisplay(e.offendingToken) +
 		" expecting " + e.getExpectedTokens().StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
+	panic(msg)
 	recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
 }
 
@@ -324,6 +339,7 @@ func (this *DefaultErrorStrategy) ReportUnwantedToken(recognizer IParser) {
 	var expecting = this.getExpectedTokens(recognizer)
 	var msg = "extraneous input " + tokenName + " expecting " +
 		expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
+	panic(msg)
 	recognizer.NotifyErrorListeners(msg, t, nil)
 }
 
@@ -404,7 +420,7 @@ func (this *DefaultErrorStrategy) ReportMissingToken(recognizer IParser) {
 // is in the set of tokens that can follow the {@code ')'} token reference
 // in rule {@code atom}. It can assume that you forgot the {@code ')'}.
 //
-func (this *DefaultErrorStrategy) RecoverInline(recognizer IParser) *Token {
+func (this *DefaultErrorStrategy) RecoverInline(recognizer IParser) IToken {
 	// SINGLE TOKEN DELETION
 	var MatchedSymbol = this.singleTokenDeletion(recognizer)
 	if MatchedSymbol != nil {
@@ -473,7 +489,7 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer IParser) bool
 // deletion successfully recovers from the misMatched input, otherwise
 // {@code nil}
 //
-func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer IParser) *Token {
+func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer IParser) IToken {
 	var nextTokenType = recognizer.GetTokenStream().LA(2)
 	var expecting = this.getExpectedTokens(recognizer)
 	if expecting.contains(nextTokenType) {
@@ -511,7 +527,7 @@ func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer IParser) *Token
 // If you change what tokens must be created by the lexer,
 // override this method to create the appropriate tokens.
 //
-func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) *Token {
+func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) IToken {
 	var currentSymbol = recognizer.getCurrentToken()
 	var expecting = this.getExpectedTokens(recognizer)
 	var expectedTokenType = expecting.first()
@@ -523,12 +539,12 @@ func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) *Token {
 	}
 	var current = currentSymbol
 	var lookback = recognizer.GetTokenStream().LT(-1)
-	if current.tokenType == TokenEOF && lookback != nil {
+	if current.GetTokenType() == TokenEOF && lookback != nil {
 		current = lookback
 	}
 
 	tf := recognizer.GetTokenFactory()
-	return tf.Create(current.source, expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.line, current.column)
+	return tf.Create( current.GetSource(), expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.GetLine(), current.GetColumn())
 }
 
 func (this *DefaultErrorStrategy) getExpectedTokens(recognizer IParser) *IntervalSet {
@@ -543,16 +559,16 @@ func (this *DefaultErrorStrategy) getExpectedTokens(recognizer IParser) *Interva
 // your token objects because you don't have to go modify your lexer
 // so that it creates a NewJava type.
 //
-func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t *Token) string {
+func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t IToken) string {
 	if t == nil {
 		return "<no token>"
 	}
-	var s = t.text()
+	var s = t.GetText()
 	if s == "" {
-		if t.tokenType == TokenEOF {
+		if t.GetTokenType() == TokenEOF {
 			s = "<EOF>"
 		} else {
-			s = "<" + strconv.Itoa(t.tokenType) + ">"
+			s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
 		}
 	}
 	return this.escapeWSAndQuote(s)
@@ -9,7 +9,7 @@ import ()
 // and what kind of problem occurred.
 
 type IRecognitionException interface {
-	GetOffendingToken() *Token
+	GetOffendingToken() IToken
 	GetMessage() string
 	GetInputStream() IntStream
 }
@@ -17,7 +17,7 @@ type IRecognitionException interface {
 type RecognitionException struct {
 	message string
 	recognizer IRecognizer
-	offendingToken *Token
+	offendingToken IToken
 	offendingState int
 	ctx IRuleContext
 	input IntStream
@@ -62,7 +62,7 @@ func (this *RecognitionException) GetMessage() string {
 	return this.message
 }
 
-func (this *RecognitionException) GetOffendingToken() *Token {
+func (this *RecognitionException) GetOffendingToken() IToken {
 	return this.offendingToken
 }
 
@@ -124,8 +124,8 @@ func (this *LexerNoViableAltException) String() string {
 type NoViableAltException struct {
 	*RecognitionException
 
-	startToken *Token
-	offendingToken *Token
+	startToken IToken
+	offendingToken IToken
 	ctx IParserRuleContext
 	deadEndConfigs *ATNConfigSet
 }
@@ -135,7 +135,7 @@ type NoViableAltException struct {
 // of the offending input and also knows where the parser was
 // in the various paths when the error. Reported by ReportNoViableAlternative()
 //
-func NewNoViableAltException(recognizer IParser, input TokenStream, startToken *Token, offendingToken *Token, deadEndConfigs *ATNConfigSet, ctx IParserRuleContext) *NoViableAltException {
+func NewNoViableAltException(recognizer IParser, input TokenStream, startToken IToken, offendingToken IToken, deadEndConfigs *ATNConfigSet, ctx IParserRuleContext) *NoViableAltException {
 
 	if ctx == nil {
 		ctx = recognizer.GetParserRuleContext()
@@ -32,6 +32,7 @@ func (is *InputStream) Consume() {
 }
 
 func (is *InputStream) LA(offset int) int {
+
 	if offset == 0 {
 		return 0 // nil
 	}
@@ -39,9 +40,11 @@ func (is *InputStream) LA(offset int) int {
 		offset += 1 // e.g., translate LA(-1) to use offset=0
 	}
 	var pos = is.index + offset - 1
+
 	if pos < 0 || pos >= is.size { // invalid
 		return TokenEOF
 	}
+
 	return int(is.data[pos])
 }
 
@@ -3,6 +3,7 @@ package antlr4
 import (
 	"strconv"
 	"strings"
+	"fmt"
 )
 
 type Interval struct {
@@ -67,6 +68,7 @@ func (i *IntervalSet) addRange(l, h int) {
 }
 
 func (is *IntervalSet) addInterval(v *Interval) {
+	fmt.Println("addInterval" + v.String())
 	if is.intervals == nil {
 		is.intervals = make([]*Interval, 0)
 		is.intervals = append(is.intervals, v)
@@ -94,7 +96,9 @@ func (is *IntervalSet) addInterval(v *Interval) {
 }
 
 func (i *IntervalSet) addSet(other *IntervalSet) *IntervalSet {
+	fmt.Println("addSet")
 	if other.intervals != nil {
+		fmt.Println(len(other.intervals))
 		for k := 0; k < len(other.intervals); k++ {
 			var i2 = other.intervals[k]
 			i.addInterval(NewInterval(i2.start, i2.stop))
@@ -249,6 +253,7 @@ func (is *IntervalSet) toCharString() string {
 }
 
 func (is *IntervalSet) toIndexString() string {
+
 	var names = make([]string, 0)
 	for i := 0; i < len(is.intervals); i++ {
 		var v = is.intervals[i]
@@ -256,10 +261,10 @@ func (is *IntervalSet) toIndexString() string {
 			if v.start == TokenEOF {
 				names = append(names, "<EOF>")
 			} else {
-				names = append(names, string(v.start))
+				names = append(names, strconv.Itoa(v.start))
 			}
 		} else {
-			names = append(names, string(v.start)+".."+string(v.stop-1))
+			names = append(names, strconv.Itoa(v.start)+".."+strconv.Itoa(v.stop-1))
 		}
 	}
 	if len(names) > 1 {
@@ -1,6 +1,8 @@
 package antlr4
 
-import ()
+import (
+	"fmt"
+)
 
 type LL1Analyzer struct {
 	atn *ATN
@@ -71,9 +73,18 @@ func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx IRuleContext) *IntervalS
 	var seeThruPreds = true // ignore preds get all lookahead
 	var lookContext IPredictionContext
 	if ctx != nil {
-		predictionContextFromRuleContext(s.GetATN(), ctx)
+		lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)
 	}
+	fmt.Println("DEBUG 5")
+	// fmt.Println("DEBUG" + lookContext.String())
+	fmt.Println(s)
+	fmt.Println(stopState)
+	fmt.Println(lookContext)
+	fmt.Println(r)
+	fmt.Println(seeThruPreds)
+	fmt.Println("=====")
 	la._LOOK(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
+	fmt.Println(r)
 	return r
 }
 
@ -107,15 +118,36 @@ func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx IRuleContext) *IntervalS
|
||||||
// outermost context is reached. This parameter has no effect if {@code ctx}
|
// outermost context is reached. This parameter has no effect if {@code ctx}
|
||||||
// is {@code nil}.
|
// is {@code nil}.
|
||||||
|
|
||||||
|
|
||||||
|
func (la *LL1Analyzer) __LOOK(s, stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int){
|
||||||
|
|
||||||
|
returnState := la.atn.states[ctx.getReturnState(i)]
|
||||||
|
|
||||||
|
removed := calledRuleStack.contains(returnState.GetRuleIndex())
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
if removed {
|
||||||
|
calledRuleStack.add(returnState.GetRuleIndex())
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
calledRuleStack.remove(returnState.GetRuleIndex())
|
||||||
|
la._LOOK(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
|
func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
|
||||||
|
|
||||||
c := NewATNConfig6(s, 0, ctx)
|
c := NewATNConfig6(s, 0, ctx)
|
||||||
|
|
||||||
if lookBusy.add(c) == nil {
|
if lookBusy.contains(c) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
lookBusy.add(c)
|
||||||
|
|
||||||
if s == stopState {
|
if s == stopState {
|
||||||
|
fmt.Println("DEBUG 6")
|
||||||
if ctx == nil {
|
if ctx == nil {
|
||||||
look.addOne(TokenEpsilon)
|
look.addOne(TokenEpsilon)
|
||||||
return
|
return
|
||||||
|
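Note: `_LOOK` now tests membership in `lookBusy` before inserting, instead of keying the early return off `add`'s return value. That busy set is what terminates the recursion on cyclic ATN paths. A runnable sketch of the guard, with a plain map standing in for the runtime's `Set` type:

```go
package main

import "fmt"

type config struct{ state int }

// look terminates on cycles because each configuration is recorded in busy
// before its successors are explored.
func look(c config, busy map[config]bool, next func(config) []config) {
	if busy[c] {
		return // seen already on this search: cut off the cycle
	}
	busy[c] = true
	for _, n := range next(c) {
		look(n, busy, next)
	}
}

func main() {
	// A two-state cycle: 0 -> 1 -> 0. Without the busy set this would
	// recurse forever.
	next := func(c config) []config { return []config{{(c.state + 1) % 2}} }
	busy := map[config]bool{}
	look(config{0}, busy, next)
	fmt.Println(len(busy)) // 2
}
```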
@ -137,24 +169,13 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
|
||||||
}
|
}
|
||||||
|
|
||||||
if ctx != PredictionContextEMPTY {
|
if ctx != PredictionContextEMPTY {
|
||||||
|
fmt.Println("DEBUG 7")
|
||||||
|
|
||||||
// run thru all possible stack tops in ctx
|
// run thru all possible stack tops in ctx
|
||||||
for i := 0; i < ctx.length(); i++ {
|
for i := 0; i < ctx.length(); i++ {
|
||||||
|
|
||||||
returnState := la.atn.states[ctx.getReturnState(i)]
|
returnState := la.atn.states[ctx.getReturnState(i)]
|
||||||
// System.out.println("popping back to "+retState)
|
la.__LOOK(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
|
||||||
|
|
||||||
removed := calledRuleStack.contains(returnState.GetRuleIndex())
|
|
||||||
|
|
||||||
// TODO this is incorrect
|
|
||||||
defer func() {
|
|
||||||
if removed {
|
|
||||||
calledRuleStack.add(returnState.GetRuleIndex())
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
calledRuleStack.clear(returnState.GetRuleIndex())
|
|
||||||
la._LOOK(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
|
||||||
|
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
|
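Note: this hunk removes the `defer` inside the loop that the old `// TODO this is incorrect` comment flagged. A deferred call runs when the enclosing *function* returns, not at the end of a loop iteration, so every iteration's `calledRuleStack` restore was postponed until `_LOOK` exited. Extracting the iteration body into `__LOOK` (previous hunk) makes the `defer` fire once per iteration. The difference, runnable:

```go
package main

import "fmt"

func deferInLoop() {
	for i := 0; i < 3; i++ {
		defer fmt.Println("restore", i) // all three run only when deferInLoop returns
	}
	fmt.Println("loop done")
}

func perIteration() {
	for i := 0; i < 3; i++ {
		func(i int) {
			defer fmt.Println("restore", i) // fires as each iteration's function exits
			fmt.Println("work", i)
		}(i)
	}
}

func main() {
	deferInLoop()  // prints: loop done, restore 2, restore 1, restore 0
	perIteration() // prints: work 0, restore 0, work 1, restore 1, work 2, restore 2
}
```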
@ -167,6 +188,7 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
|
||||||
t := s.GetTransitions()[i]
|
t := s.GetTransitions()[i]
|
||||||
|
|
||||||
if t1, ok := t.(*RuleTransition); ok {
|
if t1, ok := t.(*RuleTransition); ok {
|
||||||
|
fmt.Println("DEBUG 8")
|
||||||
|
|
||||||
if calledRuleStack.contains(t1.getTarget().GetRuleIndex()) {
|
if calledRuleStack.contains(t1.getTarget().GetRuleIndex()) {
|
||||||
continue
|
continue
|
||||||
|
@ -174,24 +196,34 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
|
||||||
|
|
||||||
newContext := SingletonPredictionContextCreate(ctx, t1.followState.GetStateNumber())
|
newContext := SingletonPredictionContextCreate(ctx, t1.followState.GetStateNumber())
|
||||||
|
|
||||||
defer func() {
|
la.___LOOK(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
|
||||||
calledRuleStack.remove(t1.getTarget().GetRuleIndex())
|
|
||||||
}()
|
|
||||||
|
|
||||||
calledRuleStack.add(t1.getTarget().GetRuleIndex())
|
fmt.Println(look)
|
||||||
la._LOOK(t.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
//
|
||||||
} else if t2, ok := t.(*AbstractPredicateTransition); ok {
|
// defer func() {
|
||||||
|
// calledRuleStack.remove(t1.getTarget().GetRuleIndex())
|
||||||
|
// }()
|
||||||
|
//
|
||||||
|
// calledRuleStack.add(t1.getTarget().GetRuleIndex())
|
||||||
|
// la._LOOK(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||||
|
|
||||||
|
} else if t2, ok := t.(IAbstractPredicateTransition); ok {
|
||||||
|
fmt.Println("DEBUG 9")
|
||||||
if seeThruPreds {
|
if seeThruPreds {
|
||||||
la._LOOK(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
la._LOOK(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||||
} else {
|
} else {
|
||||||
look.addOne(LL1AnalyzerHIT_PRED)
|
look.addOne(LL1AnalyzerHIT_PRED)
|
||||||
}
|
}
|
||||||
} else if t.getIsEpsilon() {
|
} else if t.getIsEpsilon() {
|
||||||
|
fmt.Println("DEBUG 10")
|
||||||
la._LOOK(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
la._LOOK(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||||
} else if _, ok := t.(*WildcardTransition); ok {
|
} else if _, ok := t.(*WildcardTransition); ok {
|
||||||
|
fmt.Println("DEBUG 11")
|
||||||
look.addRange(TokenMinUserTokenType, la.atn.maxTokenType)
|
look.addRange(TokenMinUserTokenType, la.atn.maxTokenType)
|
||||||
} else {
|
} else {
|
||||||
|
fmt.Println("DEBUG 12")
|
||||||
set := t.getLabel()
|
set := t.getLabel()
|
||||||
|
fmt.Println(set)
|
||||||
if set != nil {
|
if set != nil {
|
||||||
if _, ok := t.(*NotSetTransition); ok {
|
if _, ok := t.(*NotSetTransition); ok {
|
||||||
set = set.complement(TokenMinUserTokenType, la.atn.maxTokenType)
|
set = set.complement(TokenMinUserTokenType, la.atn.maxTokenType)
|
||||||
|
@ -201,3 +233,17 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (la *LL1Analyzer) ___LOOK(stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
|
||||||
|
|
||||||
|
newContext := SingletonPredictionContextCreate(ctx, t1.followState.GetStateNumber())
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
calledRuleStack.remove(t1.getTarget().GetRuleIndex())
|
||||||
|
}()
|
||||||
|
|
||||||
|
calledRuleStack.add(t1.getTarget().GetRuleIndex())
|
||||||
|
la._LOOK(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
|
@@ -30,7 +30,7 @@ type Lexer struct {
 	_input CharStream
 	_factory TokenFactory
 	_tokenFactorySourcePair *TokenSourceCharStreamPair
-	_token *Token
+	_token IToken
 	_tokenStartCharIndex int
 	_tokenStartLine int
 	_tokenStartColumn int
@@ -166,12 +166,11 @@ func (l *Lexer) safeMatch() (ret int) {
 }
 
 // Return a token from l source i.e., Match a token on the char stream.
-func (l *Lexer) nextToken() *Token {
+func (l *Lexer) nextToken() IToken {
 	if l._input == nil {
 		panic("nextToken requires a non-nil input stream.")
 	}
 
-	// do this when done consuming
 	var tokenStartMarker = l._input.Mark()
 
 	// previously in finally block
@@ -244,7 +243,7 @@ func (l *Lexer) mode(m int) {
 }
 
 func (l *Lexer) pushMode(m int) {
-	if LexerATNSimulatordebug {
+	if LexerATNSimulatorDebug {
 		fmt.Println("pushMode " + strconv.Itoa(m))
 	}
 	l._modeStack.Push(l._mode)
@@ -255,7 +254,7 @@ func (l *Lexer) popMode() int {
 	if len(l._modeStack) == 0 {
 		panic("Empty Stack")
 	}
-	if LexerATNSimulatordebug {
+	if LexerATNSimulatorDebug {
 		fmt.Println("popMode back to " + fmt.Sprint(l._modeStack[0:len(l._modeStack)-1]))
 	}
 	i, _ := l._modeStack.Pop()
@@ -280,7 +279,7 @@ func (l *Lexer) setInputStream(input CharStream) {
 // and GetToken (to push tokens into a list and pull from that list
 // rather than a single variable as l implementation does).
 // /
-func (l *Lexer) emitToken(token *Token) {
+func (l *Lexer) emitToken(token IToken) {
 	l._token = token
 }
 
@@ -290,13 +289,13 @@ func (l *Lexer) emitToken(token *Token) {
 // use that to set the token's text. Override l method to emit
 // custom Token objects or provide a Newfactory.
 // /
-func (l *Lexer) emit() *Token {
+func (l *Lexer) emit() IToken {
 	var t = l._factory.Create(l._tokenFactorySourcePair, l._type, l._text, l._channel, l._tokenStartCharIndex, l.getCharIndex()-1, l._tokenStartLine, l._tokenStartColumn)
 	l.emitToken(t)
 	return t
 }
 
-func (l *Lexer) emitEOF() *Token {
+func (l *Lexer) emitEOF() IToken {
 	cpos := l.getCharPositionInLine()
 	lpos := l.getLine()
 	var eof = l._factory.Create(l._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, l._input.Index(), l._input.Index()-1, lpos, cpos)
@@ -346,11 +345,13 @@ func (this *Lexer) GetATN() *ATN {
 // Return a list of all Token objects in input char stream.
 // Forces load of all tokens. Does not include EOF token.
 // /
-func (l *Lexer) getAllTokens() []*Token {
-	var tokens = make([]*Token, 0)
+func (l *Lexer) getAllTokens() []IToken {
+	fmt.Println("getAllTokens")
+	var tokens = make([]IToken, 0)
 	var t = l.nextToken()
-	for t.tokenType != TokenEOF {
+	for t.GetTokenType() != TokenEOF {
 		tokens = append(tokens, t)
+		fmt.Println("getAllTokens")
 		t = l.nextToken()
 	}
 	return tokens
|
@ -88,8 +88,8 @@ func NewLexerATNSimulator(recog ILexer, atn *ATN, decisionToDFA []*DFA, sharedCo
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
var LexerATNSimulatordebug = false
|
var LexerATNSimulatorDebug = true
|
||||||
var LexerATNSimulatordfa_debug = false
|
var LexerATNSimulatorDFADebug = false
|
||||||
|
|
||||||
var LexerATNSimulatorMIN_DFA_EDGE = 0
|
var LexerATNSimulatorMIN_DFA_EDGE = 0
|
||||||
var LexerATNSimulatorMAX_DFA_EDGE = 127 // forces unicode to stay in ATN
|
var LexerATNSimulatorMAX_DFA_EDGE = 127 // forces unicode to stay in ATN
|
||||||
|
@ -105,6 +105,9 @@ func (this *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
|
||||||
|
|
||||||
func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
|
func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
|
||||||
|
|
||||||
|
fmt.Println("Match")
|
||||||
|
|
||||||
|
|
||||||
this.Match_calls += 1
|
this.Match_calls += 1
|
||||||
this.mode = mode
|
this.mode = mode
|
||||||
var mark = input.Mark()
|
var mark = input.Mark()
|
||||||
|
@ -134,7 +137,7 @@ func (this *LexerATNSimulator) reset() {
|
||||||
func (this *LexerATNSimulator) MatchATN(input CharStream) int {
|
func (this *LexerATNSimulator) MatchATN(input CharStream) int {
|
||||||
var startState = this.atn.modeToStartState[this.mode]
|
var startState = this.atn.modeToStartState[this.mode]
|
||||||
|
|
||||||
if LexerATNSimulatordebug {
|
if LexerATNSimulatorDebug {
|
||||||
fmt.Println("MatchATN mode " + strconv.Itoa(this.mode) + " start: " + startState.String())
|
fmt.Println("MatchATN mode " + strconv.Itoa(this.mode) + " start: " + startState.String())
|
||||||
}
|
}
|
||||||
var old_mode = this.mode
|
var old_mode = this.mode
|
||||||
|
@ -150,14 +153,23 @@ func (this *LexerATNSimulator) MatchATN(input CharStream) int {
|
||||||
|
|
||||||
var predict = this.execATN(input, next)
|
var predict = this.execATN(input, next)
|
||||||
|
|
||||||
if LexerATNSimulatordebug {
|
if LexerATNSimulatorDebug {
|
||||||
fmt.Println("DFA after MatchATN: " + this.decisionToDFA[old_mode].toLexerString())
|
fmt.Println("DFA after MatchATN: " + this.decisionToDFA[old_mode].toLexerString())
|
||||||
}
|
}
|
||||||
return predict
|
return predict
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var countA = 0
|
||||||
|
|
||||||
func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
|
func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
|
||||||
if LexerATNSimulatordebug {
|
|
||||||
|
countA += 1
|
||||||
|
|
||||||
|
if countA == 2 {
|
||||||
|
panic("GAH")
|
||||||
|
}
|
||||||
|
|
||||||
|
if LexerATNSimulatorDebug {
|
||||||
fmt.Println("start state closure=" + ds0.configs.String())
|
fmt.Println("start state closure=" + ds0.configs.String())
|
||||||
}
|
}
|
||||||
if ds0.isAcceptState {
|
if ds0.isAcceptState {
|
||||||
|
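The countA counter added to execATN is a throwaway debugging trick: panic on the Nth entry so the runtime prints a stack trace at exactly the iteration under suspicion. In isolation:

package main

import "fmt"

var countA = 0

func execStep() {
	countA++
	if countA == 2 {
		// panic prints a goroutine stack trace, pinpointing the 2nd call.
		panic("GAH")
	}
	fmt.Println("call", countA)
}

func main() {
	defer func() { fmt.Println("recovered:", recover()) }()
	execStep()
	execStep()
}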
@ -168,7 +180,7 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
|
||||||
var s = ds0 // s is current/from DFA state
|
var s = ds0 // s is current/from DFA state
|
||||||
|
|
||||||
for true { // while more work
|
for true { // while more work
|
||||||
if LexerATNSimulatordebug {
|
if LexerATNSimulatorDebug {
|
||||||
fmt.Println("execATN loop starting closure: " + s.configs.String())
|
fmt.Println("execATN loop starting closure: " + s.configs.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -215,6 +227,8 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
|
||||||
t = input.LA(1)
|
t = input.LA(1)
|
||||||
s = target // flip: current DFA target becomes new src/from state
|
s = target // flip: current DFA target becomes new src/from state
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fmt.Println("OUT")
|
||||||
return this.failOrAccept(this.prevAccept, input, s.configs, t)
|
return this.failOrAccept(this.prevAccept, input, s.configs, t)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -236,7 +250,7 @@ func (this *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFASt
|
||||||
if target == nil {
|
if target == nil {
|
||||||
target = nil
|
target = nil
|
||||||
}
|
}
|
||||||
if LexerATNSimulatordebug && target != nil {
|
if LexerATNSimulatorDebug && target != nil {
|
||||||
fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
|
fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
|
||||||
}
|
}
|
||||||
return target
|
return target
|
||||||
|
@ -274,8 +288,9 @@ func (this *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState,
|
||||||
func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach *ATNConfigSet, t int) int {
|
func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach *ATNConfigSet, t int) int {
|
||||||
if this.prevAccept.dfaState != nil {
|
if this.prevAccept.dfaState != nil {
|
||||||
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor
|
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor
|
||||||
this.accept(input, lexerActionExecutor, this.startIndex,
|
this.accept(input, lexerActionExecutor, this.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
|
||||||
prevAccept.index, prevAccept.line, prevAccept.column)
|
|
||||||
|
fmt.Println(prevAccept.dfaState.prediction)
|
||||||
return prevAccept.dfaState.prediction
|
return prevAccept.dfaState.prediction
|
||||||
} else {
|
} else {
|
||||||
// if no accept and EOF is first char, return EOF
|
// if no accept and EOF is first char, return EOF
|
||||||
|
@ -299,7 +314,7 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *
|
||||||
if currentAltReachedAcceptState && cfg.(*LexerATNConfig).passedThroughNonGreedyDecision {
|
if currentAltReachedAcceptState && cfg.(*LexerATNConfig).passedThroughNonGreedyDecision {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if LexerATNSimulatordebug {
|
if LexerATNSimulatorDebug {
|
||||||
fmt.Printf("testing %s at %s\n", this.GetTokenName(t), cfg.String()) // this.recog, true))
|
fmt.Printf("testing %s at %s\n", this.GetTokenName(t), cfg.String()) // this.recog, true))
|
||||||
}
|
}
|
||||||
for j := 0; j < len(cfg.GetState().GetTransitions()); j++ {
|
for j := 0; j < len(cfg.GetState().GetTransitions()); j++ {
|
||||||
|
@ -324,8 +339,8 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
|
func (this *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
|
||||||
if LexerATNSimulatordebug {
|
if LexerATNSimulatorDebug {
|
||||||
fmt.Println("ACTION %s\n", lexerActionExecutor)
|
fmt.Printf("ACTION %s\n", lexerActionExecutor)
|
||||||
}
|
}
|
||||||
// seek to after last char in token
|
// seek to after last char in token
|
||||||
input.Seek(index)
|
input.Seek(index)
|
||||||
|
@ -346,12 +361,17 @@ func (this *LexerATNSimulator) getReachableTarget(trans ITransition, t int) IATN
|
||||||
|
|
||||||
func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState) *OrderedATNConfigSet {
|
func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState) *OrderedATNConfigSet {
|
||||||
|
|
||||||
|
fmt.Println("DEBUG" + strconv.Itoa(len(p.GetTransitions())))
|
||||||
|
|
||||||
var configs = NewOrderedATNConfigSet()
|
var configs = NewOrderedATNConfigSet()
|
||||||
for i := 0; i < len(p.GetTransitions()); i++ {
|
for i := 0; i < len(p.GetTransitions()); i++ {
|
||||||
var target = p.GetTransitions()[i].getTarget()
|
var target = p.GetTransitions()[i].getTarget()
|
||||||
var cfg = NewLexerATNConfig6(target, i+1, PredictionContextEMPTY)
|
var cfg = NewLexerATNConfig6(target, i+1, PredictionContextEMPTY)
|
||||||
this.closure(input, cfg, configs.ATNConfigSet, false, false, false)
|
this.closure(input, cfg, configs.ATNConfigSet, false, false, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fmt.Println("DEBUG" + configs.String())
|
||||||
|
|
||||||
return configs
|
return configs
|
||||||
}
|
}
|
||||||
|
|
||||||
|
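computeStartState seeds one configuration per outgoing transition of the mode's start state, numbering alternatives from 1 because 0 is the invalid-alt marker. The seeding loop reduced to its shape, with placeholder types standing in for ATN configs:

package main

import "fmt"

type config struct {
	target string
	alt    int
}

// seedStartState mirrors the loop in computeStartState: one config per
// transition, alternatives numbered i+1 as in NewLexerATNConfig6(target, i+1, ...).
func seedStartState(targets []string) []config {
	configs := make([]config, 0, len(targets))
	for i := 0; i < len(targets); i++ {
		configs = append(configs, config{targets[i], i + 1})
	}
	return configs
}

func main() { fmt.Println(seedStartState([]string{"s1", "s2"})) }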
@ -366,19 +386,21 @@ func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState)
|
||||||
func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, configs *ATNConfigSet,
|
func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, configs *ATNConfigSet,
|
||||||
currentAltReachedAcceptState, speculative, treatEofAsEpsilon bool) bool {
|
currentAltReachedAcceptState, speculative, treatEofAsEpsilon bool) bool {
|
||||||
|
|
||||||
if LexerATNSimulatordebug {
|
if LexerATNSimulatorDebug {
|
||||||
fmt.Println("closure(" + config.String() + ")") // config.String(this.recog, true) + ")")
|
fmt.Println("closure(" + config.String() + ")") // config.String(this.recog, true) + ")")
|
||||||
}
|
}
|
||||||
|
|
||||||
_, ok := config.state.(*RuleStopState)
|
_, ok := config.state.(*RuleStopState)
|
||||||
if ok {
|
if ok {
|
||||||
if LexerATNSimulatordebug {
|
|
||||||
|
if LexerATNSimulatorDebug {
|
||||||
if this.recog != nil {
|
if this.recog != nil {
|
||||||
fmt.Println("closure at %s rule stop %s\n", this.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
|
fmt.Printf("closure at %s rule stop %s\n", this.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
|
||||||
} else {
|
} else {
|
||||||
fmt.Println("closure at rule stop %s\n", config)
|
fmt.Printf("closure at rule stop %s\n", config)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if config.context == nil || config.context.hasEmptyPath() {
|
if config.context == nil || config.context.hasEmptyPath() {
|
||||||
if config.context == nil || config.context.isEmpty() {
|
if config.context == nil || config.context.isEmpty() {
|
||||||
configs.add(config, nil)
|
configs.add(config, nil)
|
||||||
|
@ -452,7 +474,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
|
||||||
|
|
||||||
pt := trans.(*PredicateTransition)
|
pt := trans.(*PredicateTransition)
|
||||||
|
|
||||||
if LexerATNSimulatordebug {
|
if LexerATNSimulatorDebug {
|
||||||
fmt.Println("EVAL rule " + strconv.Itoa(trans.(*PredicateTransition).ruleIndex) + ":" + strconv.Itoa(pt.predIndex))
|
fmt.Println("EVAL rule " + strconv.Itoa(trans.(*PredicateTransition).ruleIndex) + ":" + strconv.Itoa(pt.predIndex))
|
||||||
}
|
}
|
||||||
configs.hasSemanticContext = true
|
configs.hasSemanticContext = true
|
||||||
|
@ -571,7 +593,7 @@ func (this *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState,
|
||||||
// Only track edges within the DFA bounds
|
// Only track edges within the DFA bounds
|
||||||
return to
|
return to
|
||||||
}
|
}
|
||||||
if LexerATNSimulatordebug {
|
if LexerATNSimulatorDebug {
|
||||||
fmt.Println("EDGE " + from_.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
|
fmt.Println("EDGE " + from_.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
|
||||||
}
|
}
|
||||||
if from_.edges == nil {
|
if from_.edges == nil {
|
||||||
|
@ -643,6 +665,7 @@ func (this *LexerATNSimulator) consume(input CharStream) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *LexerATNSimulator) GetTokenName(tt int) string {
|
func (this *LexerATNSimulator) GetTokenName(tt int) string {
|
||||||
|
fmt.Println(tt)
|
||||||
if tt == -1 {
|
if tt == -1 {
|
||||||
return "EOF"
|
return "EOF"
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
package antlr4
|
package antlr4
|
||||||
|
import "fmt"
|
||||||
|
|
||||||
type IParser interface {
|
type IParser interface {
|
||||||
IRecognizer
|
IRecognizer
|
||||||
|
@ -8,13 +9,13 @@ type IParser interface {
|
||||||
GetTokenStream() TokenStream
|
GetTokenStream() TokenStream
|
||||||
GetTokenFactory() TokenFactory
|
GetTokenFactory() TokenFactory
|
||||||
GetParserRuleContext() IParserRuleContext
|
GetParserRuleContext() IParserRuleContext
|
||||||
Consume() *Token
|
Consume() IToken
|
||||||
GetParseListeners() []ParseTreeListener
|
GetParseListeners() []ParseTreeListener
|
||||||
|
|
||||||
GetInputStream() IntStream
|
GetInputStream() IntStream
|
||||||
getCurrentToken() *Token
|
getCurrentToken() IToken
|
||||||
getExpectedTokens() *IntervalSet
|
getExpectedTokens() *IntervalSet
|
||||||
NotifyErrorListeners(msg string, offendingToken *Token, err IRecognitionException)
|
NotifyErrorListeners(msg string, offendingToken IToken, err IRecognitionException)
|
||||||
isExpectedToken(symbol int) bool
|
isExpectedToken(symbol int) bool
|
||||||
getPrecedence() int
|
getPrecedence() int
|
||||||
getRuleInvocationStack(IParserRuleContext) []string
|
getRuleInvocationStack(IParserRuleContext) []string
|
||||||
|
@ -24,16 +25,16 @@ type Parser struct {
|
||||||
*Recognizer
|
*Recognizer
|
||||||
|
|
||||||
Interpreter *ParserATNSimulator
|
Interpreter *ParserATNSimulator
|
||||||
|
BuildParseTrees bool
|
||||||
|
|
||||||
_input TokenStream
|
_input TokenStream
|
||||||
_errHandler IErrorStrategy
|
_errHandler IErrorStrategy
|
||||||
_precedenceStack IntStack
|
_precedenceStack IntStack
|
||||||
_ctx IParserRuleContext
|
_ctx IParserRuleContext
|
||||||
buildParseTrees bool
|
|
||||||
_tracer *TraceListener
|
_tracer *TraceListener
|
||||||
_parseListeners []ParseTreeListener
|
_parseListeners []ParseTreeListener
|
||||||
_SyntaxErrors int
|
_SyntaxErrors int
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// This is all the parsing support code; essentially most of it is error
|
// This is all the parsing support code; essentially most of it is error
|
||||||
|
@ -42,7 +43,6 @@ func NewParser(input TokenStream) *Parser {
|
||||||
|
|
||||||
p := new(Parser)
|
p := new(Parser)
|
||||||
|
|
||||||
|
|
||||||
p.Recognizer = NewRecognizer()
|
p.Recognizer = NewRecognizer()
|
||||||
|
|
||||||
// The input stream.
|
// The input stream.
|
||||||
|
@ -57,7 +57,7 @@ func NewParser(input TokenStream) *Parser {
|
||||||
p._ctx = nil
|
p._ctx = nil
|
||||||
// Specifies whether or not the parser should construct a parse tree during
|
// Specifies whether or not the parser should construct a parse tree during
|
||||||
// the parsing process. The default value is {@code true}.
|
// the parsing process. The default value is {@code true}.
|
||||||
p.buildParseTrees = true
|
p.BuildParseTrees = true
|
||||||
// When {@link //setTrace}{@code (true)} is called, a reference to the
|
// When {@link //setTrace}{@code (true)} is called, a reference to the
|
||||||
// {@link TraceListener} is stored here so it can be easily removed in a
|
// {@link TraceListener} is stored here so it can be easily removed in a
|
||||||
// later call to {@link //setTrace}{@code (false)}. The listener itself is
|
// later call to {@link //setTrace}{@code (false)}. The listener itself is
|
||||||
|
@ -124,14 +124,17 @@ func (p *Parser) GetParseListeners() []ParseTreeListener {
|
||||||
// {@code ttype} and the error strategy could not recover from the
|
// {@code ttype} and the error strategy could not recover from the
|
||||||
// mismatched symbol
|
// mismatched symbol
|
||||||
|
|
||||||
func (p *Parser) Match(ttype int) *Token {
|
func (p *Parser) Match(ttype int) IToken {
|
||||||
var t = p.getCurrentToken()
|
var t = p.getCurrentToken()
|
||||||
if t.tokenType == ttype {
|
|
||||||
|
fmt.Println("TOKEN IS " + t.GetText())
|
||||||
|
|
||||||
|
if t.GetTokenType() == ttype {
|
||||||
p._errHandler.ReportMatch(p)
|
p._errHandler.ReportMatch(p)
|
||||||
p.Consume()
|
p.Consume()
|
||||||
} else {
|
} else {
|
||||||
t = p._errHandler.RecoverInline(p)
|
t = p._errHandler.RecoverInline(p)
|
||||||
if p.buildParseTrees && t.tokenIndex == -1 {
|
if p.BuildParseTrees && t.GetTokenIndex() == -1 {
|
||||||
// we must have conjured up a new token during single token
|
// we must have conjured up a new token during single token
|
||||||
// insertion
|
// insertion
|
||||||
// if it's not the current symbol
|
// if it's not the current symbol
|
||||||
|
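Parser.Match now compares through GetTokenType() on the IToken interface rather than reading the struct field. Its control flow (consume on a type match, otherwise let the error strategy recover inline) in a self-contained form, with all names assumed:

package main

import "fmt"

// match consumes the current token when its type is the expected one and
// otherwise defers to recoverInline (single-token insertion/deletion).
func match(current, expected int, recoverInline func() int) (tok int, matched bool) {
	if current == expected {
		return current, true // ReportMatch + Consume in the real parser
	}
	return recoverInline(), false // RecoverInline in the real parser
}

func main() {
	tok, ok := match(5, 7, func() int { return 7 })
	fmt.Println(tok, ok) // 7 false
}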
@ -157,14 +160,14 @@ func (p *Parser) Match(ttype int) *Token {
|
||||||
// a wildcard and the error strategy could not recover from the mismatched
|
// a wildcard and the error strategy could not recover from the mismatched
|
||||||
// symbol
|
// symbol
|
||||||
|
|
||||||
func (p *Parser) MatchWildcard() *Token {
|
func (p *Parser) MatchWildcard() IToken {
|
||||||
var t = p.getCurrentToken()
|
var t = p.getCurrentToken()
|
||||||
if t.tokenType > 0 {
|
if t.GetTokenType() > 0 {
|
||||||
p._errHandler.ReportMatch(p)
|
p._errHandler.ReportMatch(p)
|
||||||
p.Consume()
|
p.Consume()
|
||||||
} else {
|
} else {
|
||||||
t = p._errHandler.RecoverInline(p)
|
t = p._errHandler.RecoverInline(p)
|
||||||
if p.buildParseTrees && t.tokenIndex == -1 {
|
if p.BuildParseTrees && t.GetTokenIndex() == -1 {
|
||||||
// we must have conjured up a new token during single token
|
// we must have conjured up a new token during single token
|
||||||
// insertion
|
// insertion
|
||||||
// if it's not the current symbol
|
// if it's not the current symbol
|
||||||
|
@ -232,24 +235,24 @@ func (p *Parser) addParseListener(listener ParseTreeListener) {
|
||||||
//
|
//
|
||||||
func (p *Parser) removeParseListener(listener ParseTreeListener) {
|
func (p *Parser) removeParseListener(listener ParseTreeListener) {
|
||||||
|
|
||||||
if (p._parseListeners != nil) {
|
if p._parseListeners != nil {
|
||||||
|
|
||||||
idx := -1
|
idx := -1
|
||||||
for i,v := range p._parseListeners {
|
for i, v := range p._parseListeners {
|
||||||
if v == listener {
|
if v == listener {
|
||||||
idx = i
|
idx = i
|
||||||
break;
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (idx == -1){
|
if idx == -1 {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// remove the listener from the slice
|
// remove the listener from the slice
|
||||||
p._parseListeners = append( p._parseListeners[0:idx], p._parseListeners[idx+1:]... )
|
p._parseListeners = append(p._parseListeners[0:idx], p._parseListeners[idx+1:]...)
|
||||||
|
|
||||||
if (len(p._parseListeners) == 0) {
|
if len(p._parseListeners) == 0 {
|
||||||
p._parseListeners = nil
|
p._parseListeners = nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
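removeParseListener uses the standard Go idiom for deleting element idx from a slice: append the tail onto the head. Standalone:

package main

import "fmt"

// removeAt deletes the element at idx, preserving order (listeners are
// matched by identity, and order can matter for event delivery).
func removeAt(s []string, idx int) []string {
	return append(s[:idx], s[idx+1:]...)
}

func main() {
	listeners := []string{"trace", "build", "custom"}
	fmt.Println(removeAt(listeners, 1)) // [trace custom]
}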
@ -385,28 +388,28 @@ func (p *Parser) setTokenStream(input TokenStream) {
|
||||||
// Match needs to return the current input symbol, which gets put
|
// Match needs to return the current input symbol, which gets put
|
||||||
// into the label for the associated token ref e.g., x=ID.
|
// into the label for the associated token ref e.g., x=ID.
|
||||||
//
|
//
|
||||||
func (p *Parser) getCurrentToken() *Token {
|
func (p *Parser) getCurrentToken() IToken {
|
||||||
return p._input.LT(1)
|
return p._input.LT(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Parser) NotifyErrorListeners(msg string, offendingToken *Token, err IRecognitionException) {
|
func (p *Parser) NotifyErrorListeners(msg string, offendingToken IToken, err IRecognitionException) {
|
||||||
if offendingToken == nil {
|
if offendingToken == nil {
|
||||||
offendingToken = p.getCurrentToken()
|
offendingToken = p.getCurrentToken()
|
||||||
}
|
}
|
||||||
p._SyntaxErrors += 1
|
p._SyntaxErrors += 1
|
||||||
var line = offendingToken.line
|
var line = offendingToken.GetLine()
|
||||||
var column = offendingToken.column
|
var column = offendingToken.GetColumn()
|
||||||
listener := p.getErrorListenerDispatch()
|
listener := p.getErrorListenerDispatch()
|
||||||
listener.SyntaxError(p, offendingToken, line, column, msg, err)
|
listener.SyntaxError(p, offendingToken, line, column, msg, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Parser) Consume() *Token {
|
func (p *Parser) Consume() IToken {
|
||||||
var o = p.getCurrentToken()
|
var o = p.getCurrentToken()
|
||||||
if o.tokenType != TokenEOF {
|
if o.GetTokenType() != TokenEOF {
|
||||||
p.GetInputStream().Consume()
|
p.GetInputStream().Consume()
|
||||||
}
|
}
|
||||||
var hasListener = p._parseListeners != nil && len(p._parseListeners) > 0
|
var hasListener = p._parseListeners != nil && len(p._parseListeners) > 0
|
||||||
if p.buildParseTrees || hasListener {
|
if p.BuildParseTrees || hasListener {
|
||||||
if p._errHandler.inErrorRecoveryMode(p) {
|
if p._errHandler.inErrorRecoveryMode(p) {
|
||||||
var node = p._ctx.addErrorNode(o)
|
var node = p._ctx.addErrorNode(o)
|
||||||
if p._parseListeners != nil {
|
if p._parseListeners != nil {
|
||||||
|
@ -437,10 +440,10 @@ func (p *Parser) addContextToParseTree() {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Parser) EnterRule(localctx IParserRuleContext, state, ruleIndex int) {
|
func (p *Parser) EnterRule(localctx IParserRuleContext, state, ruleIndex int) {
|
||||||
p.state = state
|
p.SetState(state)
|
||||||
p._ctx = localctx
|
p._ctx = localctx
|
||||||
p._ctx.setStart(p._input.LT(1))
|
p._ctx.setStart(p._input.LT(1))
|
||||||
if p.buildParseTrees {
|
if p.BuildParseTrees {
|
||||||
p.addContextToParseTree()
|
p.addContextToParseTree()
|
||||||
}
|
}
|
||||||
if p._parseListeners != nil {
|
if p._parseListeners != nil {
|
||||||
|
@ -454,8 +457,8 @@ func (p *Parser) ExitRule() {
|
||||||
if p._parseListeners != nil {
|
if p._parseListeners != nil {
|
||||||
p.TriggerExitRuleEvent()
|
p.TriggerExitRuleEvent()
|
||||||
}
|
}
|
||||||
p.state = p._ctx.getInvokingState()
|
p.SetState(p._ctx.getInvokingState())
|
||||||
if (p._ctx.GetParent() != nil){
|
if p._ctx.GetParent() != nil {
|
||||||
p._ctx = p._ctx.GetParent().(IParserRuleContext)
|
p._ctx = p._ctx.GetParent().(IParserRuleContext)
|
||||||
} else {
|
} else {
|
||||||
p._ctx = nil
|
p._ctx = nil
|
||||||
|
@ -465,7 +468,7 @@ func (p *Parser) ExitRule() {
|
||||||
func (p *Parser) EnterOuterAlt(localctx IParserRuleContext, altNum int) {
|
func (p *Parser) EnterOuterAlt(localctx IParserRuleContext, altNum int) {
|
||||||
// if we have a new localctx, make sure we replace existing ctx
|
// if we have a new localctx, make sure we replace existing ctx
|
||||||
// that is previous child of parse tree
|
// that is previous child of parse tree
|
||||||
if p.buildParseTrees && p._ctx != localctx {
|
if p.BuildParseTrees && p._ctx != localctx {
|
||||||
if p._ctx.GetParent() != nil {
|
if p._ctx.GetParent() != nil {
|
||||||
p._ctx.GetParent().(IParserRuleContext).removeLastChild()
|
p._ctx.GetParent().(IParserRuleContext).removeLastChild()
|
||||||
p._ctx.GetParent().(IParserRuleContext).addChild(localctx)
|
p._ctx.GetParent().(IParserRuleContext).addChild(localctx)
|
||||||
|
@ -488,7 +491,7 @@ func (p *Parser) getPrecedence() int {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Parser) EnterRecursionRule(localctx IParserRuleContext, state, ruleIndex, precedence int) {
|
func (p *Parser) EnterRecursionRule(localctx IParserRuleContext, state, ruleIndex, precedence int) {
|
||||||
p.state = state
|
p.SetState(state)
|
||||||
p._precedenceStack.Push(precedence)
|
p._precedenceStack.Push(precedence)
|
||||||
p._ctx = localctx
|
p._ctx = localctx
|
||||||
p._ctx.setStart(p._input.LT(1))
|
p._ctx.setStart(p._input.LT(1))
|
||||||
|
@ -509,7 +512,7 @@ func (p *Parser) PushNewRecursionContext(localctx IParserRuleContext, state, rul
|
||||||
|
|
||||||
p._ctx = localctx
|
p._ctx = localctx
|
||||||
p._ctx.setStart(previous.getStart())
|
p._ctx.setStart(previous.getStart())
|
||||||
if p.buildParseTrees {
|
if p.BuildParseTrees {
|
||||||
p._ctx.addChild(previous)
|
p._ctx.addChild(previous)
|
||||||
}
|
}
|
||||||
if p._parseListeners != nil {
|
if p._parseListeners != nil {
|
||||||
|
@ -533,7 +536,7 @@ func (p *Parser) UnrollRecursionContexts(parentCtx IParserRuleContext) {
|
||||||
}
|
}
|
||||||
// hook into tree
|
// hook into tree
|
||||||
retCtx.setParent(parentCtx)
|
retCtx.setParent(parentCtx)
|
||||||
if p.buildParseTrees && parentCtx != nil {
|
if p.BuildParseTrees && parentCtx != nil {
|
||||||
// add return ctx into invoking rule's tree
|
// add return ctx into invoking rule's tree
|
||||||
parentCtx.addChild(retCtx)
|
parentCtx.addChild(retCtx)
|
||||||
}
|
}
|
||||||
|
|
|
@ -47,7 +47,7 @@ func NewParserATNSimulator(parser IParser, atn *ATN, decisionToDFA []*DFA, share
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
var ParserATNSimulatorDebug = false
|
var ParserATNSimulatorDebug = true
|
||||||
var ParserATNSimulatorListATNDecisions = false
|
var ParserATNSimulatorListATNDecisions = false
|
||||||
var ParserATNSimulatorDFADebug = false
|
var ParserATNSimulatorDFADebug = false
|
||||||
var ParserATNSimulatorRetryDebug = false
|
var ParserATNSimulatorRetryDebug = false
|
||||||
|
@ -57,11 +57,14 @@ func (this *ParserATNSimulator) reset() {
|
||||||
|
|
||||||
func (this *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, outerContext IParserRuleContext) int {
|
func (this *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, outerContext IParserRuleContext) int {
|
||||||
|
|
||||||
|
fmt.Println("Adaptive preduct")
|
||||||
|
|
||||||
if ParserATNSimulatorDebug || ParserATNSimulatorListATNDecisions {
|
if ParserATNSimulatorDebug || ParserATNSimulatorListATNDecisions {
|
||||||
|
|
||||||
fmt.Println("AdaptivePredict decision " + strconv.Itoa(decision) +
|
fmt.Println("AdaptivePredict decision " + strconv.Itoa(decision) +
|
||||||
" exec LA(1)==" + this.getLookaheadName(input) +
|
" exec LA(1)==" + this.getLookaheadName(input) +
|
||||||
" line " + strconv.Itoa(input.LT(1).line) + ":" +
|
" line " + strconv.Itoa(input.LT(1).GetLine()) + ":" +
|
||||||
strconv.Itoa(input.LT(1).column))
|
strconv.Itoa(input.LT(1).GetColumn()))
|
||||||
}
|
}
|
||||||
|
|
||||||
this._input = input
|
this._input = input
|
||||||
|
@ -174,7 +177,7 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
|
||||||
if ParserATNSimulatorDebug || ParserATNSimulatorListATNDecisions {
|
if ParserATNSimulatorDebug || ParserATNSimulatorListATNDecisions {
|
||||||
fmt.Println("execATN decision " + strconv.Itoa(dfa.decision) +
|
fmt.Println("execATN decision " + strconv.Itoa(dfa.decision) +
|
||||||
" exec LA(1)==" + this.getLookaheadName(input) +
|
" exec LA(1)==" + this.getLookaheadName(input) +
|
||||||
" line " + strconv.Itoa(input.LT(1).line) + ":" + strconv.Itoa(input.LT(1).column))
|
" line " + strconv.Itoa(input.LT(1).GetLine()) + ":" + strconv.Itoa(input.LT(1).GetColumn()))
|
||||||
}
|
}
|
||||||
|
|
||||||
var previousD = s0
|
var previousD = s0
|
||||||
|
@ -1278,18 +1281,22 @@ func (this *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs *ATNConfig
|
||||||
|
|
||||||
func (this *ParserATNSimulator) GetTokenName(t int) string {
|
func (this *ParserATNSimulator) GetTokenName(t int) string {
|
||||||
|
|
||||||
|
fmt.Println("Get token name")
|
||||||
|
|
||||||
if t == TokenEOF {
|
if t == TokenEOF {
|
||||||
return "EOF"
|
return "EOF"
|
||||||
}
|
}
|
||||||
|
|
||||||
if this.parser != nil && this.parser.GetLiteralNames() != nil {
|
if this.parser != nil && this.parser.GetLiteralNames() != nil {
|
||||||
if t >= len(this.parser.GetLiteralNames()) {
|
if t >= len(this.parser.GetLiteralNames()) {
|
||||||
fmt.Println(strconv.Itoa(t) + " ttype out of range: " + strings.Join(this.parser.GetLiteralNames(), ","))
|
fmt.Println(strconv.Itoa(t) + " ttype out of range: " + strings.Join(this.parser.GetLiteralNames(), ","))
|
||||||
fmt.Println(this.parser.GetInputStream().(TokenStream).GetAllText())
|
// fmt.Println(this.parser.GetInputStream().(TokenStream).GetAllText()) // this seems incorrect
|
||||||
} else {
|
} else {
|
||||||
return this.parser.GetLiteralNames()[t] + "<" + strconv.Itoa(t) + ">"
|
return this.parser.GetLiteralNames()[t] + "<" + strconv.Itoa(t) + ">"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return "" + strconv.Itoa(t)
|
|
||||||
|
return strconv.Itoa(t)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *ParserATNSimulator) getLookaheadName(input TokenStream) string {
|
func (this *ParserATNSimulator) getLookaheadName(input TokenStream) string {
|
||||||
|
|
|
@ -8,16 +8,16 @@ type IParserRuleContext interface {
|
||||||
IRuleContext
|
IRuleContext
|
||||||
|
|
||||||
SetException(IRecognitionException)
|
SetException(IRecognitionException)
|
||||||
addTokenNode(token *Token) *TerminalNodeImpl
|
addTokenNode(token IToken) *TerminalNodeImpl
|
||||||
addErrorNode(badToken *Token) *ErrorNodeImpl
|
addErrorNode(badToken IToken) *ErrorNodeImpl
|
||||||
EnterRule(listener ParseTreeListener)
|
EnterRule(listener ParseTreeListener)
|
||||||
ExitRule(listener ParseTreeListener)
|
ExitRule(listener ParseTreeListener)
|
||||||
|
|
||||||
setStart(*Token)
|
setStart(IToken)
|
||||||
getStart() *Token
|
getStart() IToken
|
||||||
|
|
||||||
setStop(*Token)
|
setStop(IToken)
|
||||||
getStop() *Token
|
getStop() IToken
|
||||||
|
|
||||||
addChild(child IRuleContext) IRuleContext
|
addChild(child IRuleContext) IRuleContext
|
||||||
removeLastChild()
|
removeLastChild()
|
||||||
|
@ -27,7 +27,7 @@ type ParserRuleContext struct {
|
||||||
*RuleContext
|
*RuleContext
|
||||||
|
|
||||||
children []ParseTree
|
children []ParseTree
|
||||||
start, stop *Token
|
start, stop IToken
|
||||||
exception IRecognitionException
|
exception IRecognitionException
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -117,7 +117,7 @@ func (prc *ParserRuleContext) removeLastChild() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (prc *ParserRuleContext) addTokenNode(token *Token) *TerminalNodeImpl {
|
func (prc *ParserRuleContext) addTokenNode(token IToken) *TerminalNodeImpl {
|
||||||
|
|
||||||
var node = NewTerminalNodeImpl(token)
|
var node = NewTerminalNodeImpl(token)
|
||||||
prc.addTerminalNodeChild(node)
|
prc.addTerminalNodeChild(node)
|
||||||
|
@ -126,7 +126,7 @@ func (prc *ParserRuleContext) addTokenNode(token *Token) *TerminalNodeImpl {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (prc *ParserRuleContext) addErrorNode(badToken *Token) *ErrorNodeImpl {
|
func (prc *ParserRuleContext) addErrorNode(badToken IToken) *ErrorNodeImpl {
|
||||||
var node = NewErrorNodeImpl(badToken)
|
var node = NewErrorNodeImpl(badToken)
|
||||||
prc.addTerminalNodeChild(node)
|
prc.addTerminalNodeChild(node)
|
||||||
node.parentCtx = prc
|
node.parentCtx = prc
|
||||||
|
@ -159,19 +159,19 @@ func (prc *ParserRuleContext) getChildOfType(i int, childType reflect.Type) IRul
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (prc *ParserRuleContext) setStart(t *Token) {
|
func (prc *ParserRuleContext) setStart(t IToken) {
|
||||||
prc.start = t
|
prc.start = t
|
||||||
}
|
}
|
||||||
|
|
||||||
func (prc *ParserRuleContext) getStart() *Token {
|
func (prc *ParserRuleContext) getStart() IToken {
|
||||||
return prc.start
|
return prc.start
|
||||||
}
|
}
|
||||||
|
|
||||||
func (prc *ParserRuleContext) setStop(t *Token) {
|
func (prc *ParserRuleContext) setStop(t IToken) {
|
||||||
prc.stop = t
|
prc.stop = t
|
||||||
}
|
}
|
||||||
|
|
||||||
func (prc *ParserRuleContext) getStop() *Token {
|
func (prc *ParserRuleContext) getStop() IToken {
|
||||||
return prc.stop
|
return prc.stop
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -180,7 +180,7 @@ func (prc *ParserRuleContext) GetToken(ttype int, i int) TerminalNode {
|
||||||
for j := 0; j < len(prc.children); j++ {
|
for j := 0; j < len(prc.children); j++ {
|
||||||
var child = prc.children[j]
|
var child = prc.children[j]
|
||||||
if c2, ok := child.(TerminalNode); ok {
|
if c2, ok := child.(TerminalNode); ok {
|
||||||
if c2.getSymbol().tokenType == ttype {
|
if c2.getSymbol().GetTokenType() == ttype {
|
||||||
if i == 0 {
|
if i == 0 {
|
||||||
return c2
|
return c2
|
||||||
} else {
|
} else {
|
||||||
|
@ -200,7 +200,7 @@ func (prc *ParserRuleContext) GetTokens(ttype int) []TerminalNode {
|
||||||
for j := 0; j < len(prc.children); j++ {
|
for j := 0; j < len(prc.children); j++ {
|
||||||
var child = prc.children[j]
|
var child = prc.children[j]
|
||||||
if tchild, ok := child.(TerminalNode); ok {
|
if tchild, ok := child.(TerminalNode); ok {
|
||||||
if tchild.getSymbol().tokenType == ttype {
|
if tchild.getSymbol().GetTokenType() == ttype {
|
||||||
tokens = append(tokens, tchild)
|
tokens = append(tokens, tchild)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -242,7 +242,7 @@ func (prc *ParserRuleContext) GetSourceInterval() *Interval {
|
||||||
if prc.start == nil || prc.stop == nil {
|
if prc.start == nil || prc.stop == nil {
|
||||||
return TreeINVALID_INTERVAL
|
return TreeINVALID_INTERVAL
|
||||||
} else {
|
} else {
|
||||||
return NewInterval(prc.start.tokenIndex, prc.stop.tokenIndex)
|
return NewInterval(prc.start.GetTokenIndex(), prc.stop.GetTokenIndex())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -72,10 +72,6 @@ func (this *PredictionContext) isEmpty() bool {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *PredictionContext) hasEmptyPath() bool {
|
|
||||||
return this.getReturnState(this.length()-1) == PredictionContextEMPTY_RETURN_STATE
|
|
||||||
}
|
|
||||||
|
|
||||||
func (this *PredictionContext) hashString() string {
|
func (this *PredictionContext) hashString() string {
|
||||||
return this.cachedHashString
|
return this.cachedHashString
|
||||||
}
|
}
|
||||||
|
@ -88,22 +84,6 @@ func calculateEmptyHashString() string {
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *PredictionContext) String() string {
|
|
||||||
panic("Not implemented")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (this *PredictionContext) GetParent(index int) IPredictionContext {
|
|
||||||
panic("Not implemented")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (this *PredictionContext) length() int {
|
|
||||||
panic("Not implemented")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (this *PredictionContext) getReturnState(index int) int {
|
|
||||||
panic("Not implemented")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Used to cache {@link PredictionContext} objects. It's used for the shared
|
// Used to cache {@link PredictionContext} objects. It's used for the shared
|
||||||
// context cache associated with contexts in DFA states. This cache
|
// context cache associated with contexts in DFA states. This cache
|
||||||
// can be used for both lexers and parsers.
|
// can be used for both lexers and parsers.
|
||||||
|
@ -191,6 +171,10 @@ func (this *SingletonPredictionContext) getReturnState(index int) int {
|
||||||
return this.returnState
|
return this.returnState
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (this *SingletonPredictionContext) hasEmptyPath() bool {
|
||||||
|
return this.returnState == PredictionContextEMPTY_RETURN_STATE
|
||||||
|
}
|
||||||
|
|
||||||
func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
|
func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
|
||||||
if this == other {
|
if this == other {
|
||||||
return true
|
return true
|
||||||
|
@ -301,6 +285,10 @@ func (c *ArrayPredictionContext) GetReturnStates() []int {
|
||||||
return c.returnStates
|
return c.returnStates
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (this *ArrayPredictionContext) hasEmptyPath() bool {
|
||||||
|
return this.getReturnState(this.length()-1) == PredictionContextEMPTY_RETURN_STATE
|
||||||
|
}
|
||||||
|
|
||||||
func (this *ArrayPredictionContext) isEmpty() bool {
|
func (this *ArrayPredictionContext) isEmpty() bool {
|
||||||
// since EMPTY_RETURN_STATE can only appear in the last position, we
|
// since EMPTY_RETURN_STATE can only appear in the last position, we
|
||||||
// don't need to verify that size==1
|
// don't need to verify that size==1
|
||||||
|
@ -320,9 +308,7 @@ func (this *ArrayPredictionContext) getReturnState(index int) int {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *ArrayPredictionContext) equals(other IPredictionContext) bool {
|
func (this *ArrayPredictionContext) equals(other IPredictionContext) bool {
|
||||||
if this == other {
|
if _, ok := other.(*ArrayPredictionContext); !ok {
|
||||||
return true
|
|
||||||
} else if _, ok := other.(*ArrayPredictionContext); !ok {
|
|
||||||
return false
|
return false
|
||||||
} else if this.cachedHashString != other.hashString() {
|
} else if this.cachedHashString != other.hashString() {
|
||||||
return false // can't be same if hash is different
|
return false // can't be same if hash is different
|
||||||
|
|
|
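The hasEmptyPath relocation in this file, off the base PredictionContext and onto SingletonPredictionContext and ArrayPredictionContext, is forced by Go semantics: a method defined on an embedded struct cannot dispatch to the outer type's overrides, so "abstract" methods must be implemented on each concrete type. A minimal demonstration of the pitfall being avoided:

package main

import "fmt"

type base struct{}

func (base) length() int { return 0 }

// A base-level helper always calls base.length(), never the override below.
func (b base) lastViaBase() int { return b.length() }

type array struct {
	base
	states []int
}

func (a array) length() int { return len(a.states) }

func main() {
	a := array{states: []int{1, 2, 3}}
	fmt.Println(a.length())      // 3: concrete method
	fmt.Println(a.lastViaBase()) // 0: embedding is not inheritance
}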
@ -83,6 +83,8 @@ func (this *Recognizer) GetState() int {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *Recognizer) SetState(v int) {
|
func (this *Recognizer) SetState(v int) {
|
||||||
|
fmt.Println("SETTING STATE " + strconv.Itoa(v) + " from " + strconv.Itoa(this.state))
|
||||||
|
|
||||||
this.state = v
|
this.state = v
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -159,8 +161,8 @@ func (this *Recognizer) GetTokenType(tokenName string) int {
|
||||||
|
|
||||||
// What is the error header, normally line/character position information?//
|
// What is the error header, normally line/character position information?//
|
||||||
func (this *Recognizer) getErrorHeader(e IRecognitionException) string {
|
func (this *Recognizer) getErrorHeader(e IRecognitionException) string {
|
||||||
var line = e.GetOffendingToken().line
|
var line = e.GetOffendingToken().GetLine()
|
||||||
var column = e.GetOffendingToken().column
|
var column = e.GetOffendingToken().GetColumn()
|
||||||
return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
|
return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -177,16 +179,16 @@ func (this *Recognizer) getErrorHeader(e IRecognitionException) string {
|
||||||
// feature when necessary. For example, see
|
// feature when necessary. For example, see
|
||||||
// {@link DefaultErrorStrategy//GetTokenErrorDisplay}.
|
// {@link DefaultErrorStrategy//GetTokenErrorDisplay}.
|
||||||
//
|
//
|
||||||
func (this *Recognizer) GetTokenErrorDisplay(t *Token) string {
|
func (this *Recognizer) GetTokenErrorDisplay(t IToken) string {
|
||||||
if t == nil {
|
if t == nil {
|
||||||
return "<no token>"
|
return "<no token>"
|
||||||
}
|
}
|
||||||
var s = t.text()
|
var s = t.GetText()
|
||||||
if s == "" {
|
if s == "" {
|
||||||
if t.tokenType == TokenEOF {
|
if t.GetTokenType() == TokenEOF {
|
||||||
s = "<EOF>"
|
s = "<EOF>"
|
||||||
} else {
|
} else {
|
||||||
s = "<" + strconv.Itoa(t.tokenType) + ">"
|
s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
s = strings.Replace(s, "\t", "\\t", -1)
|
s = strings.Replace(s, "\t", "\\t", -1)
|
||||||
|
|
|
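GetTokenErrorDisplay normalizes whitespace so a token's text stays on one line in error messages. Only the \t replacement is visible in this hunk; the sketch below assumes the \n and \r lines follow the same strings.Replace pattern (a count of -1 replaces all occurrences):

package main

import (
	"fmt"
	"strings"
)

func escapeForDisplay(s string) string {
	s = strings.Replace(s, "\t", "\\t", -1)
	s = strings.Replace(s, "\n", "\\n", -1)
	s = strings.Replace(s, "\r", "\\r", -1)
	return "'" + s + "'"
}

func main() {
	fmt.Println(escapeForDisplay("a\tb\n")) // 'a\tb\n'
}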
@ -5,8 +5,6 @@ import (
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
type TokenSourceCharStreamPair struct {
|
type TokenSourceCharStreamPair struct {
|
||||||
tokenSource TokenSource
|
tokenSource TokenSource
|
||||||
charStream CharStream
|
charStream CharStream
|
||||||
|
@ -16,6 +14,26 @@ type TokenSourceCharStreamPair struct {
|
||||||
// (so we can ignore tabs), token channel, index, and source from which
|
// (so we can ignore tabs), token channel, index, and source from which
|
||||||
// we obtained this token.
|
// we obtained this token.
|
||||||
|
|
||||||
|
|
||||||
|
type IToken interface {
|
||||||
|
GetSource() *TokenSourceCharStreamPair
|
||||||
|
GetTokenType() int
|
||||||
|
GetChannel() int
|
||||||
|
GetStart() int
|
||||||
|
GetStop() int
|
||||||
|
GetLine() int
|
||||||
|
GetColumn() int
|
||||||
|
|
||||||
|
GetText() string
|
||||||
|
SetText(s string)
|
||||||
|
|
||||||
|
GetTokenIndex() int
|
||||||
|
SetTokenIndex(v int)
|
||||||
|
|
||||||
|
GetTokenSource() TokenSource
|
||||||
|
GetInputStream() CharStream
|
||||||
|
}
|
||||||
|
|
||||||
type Token struct {
|
type Token struct {
|
||||||
source *TokenSourceCharStreamPair
|
source *TokenSourceCharStreamPair
|
||||||
tokenType int // token type of the token
|
tokenType int // token type of the token
|
||||||
|
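The new IToken interface is the pivot of this commit: call sites throughout the runtime move from the concrete *Token struct to an interface, so Token and CommonToken (which overrides GetText to read lazily from the input) can flow through the same APIs. The mechanics, reduced to two methods and two illustrative implementations:

package main

import "fmt"

type iToken interface {
	GetTokenType() int
	GetText() string
}

type baseToken struct {
	ttype int
	text  string
}

func (t *baseToken) GetTokenType() int { return t.ttype }
func (t *baseToken) GetText() string   { return t.text }

// commonToken overrides GetText, e.g. to fall back to the input stream.
type commonToken struct{ baseToken }

func (t *commonToken) GetText() string {
	if t.text != "" {
		return t.text
	}
	return "<from input stream>"
}

func describe(t iToken) string { return fmt.Sprint(t.GetTokenType(), " ", t.GetText()) }

func main() {
	fmt.Println(describe(&baseToken{1, "x"}))
	fmt.Println(describe(&commonToken{baseToken{2, ""}}))
}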
@ -52,19 +70,40 @@ const (
|
||||||
TokenHiddenChannel = 1
|
TokenHiddenChannel = 1
|
||||||
)
|
)
|
||||||
|
|
||||||
// Explicitly set the text for this token. If {code text} is not
|
func (this *Token) GetChannel() int {
|
||||||
// {@code nil}, then {@link //GetText} will return this value rather than
|
return this.channel
|
||||||
// extracting the text from the input.
|
}
|
||||||
//
|
|
||||||
// @param text The explicit text of the token, or {@code nil} if the text
|
|
||||||
// should be obtained from the input along with the start and stop indexes
|
|
||||||
// of the token.
|
|
||||||
|
|
||||||
func (this *Token) text() string {
|
func (this *Token) GetStart() int {
|
||||||
|
return this.start
|
||||||
|
}
|
||||||
|
|
||||||
|
func (this *Token) GetStop() int {
|
||||||
|
return this.stop
|
||||||
|
}
|
||||||
|
|
||||||
|
func (this *Token) GetLine() int {
|
||||||
|
return this.line
|
||||||
|
}
|
||||||
|
|
||||||
|
func (this *Token) GetColumn() int {
|
||||||
|
return this.column
|
||||||
|
}
|
||||||
|
|
||||||
|
func (this *Token) GetTokenType() int {
|
||||||
|
return this.tokenType
|
||||||
|
}
|
||||||
|
|
||||||
|
func (this *Token) GetSource() *TokenSourceCharStreamPair {
|
||||||
|
return this.source
|
||||||
|
}
|
||||||
|
|
||||||
|
func (this *Token) GetText() string {
|
||||||
return this._text
|
return this._text
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *Token) setText(s string) {
|
|
||||||
|
func (this *Token) SetText(s string) {
|
||||||
this._text = s
|
this._text = s
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -72,6 +111,10 @@ func (this *Token) GetTokenIndex() int {
|
||||||
return this.tokenIndex
|
return this.tokenIndex
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (this *Token) SetTokenIndex(v int) {
|
||||||
|
this.tokenIndex = v
|
||||||
|
}
|
||||||
|
|
||||||
func (this *Token) GetTokenSource() TokenSource {
|
func (this *Token) GetTokenSource() TokenSource {
|
||||||
return this.source.tokenSource
|
return this.source.tokenSource
|
||||||
}
|
}
|
||||||
|
@ -91,7 +134,7 @@ func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start
|
||||||
t.Token = new(Token)
|
t.Token = new(Token)
|
||||||
|
|
||||||
t.source = source
|
t.source = source
|
||||||
t.tokenType = -1
|
t.tokenType = tokenType
|
||||||
t.channel = channel
|
t.channel = channel
|
||||||
t.start = start
|
t.start = start
|
||||||
t.stop = stop
|
t.stop = stop
|
||||||
|
@ -123,16 +166,15 @@ func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start
|
||||||
// @param oldToken The token to copy.
|
// @param oldToken The token to copy.
|
||||||
//
|
//
|
||||||
func (ct *CommonToken) clone() *CommonToken {
|
func (ct *CommonToken) clone() *CommonToken {
|
||||||
var t = NewCommonToken(ct.source, ct.tokenType, ct.channel, ct.start,
|
var t = NewCommonToken(ct.source, ct.tokenType, ct.channel, ct.start, ct.stop)
|
||||||
ct.stop)
|
t.tokenIndex = ct.GetTokenIndex()
|
||||||
t.tokenIndex = ct.tokenIndex
|
t.line = ct.GetLine()
|
||||||
t.line = ct.line
|
t.column = ct.GetColumn()
|
||||||
t.column = ct.column
|
t._text = ct.GetText()
|
||||||
t._text = ct.text()
|
|
||||||
return t
|
return t
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *CommonToken) text() string {
|
func (this *CommonToken) GetText() string {
|
||||||
if this._text != "" {
|
if this._text != "" {
|
||||||
return this._text
|
return this._text
|
||||||
}
|
}
|
||||||
|
@ -148,12 +190,12 @@ func (this *CommonToken) text() string {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *CommonToken) setText(text string) {
|
func (this *CommonToken) SetText(text string) {
|
||||||
this._text = text
|
this._text = text
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *CommonToken) String() string {
|
func (this *CommonToken) String() string {
|
||||||
var txt = this.text()
|
var txt = this.GetText()
|
||||||
if txt != "" {
|
if txt != "" {
|
||||||
txt = strings.Replace(txt, "\n", "", -1)
|
txt = strings.Replace(txt, "\n", "", -1)
|
||||||
txt = strings.Replace(txt, "\r", "", -1)
|
txt = strings.Replace(txt, "\r", "", -1)
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
package antlr4
|
package antlr4
|
||||||
|
|
||||||
type TokenSource interface {
|
type TokenSource interface {
|
||||||
nextToken() *Token
|
nextToken() IToken
|
||||||
skip()
|
skip()
|
||||||
more()
|
more()
|
||||||
getLine() int
|
getLine() int
|
||||||
|
|
|
@ -3,14 +3,14 @@ package antlr4
|
||||||
type TokenStream interface {
|
type TokenStream interface {
|
||||||
IntStream
|
IntStream
|
||||||
|
|
||||||
LT(k int) *Token
|
LT(k int) IToken
|
||||||
|
|
||||||
Get(index int) *Token
|
Get(index int) IToken
|
||||||
GetTokenSource() TokenSource
|
GetTokenSource() TokenSource
|
||||||
SetTokenSource(TokenSource)
|
SetTokenSource(TokenSource)
|
||||||
|
|
||||||
GetAllText() string
|
GetAllText() string
|
||||||
GetTextFromInterval(*Interval) string
|
GetTextFromInterval(*Interval) string
|
||||||
GetTextFromRuleContext(IRuleContext) string
|
GetTextFromRuleContext(IRuleContext) string
|
||||||
GetTextFromTokens(*Token, *Token) string
|
GetTextFromTokens(IToken, IToken) string
|
||||||
}
|
}
|
||||||
|
|
|
@ -16,7 +16,7 @@ func (this *TraceListener) VisitErrorNode(_ ErrorNode) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *TraceListener) EnterEveryRule(ctx IParserRuleContext) {
|
func (this *TraceListener) EnterEveryRule(ctx IParserRuleContext) {
|
||||||
fmt.Println("enter " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).text())
|
fmt.Println("enter " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).GetText())
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *TraceListener) VisitTerminal(node TerminalNode) {
|
func (this *TraceListener) VisitTerminal(node TerminalNode) {
|
||||||
|
@ -24,5 +24,5 @@ func (this *TraceListener) VisitTerminal(node TerminalNode) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *TraceListener) ExitEveryRule(ctx IParserRuleContext) {
|
func (this *TraceListener) ExitEveryRule(ctx IParserRuleContext) {
|
||||||
fmt.Println("exit " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).text())
|
fmt.Println("exit " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).GetText())
|
||||||
}
|
}
|
||||||
|
|
|
@ -26,6 +26,7 @@ type ITransition interface {
|
||||||
type Transition struct {
|
type Transition struct {
|
||||||
target IATNState
|
target IATNState
|
||||||
isEpsilon bool
|
isEpsilon bool
|
||||||
|
label_ int
|
||||||
label *IntervalSet
|
label *IntervalSet
|
||||||
serializationType int
|
serializationType int
|
||||||
}
|
}
|
||||||
|
@ -124,8 +125,6 @@ var TransitionserializationNames = []string{
|
||||||
// TODO: make all transitions sets? no, should remove set edges
|
// TODO: make all transitions sets? no, should remove set edges
|
||||||
type AtomTransition struct {
|
type AtomTransition struct {
|
||||||
*Transition
|
*Transition
|
||||||
label_ int
|
|
||||||
label *IntervalSet
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewAtomTransition(target IATNState, label int) *AtomTransition {
|
func NewAtomTransition(target IATNState, label int) *AtomTransition {
|
||||||
|
@ -236,6 +235,11 @@ func (t *RangeTransition) String() string {
|
||||||
return "'" + string(t.start) + "'..'" + string(t.stop) + "'"
|
return "'" + string(t.start) + "'..'" + string(t.stop) + "'"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type IAbstractPredicateTransition interface {
|
||||||
|
ITransition
|
||||||
|
IAbstractPredicateTransitionFoo()
|
||||||
|
}
|
||||||
|
|
||||||
type AbstractPredicateTransition struct {
|
type AbstractPredicateTransition struct {
|
||||||
*Transition
|
*Transition
|
||||||
}
|
}
|
||||||
|
@ -248,8 +252,10 @@ func NewAbstractPredicateTransition(target IATNState) *AbstractPredicateTransiti
|
||||||
return t
|
return t
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (a *AbstractPredicateTransition) IAbstractPredicateTransitionFoo() {}
|
||||||
|
|
||||||
type PredicateTransition struct {
|
type PredicateTransition struct {
|
||||||
*Transition
|
*AbstractPredicateTransition
|
||||||
|
|
||||||
isCtxDependent bool
|
isCtxDependent bool
|
||||||
ruleIndex, predIndex int
|
ruleIndex, predIndex int
|
||||||
|
@ -258,7 +264,7 @@ type PredicateTransition struct {
|
||||||
func NewPredicateTransition(target IATNState, ruleIndex, predIndex int, isCtxDependent bool) *PredicateTransition {
|
func NewPredicateTransition(target IATNState, ruleIndex, predIndex int, isCtxDependent bool) *PredicateTransition {
|
||||||
|
|
||||||
t := new(PredicateTransition)
|
t := new(PredicateTransition)
|
||||||
t.Transition = NewTransition(target)
|
t.AbstractPredicateTransition = NewAbstractPredicateTransition(target)
|
||||||
|
|
||||||
t.serializationType = TransitionPREDICATE
|
t.serializationType = TransitionPREDICATE
|
||||||
t.ruleIndex = ruleIndex
|
t.ruleIndex = ruleIndex
|
||||||
|
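The IAbstractPredicateTransitionFoo marker method emulates Java's abstract-class hierarchy: only types that embed AbstractPredicateTransition inherit the marker, so only they satisfy the interface. Sketched with shortened, assumed names:

package main

import "fmt"

type isPredicate interface{ markPredicate() }

type abstractPredicate struct{}

// Marker: embedding abstractPredicate is the only way to satisfy isPredicate.
func (abstractPredicate) markPredicate() {}

type precedencePredicate struct {
	abstractPredicate
	precedence int
}

func main() {
	var p isPredicate = precedencePredicate{precedence: 2}
	fmt.Printf("%T\n", p) // main.precedencePredicate
}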
@ -381,7 +387,7 @@ func (t *WildcardTransition) String() string {
|
||||||
}
|
}
|
||||||
|
|
||||||
type PrecedencePredicateTransition struct {
|
type PrecedencePredicateTransition struct {
|
||||||
*Transition
|
*AbstractPredicateTransition
|
||||||
|
|
||||||
precedence int
|
precedence int
|
||||||
}
|
}
|
||||||
|
@ -389,7 +395,7 @@ type PrecedencePredicateTransition struct {
|
||||||
func NewPrecedencePredicateTransition(target IATNState, precedence int) *PrecedencePredicateTransition {
|
func NewPrecedencePredicateTransition(target IATNState, precedence int) *PrecedencePredicateTransition {
|
||||||
|
|
||||||
t := new(PrecedencePredicateTransition)
|
t := new(PrecedencePredicateTransition)
|
||||||
t.Transition = NewTransition(target)
|
t.AbstractPredicateTransition = NewAbstractPredicateTransition(target)
|
||||||
|
|
||||||
t.serializationType = TransitionPRECEDENCE
|
t.serializationType = TransitionPRECEDENCE
|
||||||
t.precedence = precedence
|
t.precedence = precedence
|
||||||
|
|
|
@ -41,7 +41,7 @@ type RuleNode interface {
|
||||||
type TerminalNode interface {
|
type TerminalNode interface {
|
||||||
ParseTree
|
ParseTree
|
||||||
|
|
||||||
getSymbol() *Token
|
getSymbol() IToken
|
||||||
}
|
}
|
||||||
|
|
||||||
type ErrorNode interface {
|
type ErrorNode interface {
|
||||||
|
@ -87,10 +87,10 @@ type ParseTreeListener interface {
|
||||||
type TerminalNodeImpl struct {
|
type TerminalNodeImpl struct {
|
||||||
parentCtx IRuleContext
|
parentCtx IRuleContext
|
||||||
|
|
||||||
symbol *Token
|
symbol IToken
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewTerminalNodeImpl(symbol *Token) *TerminalNodeImpl {
|
func NewTerminalNodeImpl(symbol IToken) *TerminalNodeImpl {
|
||||||
tn := new(TerminalNodeImpl)
|
tn := new(TerminalNodeImpl)
|
||||||
|
|
||||||
tn.parentCtx = nil
|
tn.parentCtx = nil
|
||||||
|
@ -112,7 +112,7 @@ func (this *TerminalNodeImpl) setChildren(t []Tree) {
|
||||||
panic("Cannot set children on terminal node")
|
panic("Cannot set children on terminal node")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *TerminalNodeImpl) getSymbol() *Token {
|
func (this *TerminalNodeImpl) getSymbol() IToken {
|
||||||
return this.symbol
|
return this.symbol
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -132,7 +132,7 @@ func (this *TerminalNodeImpl) GetSourceInterval() *Interval {
|
||||||
if this.symbol == nil {
|
if this.symbol == nil {
|
||||||
return TreeINVALID_INTERVAL
|
return TreeINVALID_INTERVAL
|
||||||
}
|
}
|
||||||
var tokenIndex = this.symbol.tokenIndex
|
var tokenIndex = this.symbol.GetTokenIndex()
|
||||||
return NewInterval(tokenIndex, tokenIndex)
|
return NewInterval(tokenIndex, tokenIndex)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -145,14 +145,14 @@ func (this *TerminalNodeImpl) accept(Visitor ParseTreeVisitor) interface{} {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *TerminalNodeImpl) GetText() string {
|
func (this *TerminalNodeImpl) GetText() string {
|
||||||
return this.symbol.text()
|
return this.symbol.GetText()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *TerminalNodeImpl) String() string {
|
func (this *TerminalNodeImpl) String() string {
|
||||||
if this.symbol.tokenType == TokenEOF {
|
if this.symbol.GetTokenType() == TokenEOF {
|
||||||
return "<EOF>"
|
return "<EOF>"
|
||||||
} else {
|
} else {
|
||||||
return this.symbol.text()
|
return this.symbol.GetText()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -166,7 +166,7 @@ type ErrorNodeImpl struct {
|
||||||
*TerminalNodeImpl
|
*TerminalNodeImpl
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewErrorNodeImpl(token *Token) *ErrorNodeImpl {
|
func NewErrorNodeImpl(token IToken) *ErrorNodeImpl {
|
||||||
en := new(ErrorNodeImpl)
|
en := new(ErrorNodeImpl)
|
||||||
en.TerminalNodeImpl = NewTerminalNodeImpl(token)
|
en.TerminalNodeImpl = NewTerminalNodeImpl(token)
|
||||||
return en
|
return en
|
||||||
|
|
|
@ -46,15 +46,15 @@ func TreesgetNodeText(t Tree, ruleNames []string, recog *Parser) string {
|
||||||
return fmt.Sprint(t2)
|
return fmt.Sprint(t2)
|
||||||
} else if t2, ok := t.(TerminalNode); ok {
|
} else if t2, ok := t.(TerminalNode); ok {
|
||||||
if t2.getSymbol() != nil {
|
if t2.getSymbol() != nil {
|
||||||
return t2.getSymbol().text()
|
return t2.getSymbol().GetText()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// no recog for rule names
|
// no recog for rule names
|
||||||
var payload = t.getPayload()
|
var payload = t.getPayload()
|
||||||
if p2, ok := payload.(*Token); ok {
|
if p2, ok := payload.(IToken); ok {
|
||||||
return p2.text()
|
return p2.GetText()
|
||||||
}
|
}
|
||||||
|
|
||||||
return fmt.Sprint(t.getPayload())
|
return fmt.Sprint(t.getPayload())
|
||||||
|
@ -104,7 +104,7 @@ func Trees_findAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTr
|
||||||
t3, ok2 := t.(IParserRuleContext)
|
t3, ok2 := t.(IParserRuleContext)
|
||||||
|
|
||||||
if findTokens && ok {
|
if findTokens && ok {
|
||||||
if t2.getSymbol().tokenType == index {
|
if t2.getSymbol().GetTokenType() == index {
|
||||||
nodes = append(nodes, t2)
|
nodes = append(nodes, t2)
|
||||||
}
|
}
|
||||||
} else if !findTokens && ok2 {
|
} else if !findTokens && ok2 {
|
||||||
|
|
|
@ -44,10 +44,6 @@ func (s *IntStack) Push(e int) {
|
||||||
*s = append(*s, e)
|
*s = append(*s, e)
|
||||||
}
|
}
|
||||||
|
|
||||||
func arrayString(a []interface{}) string {
|
|
||||||
return fmt.Sprint(a)
|
|
||||||
}
|
|
||||||
|
|
||||||
func hashCode(s string) string {
|
func hashCode(s string) string {
|
||||||
h := fnv.New32a()
|
h := fnv.New32a()
|
||||||
h.Write([]byte((s)))
|
h.Write([]byte((s)))
|
||||||
|
@ -81,8 +77,11 @@ func NewSet(hashFunction func(interface{}) string, equalsFunction func(interface
|
||||||
return s
|
return s
|
||||||
}
|
}
|
||||||
|
|
||||||
func standardEqualsFunction(a interface{}, b interface{}) bool {
|
func standardHashFunction(a interface{}) string {
|
||||||
return standardHashFunction(a) == standardHashFunction(b)
|
h := fnv.New32a()
|
||||||
|
v, _ := getBytes(a)
|
||||||
|
h.Write(v)
|
||||||
|
return fmt.Sprint(h.Sum32())
|
||||||
}
|
}
|
||||||
|
|
||||||
func getBytes(key interface{}) ([]byte, error) {
|
func getBytes(key interface{}) ([]byte, error) {
|
||||||
|
@ -95,13 +94,12 @@ func getBytes(key interface{}) ([]byte, error) {
|
||||||
return buf.Bytes(), nil
|
return buf.Bytes(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func standardHashFunction(a interface{}) string {
|
|
||||||
h := fnv.New32a()
|
func standardEqualsFunction(a interface{}, b interface{}) bool {
|
||||||
v, _ := getBytes(a)
|
return standardHashFunction(a) == standardHashFunction(b)
|
||||||
h.Write(v)
|
|
||||||
return fmt.Sprint(h.Sum32())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
func (this *Set) length() int {
|
func (this *Set) length() int {
|
||||||
return len(this.data)
|
return len(this.data)
|
||||||
}
|
}
|
||||||
|
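After the reordering here, the Set's strategy reads top-down: standardHashFunction gob-encodes an arbitrary value and feeds the bytes to FNV-1a, and standardEqualsFunction defines equality as hash equality. A runnable copy of the idea:

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"
	"hash/fnv"
)

func getBytes(key interface{}) ([]byte, error) {
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(key); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}

// standardHashFunction: gob-encode the value, FNV-1a the resulting bytes.
func standardHashFunction(a interface{}) string {
	h := fnv.New32a()
	v, _ := getBytes(a)
	h.Write(v)
	return fmt.Sprint(h.Sum32())
}

func main() {
	fmt.Println(standardHashFunction("abc") == standardHashFunction("abc")) // true
	fmt.Println(standardHashFunction(1) == standardHashFunction(2))        // false
}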
@ -110,6 +108,7 @@ func (this *Set) add(value interface{}) interface{} {
|
||||||
|
|
||||||
var hash = this.hashFunction(value)
|
var hash = this.hashFunction(value)
|
||||||
var key = "hash_" + hashCode(hash)
|
var key = "hash_" + hashCode(hash)
|
||||||
|
|
||||||
values := this.data[key]
|
values := this.data[key]
|
||||||
|
|
||||||
if this.data[key] != nil {
|
if this.data[key] != nil {
|
||||||
|
|
|
@ -177,9 +177,11 @@ BufferedTokenStream.prototype.getTokens = function(start, stop, types) {
|
||||||
if (types === undefined) {
|
if (types === undefined) {
|
||||||
types = null;
|
types = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (start < 0 || stop < 0) {
|
if (start < 0 || stop < 0) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
this.lazyInit();
|
this.lazyInit();
|
||||||
var subset = [];
|
var subset = [];
|
||||||
if (stop >= this.tokens.length) {
|
if (stop >= this.tokens.length) {
|
||||||
|
@ -194,6 +196,7 @@ BufferedTokenStream.prototype.getTokens = function(start, stop, types) {
|
||||||
subset.push(t);
|
subset.push(t);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return subset;
|
return subset;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@@ -73,6 +73,9 @@ CommonTokenFactory.prototype.constructor = CommonTokenFactory;
 CommonTokenFactory.DEFAULT = new CommonTokenFactory();
 
 CommonTokenFactory.prototype.create = function(source, type, text, channel, start, stop, line, column) {
 
+    console.log("Token factory creating: " + text)
 
     var t = new CommonToken(source, type, channel, start, stop);
     t.line = line;
     t.column = column;

@@ -85,6 +88,9 @@ CommonTokenFactory.prototype.create = function(source, type, text, channel, star
 };
 
 CommonTokenFactory.prototype.createThin = function(type, text) {
 
+    console.log("Token factory creating: " + text)
 
     var t = new CommonToken(null, type);
     t.text = text;
     return t;
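The factory distinguishes two token shapes: create() binds a CommonToken to an input source and character range, while createThin() carries only a type and text. A sketch of the thin path; the token type number and the require path are illustrative assumptions:

// Require path assumed from this runtime's layout.
var CommonTokenFactory = require("./antlr4/CommonTokenFactory").CommonTokenFactory;

var factory = CommonTokenFactory.DEFAULT;
var plus = factory.createThin(5, "+");  // hypothetical token type 5, text "+"
console.log(plus.text);                 // "+"; also triggers the trace line added above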
@@ -41,6 +41,8 @@ function FileStream(fileName) {
     var data = fs.readFileSync(fileName, "utf8");
     InputStream.call(this, data);
     this.fileName = fileName;
 
+    console.log(data);
     return this;
 }
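FileStream simply slurps the file with fs.readFileSync and delegates to InputStream, which is why the harness later in this commit can construct one in a single line:

var antlr4 = require("./antlr4/index");
var input = new antlr4.FileStream("foo.txt");  // reads foo.txt eagerly, UTF-8
console.log(input.fileName);                   // "foo.txt"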
@@ -50,6 +50,7 @@ IntervalSet.prototype.addRange = function(l, h) {
 };
 
 IntervalSet.prototype.addInterval = function(v) {
+    console.log("addInterval" + v.toString())
     if (this.intervals === null) {
         this.intervals = [];
         this.intervals.push(v);

@@ -80,7 +81,9 @@ IntervalSet.prototype.addInterval = function(v) {
 };
 
 IntervalSet.prototype.addSet = function(other) {
+    console.log("addSet")
     if (other.intervals !== null) {
+        console.log(other.intervals.length)
         for (var k = 0; k < other.intervals.length; k++) {
             var i = other.intervals[k];
             this.addInterval(new Interval(i.start, i.stop));
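Note that addSet copies interval by interval, constructing a fresh Interval for each entry so the receiving set never aliases the other set's objects. Sketched usage; the require path is an assumption, and whether stop is inclusive is left to IntervalSet's own convention:

// Require path assumed; IntervalSet.js is taken to export both names.
var IntervalSet = require("./antlr4/IntervalSet").IntervalSet;
var Interval = require("./antlr4/IntervalSet").Interval;

var a = new IntervalSet();
a.addRange(1, 3);
var b = new IntervalSet();
b.addInterval(new Interval(5, 6));
a.addSet(b);   // a now also covers b's intervals; b's objects are not shared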
@@ -110,7 +110,15 @@ LL1Analyzer.prototype.LOOK = function(s, stopState, ctx) {
     var seeThruPreds = true; // ignore preds; get all lookahead
     ctx = ctx || null;
     var lookContext = ctx!==null ? predictionContextFromRuleContext(s.atn, ctx) : null;
+    console.log("DEBUG 5")
+    console.log(s.toString())
+    console.log(stopState)
+    console.log(lookContext)
+    console.log(r.toString())
+    console.log(seeThruPreds)
+    console.log("=====")
     this._LOOK(s, stopState, lookContext, r, new Set(), new BitSet(), seeThruPreds, true);
+    console.log(r.toString())
     return r;
 };

@@ -151,6 +159,7 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
     }
     lookBusy.add(c);
     if (s === stopState) {
+        console.log("DEBUG 6")
         if (ctx ===null) {
             look.addOne(Token.EPSILON);
             return;

@@ -168,6 +177,7 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
             return;
         }
         if (ctx !== PredictionContext.EMPTY) {
+            console.log("DEBUG 7")
             // run thru all possible stack tops in ctx
             for(var i=0; i<ctx.length; i++) {
                 var returnState = this.atn.states[ctx.getReturnState(i)];

@@ -187,6 +197,9 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
     for(var j=0; j<s.transitions.length; j++) {
         var t = s.transitions[j];
         if (t.constructor === RuleTransition) {
 
+            console.log("DEBUG 8")
 
             if (calledRuleStack.contains(t.target.ruleIndex)) {
                 continue;
             }

@@ -197,18 +210,26 @@ LL1Analyzer.prototype._LOOK = function(s, stopState , ctx, look, lookBusy, calle
             } finally {
                 calledRuleStack.remove(t.target.ruleIndex);
             }
 
+            console.log(look.toString())
 
         } else if (t instanceof AbstractPredicateTransition ) {
+            console.log("DEBUG 9")
             if (seeThruPreds) {
                 this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
             } else {
                 look.addOne(LL1Analyzer.HIT_PRED);
             }
         } else if( t.isEpsilon) {
+            console.log("DEBUG 10")
             this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
         } else if (t.constructor === WildcardTransition) {
+            console.log("DEBUG 11")
             look.addRange( Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType );
         } else {
+            console.log("DEBUG 12")
             var set = t.label;
+            console.log(set.toString())
             if (set !== null) {
                 if (t instanceof NotSetTransition) {
                     set = set.complement(Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType);
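LOOK now dumps its inputs and the resulting IntervalSet around the _LOOK call, and _LOOK itself prints a numbered breadcrumb per transition kind. Gating those prints behind a flag would keep the tracing available without flooding stdout; a small sketch of that pattern (the flag and helper are suggestions, not runtime API):

var LOOK_DEBUG = false;   // flip to true while debugging lookahead

function lookTrace() {
    if (LOOK_DEBUG) {
        console.log.apply(console, arguments);
    }
}

// e.g. instead of a bare console.log("DEBUG 6"):
//   lookTrace("DEBUG 6: at stop state", s.toString());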
@@ -134,6 +134,8 @@ Parser.prototype.reset = function() {
 
 Parser.prototype.match = function(ttype) {
     var t = this.getCurrentToken();
 
+    console.log("TOKEN IS " + t.text)
     if (t.type === ttype) {
         this._errHandler.reportMatch(this);
         this.consume();
@@ -160,9 +160,11 @@ Recognizer.prototype.precpred = function(localctx , precedence) {
 
 Object.defineProperty(Recognizer.prototype, "state", {
     get : function() {
 
         return this._stateNumber;
     },
     set : function(state) {
+        console.log("SETTING STATE" + state + " from " + this._stateNumber )
         this._stateNumber = state;
     }
 });
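Because "state" is defined through Object.defineProperty, every assignment anywhere in the recognizer funnels through this one setter, so a single log line traces all state transitions. The same pattern in isolation:

function Traced() { this._x = 0; }
Object.defineProperty(Traced.prototype, "x", {
    get: function () { return this._x; },
    set: function (v) {
        console.log("SETTING x to " + v + " from " + this._x);
        this._x = v;
    }
});

var t = new Traced();
t.x = 42;   // logs: SETTING x to 42 from 0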
@@ -75,8 +75,11 @@ ATN.prototype.nextTokensInContext = function(s, ctx) {
 // rule.
 ATN.prototype.nextTokensNoContext = function(s) {
     if (s.nextTokenWithinRule !== null ) {
+        console.log("DEBUG 1")
         return s.nextTokenWithinRule;
     }
+    console.log("DEBUG 2")
+    console.log(this.nextTokensInContext(s, null).toString())
     s.nextTokenWithinRule = this.nextTokensInContext(s, null);
     s.nextTokenWithinRule.readOnly = true;
     return s.nextTokenWithinRule;
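nextTokensNoContext is a lazy cache: the first call computes the token set, marks it readOnly so callers cannot mutate the shared copy, and stores it on the state; later calls return the stored set. A generic sketch of that compute-once shape, under the same convention:

// Assumes obj[field] starts out null, as nextTokenWithinRule does above.
function cachedOnce(obj, field, compute) {
    if (obj[field] !== null) {
        return obj[field];          // cache hit
    }
    obj[field] = compute();
    obj[field].readOnly = true;     // freeze by convention, as above
    return obj[field];
}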
@@ -101,8 +101,8 @@ function LexerATNSimulator(recog, atn, decisionToDFA, sharedContextCache) {
 LexerATNSimulator.prototype = Object.create(ATNSimulator.prototype);
 LexerATNSimulator.prototype.constructor = LexerATNSimulator;
 
-LexerATNSimulator.debug = false;
-LexerATNSimulator.dfa_debug = false;
+LexerATNSimulator.prototype.debug = true;
+LexerATNSimulator.prototype.dfa_debug = false;
 
 LexerATNSimulator.MIN_DFA_EDGE = 0;
 LexerATNSimulator.MAX_DFA_EDGE = 127; // forces unicode to stay in ATN
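Moving debug and dfa_debug from the constructor onto the prototype is what makes the later if (this.debug) checks actually fire: a property stored on the constructor function is invisible to instance lookups. Demonstrated in isolation:

function Sim() {}
Sim.debug = true;                // constructor property
console.log(new Sim().debug);    // undefined: not on the prototype chain
Sim.prototype.debug = true;      // prototype property
console.log(new Sim().debug);    // true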
@@ -117,6 +117,9 @@ LexerATNSimulator.prototype.copyState = function(simulator) {
 };
 
 LexerATNSimulator.prototype.match = function(input, mode) {
 
+    console.log("MATCH")
 
     this.match_calls += 1;
     this.mode = mode;
     var mark = input.mark();

@@ -225,6 +228,8 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
         t = input.LA(1);
         s = target; // flip; current DFA target becomes new src/from state
     }
 
+    console.log("OUT")
     return this.failOrAccept(this.prevAccept, input, s.configs, t);
 };

@@ -286,6 +291,7 @@ LexerATNSimulator.prototype.failOrAccept = function(prevAccept, input, reach, t)
         var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor;
         this.accept(input, lexerActionExecutor, this.startIndex,
                 prevAccept.index, prevAccept.line, prevAccept.column);
+        console.log(prevAccept.dfaState.prediction)
         return prevAccept.dfaState.prediction;
     } else {
         // if no accept and EOF is first char, return EOF

@@ -311,7 +317,7 @@ LexerATNSimulator.prototype.getReachableConfigSet = function(input, closure,
         continue;
     }
     if (this.debug) {
-        console.log("testing %s at %s\n", this.getTokenName(t), cfg
+        console.log("testing %s at %s", this.getTokenName(t), cfg
                 .toString(this.recog, true));
     }
     for (var j = 0; j < cfg.state.transitions.length; j++) {

@@ -338,7 +344,7 @@ LexerATNSimulator.prototype.getReachableConfigSet = function(input, closure,
 LexerATNSimulator.prototype.accept = function(input, lexerActionExecutor,
         startIndex, index, line, charPos) {
     if (this.debug) {
-        console.log("ACTION %s\n", lexerActionExecutor);
+        console.log("ACTION %s", lexerActionExecutor);
     }
     // seek to after last char in token
     input.seek(index);
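Both log changes above drop a trailing "\n": Node's console.log applies the printf-style %s substitution and then appends its own newline, so the explicit one only produced blank lines. Compare:

console.log("ACTION %s\n", "skip");   // prints "ACTION skip" plus an empty line
console.log("ACTION %s", "skip");     // prints "ACTION skip"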
@@ -358,6 +364,7 @@ LexerATNSimulator.prototype.getReachableTarget = function(trans, t) {
 };
 
 LexerATNSimulator.prototype.computeStartState = function(input, p) {
 
     var initialContext = PredictionContext.EMPTY;
     var configs = new OrderedATNConfigSet();
     for (var i = 0; i < p.transitions.length; i++) {

@@ -365,6 +372,7 @@ LexerATNSimulator.prototype.computeStartState = function(input, p) {
         var cfg = new LexerATNConfig({state:target, alt:i+1, context:initialContext}, null);
         this.closure(input, cfg, configs, false, false, false);
     }
 
     return configs;
 };

@@ -382,14 +390,17 @@ LexerATNSimulator.prototype.closure = function(input, config, configs,
     if (this.debug) {
         console.log("closure(" + config.toString(this.recog, true) + ")");
     }
 
     if (config.state instanceof RuleStopState) {
 
         if (this.debug) {
-            if (this.recog !== null) {
-                console.log("closure at %s rule stop %s\n", this.recog.getRuleNames()[config.state.ruleIndex], config);
+            if (this.recog !== null && this.recog.getRuleNames) {
+                console.log("closure at %s rule stop %s", this.recog.getRuleNames()[config.state.ruleIndex], config);
             } else {
-                console.log("closure at rule stop %s\n", config);
+                console.log("closure at rule stop %s", config);
             }
         }
 
         if (config.context === null || config.context.hasEmptyPath()) {
             if (config.context === null || config.context.isEmpty()) {
                 configs.add(config);

@@ -651,6 +662,7 @@ LexerATNSimulator.prototype.consume = function(input) {
 };
 
 LexerATNSimulator.prototype.getTokenName = function(tt) {
+    console.log(tt);
     if (tt === -1) {
         return "EOF";
     } else {
@@ -313,7 +313,7 @@ function ParserATNSimulator(parser, atn, decisionToDFA, sharedContextCache) {
 ParserATNSimulator.prototype = Object.create(ATNSimulator.prototype);
 ParserATNSimulator.prototype.constructor = ParserATNSimulator;
 
-ParserATNSimulator.prototype.debug = false;
+ParserATNSimulator.prototype.debug = true;
 ParserATNSimulator.prototype.debug_list_atn_decisions = false;
 ParserATNSimulator.prototype.dfa_debug = false;
 ParserATNSimulator.prototype.retry_debug = false;

@@ -323,6 +323,9 @@ ParserATNSimulator.prototype.reset = function() {
 };
 
 ParserATNSimulator.prototype.adaptivePredict = function(input, decision, outerContext) {
 
+    console.log("adaptive predict")
 
     if (this.debug || this.debug_list_atn_decisions) {
         console.log("adaptivePredict decision " + decision +
                 " exec LA(1)==" + this.getLookaheadName(input) +

@@ -1512,13 +1515,16 @@ ParserATNSimulator.prototype.getConflictingAltsOrUniqueAlt = function(configs) {
 };
 
 ParserATNSimulator.prototype.getTokenName = function( t) {
 
+    console.log("Get token name")
 
     if (t===Token.EOF) {
         return "EOF";
     }
     if( this.parser!==null && this.parser.literalNames!==null) {
         if (t >= this.parser.literalNames.length) {
             console.log("" + t + " ttype out of range: " + this.parser.literalNames);
-            console.log("" + this.parser.getInputStream().getTokens());
+            // console.log(this.parser.getInputStream().getTokens());
         } else {
             return this.parser.literalNames[t] + "<" + t + ">";
         }

@@ -1529,7 +1535,7 @@ ParserATNSimulator.prototype.getTokenName = function( t) {
 ParserATNSimulator.prototype.getLookaheadName = function(input) {
     return this.getTokenName(input.LA(1));
 };
 
-
+``
 // Used for debugging in adaptivePredict around execATN but I cut
 //  it out for clarity now that alg. works well.  We can leave this
 //  "dead" code for a bit.
@@ -74,7 +74,7 @@ ConsoleErrorListener.INSTANCE = new ConsoleErrorListener();
 // </pre>
 //
 ConsoleErrorListener.prototype.syntaxError = function(recognizer, offendingSymbol, line, column, msg, e) {
-    console.error("line " + line + ":" + column + " " + msg);
+    console.log("line " + line + ":" + column + " " + msg);
 };
 
 function ProxyErrorListener(delegates) {
@@ -244,16 +244,28 @@ DefaultErrorStrategy.prototype.sync = function(recognizer) {
     if (this.inErrorRecoveryMode(recognizer)) {
         return;
     }
 
+    console.log("STATE" + recognizer.state)
 
     var s = recognizer._interp.atn.states[recognizer.state];
     var la = recognizer.getTokenStream().LA(1);
 
+    console.log("LA" + la);
 
     // try cheaper subset first; might get lucky. seems to shave a wee bit off
     if (la===Token.EOF || recognizer.atn.nextTokens(s).contains(la)) {
+        console.log("OK1")
         return;
     }
     // Return but don't end recovery. only do that upon valid token match
     if(recognizer.isExpectedToken(la)) {
+        console.log("OK2")
         return;
     }
 
+    console.log("LA" + la)
+    // console.log(recognizer.GetATN().nextTokens(s, nil))
 
     switch (s.stateType) {
     case ATNState.BLOCK_START:
     case ATNState.STAR_BLOCK_START:
@@ -0,0 +1 @@
+1 + 2 = 3 + 5
@@ -1,11 +1,41 @@
 var antlr4 = require("./antlr4/index"),
+    tree = antlr4.tree
     ArithmeticLexer = require("./ArithmeticLexer").ArithmeticLexer,
-    ArithmeticParser = require("./ArithmeticParser").ArithmeticParser;
+    ArithmeticParser = require("./ArithmeticParser").ArithmeticParser,
+    ArithmeticListener = require("./ArithmeticListener").ArithmeticListener;
 
 var a = new antlr4.FileStream("foo.txt");
 var l = new ArithmeticLexer(a);
 var s = new antlr4.CommonTokenStream(l, 0);
 var p = new ArithmeticParser(s);
+p.buildParseTrees = true;
+
+//KeyPrinter = function() {
+//    ArithmeticListener.call(this); // inherit default listener
+//    return this;
+//};
+//
+//// inherit default listener
+//KeyPrinter.prototype = Object.create(ArithmeticListener.prototype);
+//KeyPrinter.prototype.constructor = KeyPrinter;
+//
+//// override default listener behavior
+//KeyPrinter.prototype.exitAtom = function(ctx) {
+//
+//    console.log("Oh, a atom!", ctx.start.source[1].strdata[ctx.start.start]);
+//};
+//
+//KeyPrinter.prototype.exitExpression = function(ctx) {
+//
+//    console.log("Oh, an expression!", ctx);
+//    throw new Error();
+//};
+
+var tree = p.equation();
+
+//var printer = new KeyPrinter();
+//antlr4.tree.ParseTreeWalker.DEFAULT.walk(printer, tree);
+
+//console.log( tree.children[0].children[0].children[0].children );
+
-p.equation();
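With buildParseTrees enabled, p.equation() now returns a tree that the commented-out listener scaffold could walk. Uncommented, that scaffold reads roughly as below; the names and bodies are taken from the comments above, including exitAtom's indexing into the token source as written there:

var KeyPrinter = function() {
    ArithmeticListener.call(this); // inherit default listener
    return this;
};
KeyPrinter.prototype = Object.create(ArithmeticListener.prototype);
KeyPrinter.prototype.constructor = KeyPrinter;

KeyPrinter.prototype.exitAtom = function(ctx) {
    console.log("Oh, a atom!", ctx.start.source[1].strdata[ctx.start.start]);
};

var printer = new KeyPrinter();
antlr4.tree.ParseTreeWalker.DEFAULT.walk(printer, tree);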
@@ -820,15 +820,15 @@ function <lexer.name>(input) {
 
 <rest(lexer.modes):{m| <lexer.name>.<m> = <i>;}; separator="\n">
 
-<lexer.name>.modeNames = [ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> ];
+<lexer.name>.prototype.modeNames = [ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> ];
 
-<lexer.name>.literalNames = [ <lexer.literalNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];
+<lexer.name>.prototype.literalNames = [ <lexer.literalNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];
 
-<lexer.name>.symbolicNames = [ <lexer.symbolicNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];
+<lexer.name>.prototype.symbolicNames = [ <lexer.symbolicNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];
 
-<lexer.name>.ruleNames = [ <lexer.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor> ];
+<lexer.name>.prototype.ruleNames = [ <lexer.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor> ];
 
-<lexer.name>.grammarFileName = "<lexer.grammarFileName>";
+<lexer.name>.prototype.grammarFileName = "<lexer.grammarFileName>";
 
 <namedActions.members>