forked from jasder/antlr

Lint: Use uniform receiver names

This commit is contained in:
parent 8a1b68feb4
commit 9ed1ed1003
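The diff below renames method receivers so that all methods on a given type use one consistent receiver name (for example `c` for `*CommonTokenStream`, `i` for `*IntervalSet`, `prc` for `*BaseParserRuleContext`, `p` for `*BaseParser`). This is the convention that Go lint tools report with a message along the lines of "receiver name should be consistent with previous receiver name". A minimal sketch of the rule, using a hypothetical TokenBuffer type rather than the ANTLR types touched in this commit:

package example

// TokenBuffer is a hypothetical type used only to illustrate the lint rule.
type TokenBuffer struct {
	tokens []string
	index  int
}

// Before: methods on the same type used different receiver names ("tb" in one
// method, "b" in another), which receiver-name linters flag.
//
//	func (tb *TokenBuffer) Size() int { return len(tb.tokens) }
//	func (b *TokenBuffer) Reset()     { b.index = 0 }

// After: every method on the type uses the same receiver name.
func (t *TokenBuffer) Size() int { return len(t.tokens) }
func (t *TokenBuffer) Reset()    { t.index = 0 }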
@@ -70,42 +70,42 @@ func NewCommonTokenStream(lexer Lexer, channel int) *CommonTokenStream {
 	return ts
 }
 
-func (bt *CommonTokenStream) GetAllTokens() []Token {
-	return bt.tokens
+func (c *CommonTokenStream) GetAllTokens() []Token {
+	return c.tokens
 }
 
-func (bt *CommonTokenStream) Mark() int {
+func (c *CommonTokenStream) Mark() int {
 	return 0
 }
 
-func (bt *CommonTokenStream) Release(marker int) {
+func (c *CommonTokenStream) Release(marker int) {
 	// no resources to release
 }
 
-func (bt *CommonTokenStream) reset() {
-	bt.Seek(0)
+func (c *CommonTokenStream) reset() {
+	c.Seek(0)
 }
 
-func (bt *CommonTokenStream) Seek(index int) {
-	bt.lazyInit()
-	bt.index = bt.adjustSeekIndex(index)
+func (c *CommonTokenStream) Seek(index int) {
+	c.lazyInit()
+	c.index = c.adjustSeekIndex(index)
 }
 
-func (bt *CommonTokenStream) Get(index int) Token {
-	bt.lazyInit()
-	return bt.tokens[index]
+func (c *CommonTokenStream) Get(index int) Token {
+	c.lazyInit()
+	return c.tokens[index]
 }
 
-func (bt *CommonTokenStream) Consume() {
+func (c *CommonTokenStream) Consume() {
 	var SkipEofCheck = false
-	if bt.index >= 0 {
-		if bt.fetchedEOF {
+	if c.index >= 0 {
+		if c.fetchedEOF {
 			// the last token in tokens is EOF. Skip check if p indexes any
 			// fetched token except the last.
-			SkipEofCheck = bt.index < len(bt.tokens)-1
+			SkipEofCheck = c.index < len(c.tokens)-1
 		} else {
 			// no EOF token in tokens. Skip check if p indexes a fetched token.
-			SkipEofCheck = bt.index < len(bt.tokens)
+			SkipEofCheck = c.index < len(c.tokens)
 		}
 	} else {
 		// not yet initialized
@@ -115,14 +115,14 @@ func (bt *CommonTokenStream) Consume() {
 	if PortDebug {
 		fmt.Println("Consume 1")
 	}
-	if !SkipEofCheck && bt.LA(1) == TokenEOF {
+	if !SkipEofCheck && c.LA(1) == TokenEOF {
 		panic("cannot consume EOF")
 	}
-	if bt.Sync(bt.index + 1) {
+	if c.Sync(c.index + 1) {
 		if PortDebug {
 			fmt.Println("Consume 2")
 		}
-		bt.index = bt.adjustSeekIndex(bt.index + 1)
+		c.index = c.adjustSeekIndex(c.index + 1)
 	}
 }
 
@@ -132,10 +132,10 @@ func (bt *CommonTokenStream) Consume() {
 // {@code false}.
 // @see //Get(int i)
 // /
-func (bt *CommonTokenStream) Sync(i int) bool {
-	var n = i - len(bt.tokens) + 1 // how many more elements we need?
+func (c *CommonTokenStream) Sync(i int) bool {
+	var n = i - len(c.tokens) + 1 // how many more elements we need?
 	if n > 0 {
-		var fetched = bt.fetch(n)
+		var fetched = c.fetch(n)
 		if PortDebug {
 			fmt.Println("Sync done")
 		}
@@ -148,20 +148,20 @@ func (bt *CommonTokenStream) Sync(i int) bool {
 //
 // @return The actual number of elements added to the buffer.
 // /
-func (bt *CommonTokenStream) fetch(n int) int {
-	if bt.fetchedEOF {
+func (c *CommonTokenStream) fetch(n int) int {
+	if c.fetchedEOF {
 		return 0
 	}
 
 	for i := 0; i < n; i++ {
-		var t Token = bt.tokenSource.NextToken()
+		var t Token = c.tokenSource.NextToken()
 		if PortDebug {
 			fmt.Println("fetch loop")
 		}
-		t.SetTokenIndex(len(bt.tokens))
-		bt.tokens = append(bt.tokens, t)
+		t.SetTokenIndex(len(c.tokens))
+		c.tokens = append(c.tokens, t)
 		if t.GetTokenType() == TokenEOF {
-			bt.fetchedEOF = true
+			c.fetchedEOF = true
 			return i + 1
 		}
 	}
@@ -173,18 +173,18 @@ func (bt *CommonTokenStream) fetch(n int) int {
 }
 
 // Get all tokens from start..stop inclusively///
-func (bt *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []Token {
+func (c *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []Token {
 
 	if start < 0 || stop < 0 {
 		return nil
 	}
-	bt.lazyInit()
+	c.lazyInit()
 	var subset = make([]Token, 0)
-	if stop >= len(bt.tokens) {
-		stop = len(bt.tokens) - 1
+	if stop >= len(c.tokens) {
+		stop = len(c.tokens) - 1
 	}
 	for i := start; i < stop; i++ {
-		var t = bt.tokens[i]
+		var t = c.tokens[i]
 		if t.GetTokenType() == TokenEOF {
 			break
 		}
@@ -195,49 +195,49 @@ func (bt *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet)
 	return subset
 }
 
-func (bt *CommonTokenStream) LA(i int) int {
-	return bt.LT(i).GetTokenType()
+func (c *CommonTokenStream) LA(i int) int {
+	return c.LT(i).GetTokenType()
 }
 
-func (bt *CommonTokenStream) lazyInit() {
-	if bt.index == -1 {
-		bt.setup()
+func (c *CommonTokenStream) lazyInit() {
+	if c.index == -1 {
+		c.setup()
 	}
 }
 
-func (bt *CommonTokenStream) setup() {
-	bt.Sync(0)
-	bt.index = bt.adjustSeekIndex(0)
+func (c *CommonTokenStream) setup() {
+	c.Sync(0)
+	c.index = c.adjustSeekIndex(0)
 }
 
-func (bt *CommonTokenStream) GetTokenSource() TokenSource {
-	return bt.tokenSource
+func (c *CommonTokenStream) GetTokenSource() TokenSource {
+	return c.tokenSource
 }
 
-// Reset bt token stream by setting its token source.///
-func (bt *CommonTokenStream) SetTokenSource(tokenSource TokenSource) {
-	bt.tokenSource = tokenSource
-	bt.tokens = make([]Token, 0)
-	bt.index = -1
+// Reset c token stream by setting its token source.///
+func (c *CommonTokenStream) SetTokenSource(tokenSource TokenSource) {
+	c.tokenSource = tokenSource
+	c.tokens = make([]Token, 0)
+	c.index = -1
 }
 
 // Given a starting index, return the index of the next token on channel.
 // Return i if tokens[i] is on channel. Return -1 if there are no tokens
 // on channel between i and EOF.
 // /
-func (bt *CommonTokenStream) NextTokenOnChannel(i, channel int) int {
-	bt.Sync(i)
-	if i >= len(bt.tokens) {
+func (c *CommonTokenStream) NextTokenOnChannel(i, channel int) int {
+	c.Sync(i)
+	if i >= len(c.tokens) {
 		return -1
 	}
-	var token = bt.tokens[i]
-	for token.GetChannel() != bt.channel {
+	var token = c.tokens[i]
+	for token.GetChannel() != c.channel {
 		if token.GetTokenType() == TokenEOF {
 			return -1
 		}
 		i += 1
-		bt.Sync(i)
-		token = bt.tokens[i]
+		c.Sync(i)
+		token = c.tokens[i]
 	}
 	return i
 }
@@ -245,8 +245,8 @@ func (bt *CommonTokenStream) NextTokenOnChannel(i, channel int) int {
 // Given a starting index, return the index of the previous token on channel.
 // Return i if tokens[i] is on channel. Return -1 if there are no tokens
 // on channel between i and 0.
-func (bt *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
-	for i >= 0 && bt.tokens[i].GetChannel() != channel {
+func (c *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
+	for i >= 0 && c.tokens[i].GetChannel() != channel {
 		i -= 1
 	}
 	return i
@@ -255,45 +255,45 @@ func (bt *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
 // Collect all tokens on specified channel to the right of
 // the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
 // EOF. If channel is -1, find any non default channel token.
-func (bt *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []Token {
-	bt.lazyInit()
-	if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
-		panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
+func (c *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []Token {
+	c.lazyInit()
+	if tokenIndex < 0 || tokenIndex >= len(c.tokens) {
+		panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
 	}
-	var nextOnChannel = bt.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
+	var nextOnChannel = c.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
 	var from_ = tokenIndex + 1
 	// if none onchannel to right, nextOnChannel=-1 so set to = last token
 	var to int
 	if nextOnChannel == -1 {
-		to = len(bt.tokens) - 1
+		to = len(c.tokens) - 1
 	} else {
 		to = nextOnChannel
 	}
-	return bt.filterForChannel(from_, to, channel)
+	return c.filterForChannel(from_, to, channel)
 }
 
 // Collect all tokens on specified channel to the left of
 // the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
 // If channel is -1, find any non default channel token.
-func (bt *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []Token {
-	bt.lazyInit()
-	if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
-		panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
+func (c *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []Token {
+	c.lazyInit()
+	if tokenIndex < 0 || tokenIndex >= len(c.tokens) {
+		panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
 	}
-	var prevOnChannel = bt.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
+	var prevOnChannel = c.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
 	if prevOnChannel == tokenIndex-1 {
 		return nil
 	}
 	// if none on channel to left, prevOnChannel=-1 then from=0
 	var from_ = prevOnChannel + 1
 	var to = tokenIndex - 1
-	return bt.filterForChannel(from_, to, channel)
+	return c.filterForChannel(from_, to, channel)
 }
 
-func (bt *CommonTokenStream) filterForChannel(left, right, channel int) []Token {
+func (c *CommonTokenStream) filterForChannel(left, right, channel int) []Token {
 	var hidden = make([]Token, 0)
 	for i := left; i < right+1; i++ {
-		var t = bt.tokens[i]
+		var t = c.tokens[i]
 		if channel == -1 {
 			if t.GetChannel() != LexerDefaultTokenChannel {
 				hidden = append(hidden, t)
@@ -308,40 +308,40 @@ func (bt *CommonTokenStream) filterForChannel(left, right, channel int) []Token
 	return hidden
 }
 
-func (bt *CommonTokenStream) GetSourceName() string {
-	return bt.tokenSource.GetSourceName()
+func (c *CommonTokenStream) GetSourceName() string {
+	return c.tokenSource.GetSourceName()
 }
 
-func (bt *CommonTokenStream) Size() int {
-	return len(bt.tokens)
+func (c *CommonTokenStream) Size() int {
+	return len(c.tokens)
 }
 
-func (bt *CommonTokenStream) Index() int {
-	return bt.index
+func (c *CommonTokenStream) Index() int {
+	return c.index
 }
 
-func (bt *CommonTokenStream) GetAllText() string {
-	return bt.GetTextFromInterval(nil)
+func (c *CommonTokenStream) GetAllText() string {
+	return c.GetTextFromInterval(nil)
 }
 
-func (bt *CommonTokenStream) GetTextFromTokens(start, end Token) string {
+func (c *CommonTokenStream) GetTextFromTokens(start, end Token) string {
 	if start == nil || end == nil {
 		return ""
 	}
 
-	return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
+	return c.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
 }
 
-func (bt *CommonTokenStream) GetTextFromRuleContext(interval RuleContext) string {
-	return bt.GetTextFromInterval(interval.GetSourceInterval())
+func (c *CommonTokenStream) GetTextFromRuleContext(interval RuleContext) string {
+	return c.GetTextFromInterval(interval.GetSourceInterval())
 }
 
-func (bt *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
+func (c *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
 
-	bt.lazyInit()
-	bt.Fill()
+	c.lazyInit()
+	c.Fill()
 	if interval == nil {
-		interval = NewInterval(0, len(bt.tokens)-1)
+		interval = NewInterval(0, len(c.tokens)-1)
 	}
 
 	var start = interval.start
@@ -349,13 +349,13 @@ func (bt *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
 	if start < 0 || stop < 0 {
 		return ""
 	}
-	if stop >= len(bt.tokens) {
-		stop = len(bt.tokens) - 1
+	if stop >= len(c.tokens) {
+		stop = len(c.tokens) - 1
 	}
 
 	var s = ""
 	for i := start; i < stop+1; i++ {
-		var t = bt.tokens[i]
+		var t = c.tokens[i]
 		if t.GetTokenType() == TokenEOF {
 			break
 		}
@@ -366,64 +366,64 @@ func (bt *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
 }
 
 // Get all tokens from lexer until EOF///
-func (bt *CommonTokenStream) Fill() {
-	bt.lazyInit()
-	for bt.fetch(1000) == 1000 {
+func (c *CommonTokenStream) Fill() {
+	c.lazyInit()
+	for c.fetch(1000) == 1000 {
 		continue
 	}
 }
 
-func (ts *CommonTokenStream) adjustSeekIndex(i int) int {
-	return ts.NextTokenOnChannel(i, ts.channel)
+func (c *CommonTokenStream) adjustSeekIndex(i int) int {
+	return c.NextTokenOnChannel(i, c.channel)
 }
 
-func (ts *CommonTokenStream) LB(k int) Token {
+func (c *CommonTokenStream) LB(k int) Token {
 
-	if k == 0 || ts.index-k < 0 {
+	if k == 0 || c.index-k < 0 {
 		return nil
 	}
-	var i = ts.index
+	var i = c.index
 	var n = 1
 	// find k good tokens looking backwards
 	for n <= k {
 		// Skip off-channel tokens
-		i = ts.previousTokenOnChannel(i-1, ts.channel)
+		i = c.previousTokenOnChannel(i-1, c.channel)
 		n += 1
 	}
 	if i < 0 {
 		return nil
 	}
-	return ts.tokens[i]
+	return c.tokens[i]
 }
 
-func (ts *CommonTokenStream) LT(k int) Token {
-	ts.lazyInit()
+func (c *CommonTokenStream) LT(k int) Token {
+	c.lazyInit()
 	if k == 0 {
 		return nil
 	}
 	if k < 0 {
-		return ts.LB(-k)
+		return c.LB(-k)
 	}
-	var i = ts.index
+	var i = c.index
 	var n = 1 // we know tokens[pos] is a good one
 	// find k good tokens
 	for n < k {
 		// Skip off-channel tokens, but make sure to not look past EOF
-		if ts.Sync(i + 1) {
-			i = ts.NextTokenOnChannel(i+1, ts.channel)
+		if c.Sync(i + 1) {
+			i = c.NextTokenOnChannel(i+1, c.channel)
 		}
 		n += 1
 	}
-	return ts.tokens[i]
+	return c.tokens[i]
 }
 
 // Count EOF just once.///
-func (ts *CommonTokenStream) getNumberOfOnChannelTokens() int {
+func (c *CommonTokenStream) getNumberOfOnChannelTokens() int {
 	var n = 0
-	ts.Fill()
-	for i := 0; i < len(ts.tokens); i++ {
-		var t = ts.tokens[i]
-		if t.GetChannel() == ts.channel {
+	c.Fill()
+	for i := 0; i < len(c.tokens); i++ {
+		var t = c.tokens[i]
+		if t.GetChannel() == c.channel {
 			n += 1
 		}
 		if t.GetTokenType() == TokenEOF {
@@ -105,7 +105,7 @@ func (is *InputStream) GetTextFromInterval(i *Interval) string {
 	return is.GetText(i.start, i.stop)
 }
 
-func (f *InputStream) GetSourceName() string {
+func (*InputStream) GetSourceName() string {
 	return "Obtained from string"
 }
 
@@ -67,33 +67,33 @@ func (i *IntervalSet) addRange(l, h int) {
 	i.addInterval(NewInterval(l, h+1))
 }
 
-func (is *IntervalSet) addInterval(v *Interval) {
+func (i *IntervalSet) addInterval(v *Interval) {
 	if PortDebug {
 		fmt.Println("addInterval" + v.String())
 	}
-	if is.intervals == nil {
-		is.intervals = make([]*Interval, 0)
-		is.intervals = append(is.intervals, v)
+	if i.intervals == nil {
+		i.intervals = make([]*Interval, 0)
+		i.intervals = append(i.intervals, v)
 	} else {
 		// find insert pos
-		for k := 0; k < len(is.intervals); k++ {
-			var i = is.intervals[k]
-			// distinct range -> insert
-			if v.stop < i.start {
-				// is.intervals = splice(k, 0, v)
-				is.intervals = append(is.intervals[0:k], append([]*Interval{v}, is.intervals[k:]...)...)
+		for k := 0; k < len(i.intervals); k++ {
+			var interval = i.intervals[k]
+			// ditinct range -> insert
+			if v.stop < interval.start {
+				// i.intervals = splice(k, 0, v)
+				i.intervals = append(i.intervals[0:k], append([]*Interval{v}, i.intervals[k:]...)...)
 				return
-			} else if v.stop == i.start {
-				is.intervals[k].start = v.start
+			} else if v.stop == interval.start {
+				i.intervals[k].start = v.start
 				return
-			} else if v.start <= i.stop {
-				is.intervals[k] = NewInterval(intMin(i.start, v.start), intMax(i.stop, v.stop))
-				is.reduce(k)
+			} else if v.start <= interval.stop {
+				i.intervals[k] = NewInterval(intMin(interval.start, v.start), intMax(interval.stop, v.stop))
+				i.reduce(k)
 				return
 			}
 		}
-		// greater than any existing
-		is.intervals = append(is.intervals, v)
+		// greater than any exiting
+		i.intervals = append(i.intervals, v)
 	}
 }
 
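Worth noting about the IntervalSet hunk above (an editorial observation, not part of the commit): once the receiver is named `i`, any local also called `i` would shadow it, so those locals are renamed (`interval` here, `ni` and `ki` in later hunks) and some index variables become `j`. A small sketch with hypothetical stand-in types showing why the locals had to move out of the way:

package example

// Hypothetical stand-ins for the ANTLR Interval/IntervalSet types.
type interval struct{ start, stop int }

type intervalSet struct{ intervals []*interval }

// With the receiver named "i", a local also named "i" would shadow it for the
// rest of the block, so receiver accesses such as i.intervals would no longer
// refer to the set; giving the local a different name keeps both usable.
func (i *intervalSet) firstNonNegative() *interval {
	for k := 0; k < len(i.intervals); k++ {
		iv := i.intervals[k] // renamed local; "i" still refers to the receiver
		if iv.start >= 0 {
			return iv
		}
	}
	return nil
}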
@@ -129,11 +129,11 @@ func (i *IntervalSet) reduce(k int) {
 	}
 }
 
-func (is *IntervalSet) complement(start int, stop int) *IntervalSet {
+func (i *IntervalSet) complement(start int, stop int) *IntervalSet {
 	var result = NewIntervalSet()
 	result.addInterval(NewInterval(start, stop+1))
-	for i := 0; i < len(is.intervals); i++ {
-		result.removeRange(is.intervals[i])
+	for j := 0; j < len(i.intervals); j++ {
+		result.removeRange(i.intervals[j])
 	}
 	return result
 }
@@ -151,68 +151,68 @@ func (i *IntervalSet) contains(item int) bool {
 	}
 }
 
-func (is *IntervalSet) length() int {
+func (i *IntervalSet) length() int {
 	len := 0
 
-	for _, v := range is.intervals {
+	for _, v := range i.intervals {
 		len += v.length()
 	}
 
 	return len
 }
 
-func (is *IntervalSet) removeRange(v *Interval) {
+func (i *IntervalSet) removeRange(v *Interval) {
 	if v.start == v.stop-1 {
-		is.removeOne(v.start)
-	} else if is.intervals != nil {
+		i.removeOne(v.start)
+	} else if i.intervals != nil {
 		k := 0
-		for n := 0; n < len(is.intervals); n++ {
-			var i = is.intervals[k]
+		for n := 0; n < len(i.intervals); n++ {
+			var ni = i.intervals[k]
 			// intervals are ordered
-			if v.stop <= i.start {
+			if v.stop <= ni.start {
 				return
-			} else if v.start > i.start && v.stop < i.stop {
-				is.intervals[k] = NewInterval(i.start, v.start)
-				var x = NewInterval(v.stop, i.stop)
-				// is.intervals.splice(k, 0, x)
-				is.intervals = append(is.intervals[0:k], append([]*Interval{x}, is.intervals[k:]...)...)
+			} else if v.start > ni.start && v.stop < ni.stop {
+				i.intervals[k] = NewInterval(ni.start, v.start)
+				var x = NewInterval(v.stop, ni.stop)
+				// i.intervals.splice(k, 0, x)
+				i.intervals = append(i.intervals[0:k], append([]*Interval{x}, i.intervals[k:]...)...)
 				return
-			} else if v.start <= i.start && v.stop >= i.stop {
-				// is.intervals.splice(k, 1)
-				is.intervals = append(is.intervals[0:k], is.intervals[k+1:]...)
+			} else if v.start <= ni.start && v.stop >= ni.stop {
+				// i.intervals.splice(k, 1)
+				i.intervals = append(i.intervals[0:k], i.intervals[k+1:]...)
 				k = k - 1 // need another pass
-			} else if v.start < i.stop {
-				is.intervals[k] = NewInterval(i.start, v.start)
-			} else if v.stop < i.stop {
-				is.intervals[k] = NewInterval(v.stop, i.stop)
+			} else if v.start < ni.stop {
+				i.intervals[k] = NewInterval(ni.start, v.start)
+			} else if v.stop < ni.stop {
+				i.intervals[k] = NewInterval(v.stop, ni.stop)
 			}
 			k += 1
 		}
 	}
 }
 
-func (is *IntervalSet) removeOne(v int) {
-	if is.intervals != nil {
-		for k := 0; k < len(is.intervals); k++ {
-			var i = is.intervals[k]
-			// intervals is ordered
-			if v < i.start {
+func (i *IntervalSet) removeOne(v int) {
+	if i.intervals != nil {
+		for k := 0; k < len(i.intervals); k++ {
+			var ki = i.intervals[k]
+			// intervals i ordered
+			if v < ki.start {
 				return
-			} else if v == i.start && v == i.stop-1 {
-				// is.intervals.splice(k, 1);
-				is.intervals = append(is.intervals[0:k], is.intervals[k+1:]...)
+			} else if v == ki.start && v == ki.stop-1 {
+				// i.intervals.splice(k, 1);
+				i.intervals = append(i.intervals[0:k], i.intervals[k+1:]...)
 				return
-			} else if v == i.start {
-				is.intervals[k] = NewInterval(i.start+1, i.stop)
+			} else if v == ki.start {
+				i.intervals[k] = NewInterval(ki.start+1, ki.stop)
 				return
-			} else if v == i.stop-1 {
-				is.intervals[k] = NewInterval(i.start, i.stop-1)
+			} else if v == ki.stop-1 {
+				i.intervals[k] = NewInterval(ki.start, ki.stop-1)
 				return
-			} else if v < i.stop-1 {
-				var x = NewInterval(i.start, v)
-				i.start = v + 1
-				// is.intervals.splice(k, 0, x);
-				is.intervals = append(is.intervals[0:k], append([]*Interval{x}, is.intervals[k:]...)...)
+			} else if v < ki.stop-1 {
+				var x = NewInterval(ki.start, v)
+				ki.start = v + 1
+				// i.intervals.splice(k, 0, x);
+				i.intervals = append(i.intervals[0:k], append([]*Interval{x}, i.intervals[k:]...)...)
 				return
 			}
 		}
@@ -236,11 +236,11 @@ func (i *IntervalSet) StringVerbose(literalNames []string, symbolicNames []strin
 	}
 }
 
-func (is *IntervalSet) toCharString() string {
-	var names = make([]string, len(is.intervals))
+func (i *IntervalSet) toCharString() string {
+	var names = make([]string, len(i.intervals))
 
-	for i := 0; i < len(is.intervals); i++ {
-		var v = is.intervals[i]
+	for j := 0; j < len(i.intervals); j++ {
+		var v = i.intervals[j]
 		if v.stop == v.start+1 {
 			if v.start == TokenEOF {
 				names = append(names, "<EOF>")
@@ -258,11 +258,11 @@ func (is *IntervalSet) toCharString() string {
 	}
 }
 
-func (is *IntervalSet) toIndexString() string {
+func (i *IntervalSet) toIndexString() string {
 
 	var names = make([]string, 0)
-	for i := 0; i < len(is.intervals); i++ {
-		var v = is.intervals[i]
+	for j := 0; j < len(i.intervals); j++ {
+		var v = i.intervals[j]
 		if v.stop == v.start+1 {
 			if v.start == TokenEOF {
 				names = append(names, "<EOF>")
@@ -280,11 +280,11 @@ func (is *IntervalSet) toIndexString() string {
 	}
 }
 
-func (is *IntervalSet) toTokenString(literalNames []string, symbolicNames []string) string {
+func (i *IntervalSet) toTokenString(literalNames []string, symbolicNames []string) string {
 	var names = make([]string, 0)
-	for _, v := range is.intervals {
+	for _, v := range i.intervals {
 		for j := v.start; j < v.stop; j++ {
-			names = append(names, is.elementName(literalNames, symbolicNames, j))
+			names = append(names, i.elementName(literalNames, symbolicNames, j))
 		}
 	}
 	if len(names) > 1 {
@@ -664,27 +664,27 @@ func (p *BaseParser) GetRuleIndex(ruleName string) int {
 //
 // this very useful for error messages.
 
-func (b *BaseParser) GetRuleInvocationStack(p ParserRuleContext) []string {
-	if p == nil {
-		p = b._ctx
+func (p *BaseParser) GetRuleInvocationStack(c ParserRuleContext) []string {
+	if c == nil {
+		c = p._ctx
 	}
 	var stack = make([]string, 0)
-	for p != nil {
+	for c != nil {
 		// compute what follows who invoked us
-		var ruleIndex = p.GetRuleIndex()
+		var ruleIndex = c.GetRuleIndex()
 		if ruleIndex < 0 {
 			stack = append(stack, "n/a")
 		} else {
-			stack = append(stack, b.GetRuleNames()[ruleIndex])
+			stack = append(stack, p.GetRuleNames()[ruleIndex])
 		}
 
-		vp := p.GetParent()
+		vp := c.GetParent()
 
 		if vp == nil {
 			break
 		}
 
-		p = vp.(ParserRuleContext)
+		c = vp.(ParserRuleContext)
 	}
 	return stack
 }
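One more observation on the GetRuleInvocationStack hunk above (editorial, not from the commit): the uniform receiver name for *BaseParser is `p`, which was already taken by the method's parameter, so the parameter is renamed to `c` in the same change; receiver uses switch from `b.` to `p.` and the walked-context uses from `p` to `c`. A hypothetical sketch of the same pattern:

package example

// Hypothetical stand-ins, for illustration only.
type ruleContext struct{ parent *ruleContext }

type parser struct{ ctx *ruleContext }

// Receiver "p" is the parser; the context parameter gets a different name ("c")
// so the two identifiers cannot be confused after the rename.
func (p *parser) invocationDepth(c *ruleContext) int {
	if c == nil {
		c = p.ctx
	}
	depth := 0
	for c != nil {
		depth++
		c = c.parent
	}
	return depth
}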
@@ -76,12 +76,12 @@ func (prc *BaseParserRuleContext) CopyFrom(ctx *BaseParserRuleContext) {
 	prc.stop = ctx.stop
 }
 
-func (b *BaseParserRuleContext) GetText() string {
-	if b.GetChildCount() == 0 {
+func (prc *BaseParserRuleContext) GetText() string {
+	if prc.GetChildCount() == 0 {
 		return ""
 	} else {
 		var s string
-		for _, child := range b.children {
+		for _, child := range prc.children {
 			s += child.(ParseTree).GetText()
 		}
 
@@ -171,16 +171,16 @@ func (prc *BaseParserRuleContext) GetChildOfType(i int, childType reflect.Type)
 	}
 }
 
-func (b *BaseParserRuleContext) ToStringTree(ruleNames []string, recog Recognizer) string {
-	return TreesStringTree(b, ruleNames, recog)
+func (prc *BaseParserRuleContext) ToStringTree(ruleNames []string, recog Recognizer) string {
+	return TreesStringTree(prc, ruleNames, recog)
 }
 
 func (prc *BaseParserRuleContext) GetRuleContext() RuleContext {
 	return prc
 }
 
-func (b *BaseParserRuleContext) Accept(visitor ParseTreeVisitor) interface{} {
-	return visitor.VisitChildren(b)
+func (prc *BaseParserRuleContext) Accept(visitor ParseTreeVisitor) interface{} {
+	return visitor.VisitChildren(prc)
 }
 
 func (prc *BaseParserRuleContext) SetStart(t Token) {
@@ -303,9 +303,9 @@ func (prc *BaseParserRuleContext) GetSourceInterval() *Interval {
 // (root child1 .. childN). Print just a node if b is a leaf.
 //
 
-func (b *BaseParserRuleContext) String(ruleNames []string, stop RuleContext) string {
+func (prc *BaseParserRuleContext) String(ruleNames []string, stop RuleContext) string {
 
-	var p ParserRuleContext = b
+	var p ParserRuleContext = prc
 	var s = "["
 	for p != nil && p != stop {
 		if ruleNames == nil {
@@ -281,8 +281,8 @@ func NewArrayPredictionContext(parents []PredictionContext, returnStates []int)
 	return c
 }
 
-func (c *ArrayPredictionContext) GetReturnStates() []int {
-	return c.returnStates
+func (a *ArrayPredictionContext) GetReturnStates() []int {
+	return a.returnStates
 }
 
 func (a *ArrayPredictionContext) hasEmptyPath() bool {
@@ -155,12 +155,12 @@ func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start
 //
 // @param oldToken The token to copy.
 //
-func (ct *CommonToken) clone() *CommonToken {
-	var t = NewCommonToken(ct.source, ct.tokenType, ct.channel, ct.start, ct.stop)
-	t.tokenIndex = ct.GetTokenIndex()
-	t.line = ct.GetLine()
-	t.column = ct.GetColumn()
-	t._text = ct.GetText()
+func (c *CommonToken) clone() *CommonToken {
+	var t = NewCommonToken(c.source, c.tokenType, c.channel, c.start, c.stop)
+	t.tokenIndex = c.GetTokenIndex()
+	t.line = c.GetLine()
+	t.column = c.GetColumn()
+	t._text = c.GetText()
 	return t
 }
 