Merge pull request #4 from willfaught/master

Simple clean up
This commit is contained in:
Peter Boyer 2016-05-27 02:01:23 -04:00
commit 128ec07dd3
47 changed files with 2303 additions and 2312 deletions

View File

View File

@ -60,16 +60,16 @@ func NewATN(grammarType int, maxTokenType int) *ATN {
// If {@code ctx} is nil, the set of tokens will not include what can follow
// the rule surrounding {@code s}. In other words, the set will be
// restricted to tokens reachable staying within {@code s}'s rule.
func (this *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
var anal = NewLL1Analyzer(this)
var res = anal.LOOK(s, nil, ctx)
func (a *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
var anal = NewLL1Analyzer(a)
var res = anal.Look(s, nil, ctx)
return res
}
// Compute the set of valid tokens that can occur starting in {@code s} and
// staying in same rule. {@link Token//EPSILON} is in set if we reach end of
// rule.
func (this *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
func (a *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
if s.GetNextTokenWithinRule() != nil {
if PortDebug {
fmt.Println("DEBUG A")
@ -78,45 +78,45 @@ func (this *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
}
if PortDebug {
fmt.Println("DEBUG 2")
fmt.Println(this.NextTokensInContext(s, nil))
fmt.Println(a.NextTokensInContext(s, nil))
}
s.SetNextTokenWithinRule(this.NextTokensInContext(s, nil))
s.SetNextTokenWithinRule(a.NextTokensInContext(s, nil))
s.GetNextTokenWithinRule().readOnly = true
return s.GetNextTokenWithinRule()
}
func (this *ATN) NextTokens(s ATNState, ctx RuleContext) *IntervalSet {
func (a *ATN) NextTokens(s ATNState, ctx RuleContext) *IntervalSet {
if ctx == nil {
return this.NextTokensNoContext(s)
} else {
return this.NextTokensInContext(s, ctx)
return a.NextTokensNoContext(s)
}
return a.NextTokensInContext(s, ctx)
}
func (this *ATN) addState(state ATNState) {
func (a *ATN) addState(state ATNState) {
if state != nil {
state.SetATN(this)
state.SetStateNumber(len(this.states))
state.SetATN(a)
state.SetStateNumber(len(a.states))
}
this.states = append(this.states, state)
a.states = append(a.states, state)
}
func (this *ATN) removeState(state ATNState) {
this.states[state.GetStateNumber()] = nil // just free mem, don't shift states in list
func (a *ATN) removeState(state ATNState) {
a.states[state.GetStateNumber()] = nil // just free mem, don't shift states in list
}
func (this *ATN) defineDecisionState(s DecisionState) int {
this.DecisionToState = append(this.DecisionToState, s)
s.setDecision(len(this.DecisionToState) - 1)
func (a *ATN) defineDecisionState(s DecisionState) int {
a.DecisionToState = append(a.DecisionToState, s)
s.setDecision(len(a.DecisionToState) - 1)
return s.getDecision()
}
func (this *ATN) getDecisionState(decision int) DecisionState {
if len(this.DecisionToState) == 0 {
func (a *ATN) getDecisionState(decision int) DecisionState {
if len(a.DecisionToState) == 0 {
return nil
} else {
return this.DecisionToState[decision]
}
return a.DecisionToState[decision]
}
// Computes the set of input symbols which could follow ATN state number
@ -137,12 +137,12 @@ func (this *ATN) getDecisionState(decision int) DecisionState {
// @panics IllegalArgumentException if the ATN does not contain a state with
// number {@code stateNumber}
func (this *ATN) getExpectedTokens(stateNumber int, ctx RuleContext) *IntervalSet {
if stateNumber < 0 || stateNumber >= len(this.states) {
func (a *ATN) getExpectedTokens(stateNumber int, ctx RuleContext) *IntervalSet {
if stateNumber < 0 || stateNumber >= len(a.states) {
panic("Invalid state number.")
}
var s = this.states[stateNumber]
var following = this.NextTokens(s, nil)
var s = a.states[stateNumber]
var following = a.NextTokens(s, nil)
if !following.contains(TokenEpsilon) {
return following
}
@ -150,9 +150,9 @@ func (this *ATN) getExpectedTokens(stateNumber int, ctx RuleContext) *IntervalSe
expected.addSet(following)
expected.removeOne(TokenEpsilon)
for ctx != nil && ctx.GetInvokingState() >= 0 && following.contains(TokenEpsilon) {
var invokingState = this.states[ctx.GetInvokingState()]
var invokingState = a.states[ctx.GetInvokingState()]
var rt = invokingState.GetTransitions()[0]
following = this.NextTokens(rt.(*RuleTransition).followState, nil)
following = a.NextTokens(rt.(*RuleTransition).followState, nil)
expected.addSet(following)
expected.removeOne(TokenEpsilon)
ctx = ctx.GetParent().(RuleContext)

View File

@ -2,7 +2,7 @@ package antlr
import (
"fmt"
// "reflect"
// "reflect"
"strconv"
)
@ -110,48 +110,48 @@ func NewBaseATNConfig(c ATNConfig, state ATNState, context PredictionContext, se
return a
}
func (this *BaseATNConfig) getPrecedenceFilterSuppressed() bool {
return this.precedenceFilterSuppressed
func (b *BaseATNConfig) getPrecedenceFilterSuppressed() bool {
return b.precedenceFilterSuppressed
}
func (this *BaseATNConfig) setPrecedenceFilterSuppressed(v bool) {
this.precedenceFilterSuppressed = v
func (b *BaseATNConfig) setPrecedenceFilterSuppressed(v bool) {
b.precedenceFilterSuppressed = v
}
func (this *BaseATNConfig) GetState() ATNState {
return this.state
func (b *BaseATNConfig) GetState() ATNState {
return b.state
}
func (this *BaseATNConfig) GetAlt() int {
return this.alt
func (b *BaseATNConfig) GetAlt() int {
return b.alt
}
func (this *BaseATNConfig) SetContext(v PredictionContext) {
this.context = v
func (b *BaseATNConfig) SetContext(v PredictionContext) {
b.context = v
}
func (this *BaseATNConfig) GetContext() PredictionContext {
return this.context
func (b *BaseATNConfig) GetContext() PredictionContext {
return b.context
}
func (this *BaseATNConfig) GetSemanticContext() SemanticContext {
return this.semanticContext
func (b *BaseATNConfig) GetSemanticContext() SemanticContext {
return b.semanticContext
}
func (this *BaseATNConfig) GetReachesIntoOuterContext() int {
return this.reachesIntoOuterContext
func (b *BaseATNConfig) GetReachesIntoOuterContext() int {
return b.reachesIntoOuterContext
}
func (this *BaseATNConfig) SetReachesIntoOuterContext(v int) {
this.reachesIntoOuterContext = v
func (b *BaseATNConfig) SetReachesIntoOuterContext(v int) {
b.reachesIntoOuterContext = v
}
// An ATN configuration is equal to another if both have
// the same state, they predict the same alternative, and
// syntactic/semantic contexts are the same.
///
func (this *BaseATNConfig) equals(o interface{}) bool {
func (b *BaseATNConfig) equals(o interface{}) bool {
if this == o {
if b == o {
return true
}
@ -161,54 +161,54 @@ func (this *BaseATNConfig) equals(o interface{}) bool {
return false
}
var b bool
if this.context==nil {
b = other.context==nil
var equal bool
if b.context == nil {
equal = other.context == nil
} else {
b = this.context.equals(other.context)
equal = b.context.equals(other.context)
}
return this.state.GetStateNumber() == other.state.GetStateNumber() &&
this.alt==other.alt &&
this.semanticContext.equals(other.semanticContext) &&
this.precedenceFilterSuppressed==other.precedenceFilterSuppressed &&
b;
return b.state.GetStateNumber() == other.state.GetStateNumber() &&
b.alt == other.alt &&
b.semanticContext.equals(other.semanticContext) &&
b.precedenceFilterSuppressed == other.precedenceFilterSuppressed &&
equal
}
func (this *BaseATNConfig) shortHash() string {
return strconv.Itoa(this.state.GetStateNumber()) + "/" + strconv.Itoa(this.alt) + "/" + this.semanticContext.String()
func (b *BaseATNConfig) shortHash() string {
return strconv.Itoa(b.state.GetStateNumber()) + "/" + strconv.Itoa(b.alt) + "/" + b.semanticContext.String()
}
func (this *BaseATNConfig) Hash() string {
func (b *BaseATNConfig) Hash() string {
var c string
if this.context == nil {
if b.context == nil {
c = ""
} else {
c = this.context.Hash()
c = b.context.Hash()
}
return strconv.Itoa(this.state.GetStateNumber()) + "/" + strconv.Itoa(this.alt) + "/" + c + "/" + this.semanticContext.String()
return strconv.Itoa(b.state.GetStateNumber()) + "/" + strconv.Itoa(b.alt) + "/" + c + "/" + b.semanticContext.String()
}
func (this *BaseATNConfig) String() string {
func (b *BaseATNConfig) String() string {
var a string
if this.context != nil {
a = ",[" + fmt.Sprint(this.context) + "]"
var s1 string
if b.context != nil {
s1 = ",[" + fmt.Sprint(b.context) + "]"
}
var b string
if this.semanticContext != SemanticContextNone {
b = "," + fmt.Sprint(this.semanticContext)
var s2 string
if b.semanticContext != SemanticContextNone {
s2 = "," + fmt.Sprint(b.semanticContext)
}
var c string
if this.reachesIntoOuterContext > 0 {
c = ",up=" + fmt.Sprint(this.reachesIntoOuterContext)
var s3 string
if b.reachesIntoOuterContext > 0 {
s3 = ",up=" + fmt.Sprint(b.reachesIntoOuterContext)
}
return "(" + fmt.Sprint(this.state) + "," + strconv.Itoa(this.alt) + a + b + c + ")"
return "(" + fmt.Sprint(b.state) + "," + strconv.Itoa(b.alt) + s1 + s2 + s3 + ")"
}
type LexerATNConfig struct {
@ -220,104 +220,104 @@ type LexerATNConfig struct {
func NewLexerATNConfig6(state ATNState, alt int, context PredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
l.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
this.passedThroughNonGreedyDecision = false
this.lexerActionExecutor = nil
return this
l.passedThroughNonGreedyDecision = false
l.lexerActionExecutor = nil
return l
}
func NewLexerATNConfig5(state ATNState, alt int, context PredictionContext, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
this.lexerActionExecutor = lexerActionExecutor
this.passedThroughNonGreedyDecision = false
return this
l.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
l.lexerActionExecutor = lexerActionExecutor
l.passedThroughNonGreedyDecision = false
return l
}
func NewLexerATNConfig4(c *LexerATNConfig, state ATNState) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
this.lexerActionExecutor = c.lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this
l.BaseATNConfig = NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
l.lexerActionExecutor = c.lexerActionExecutor
l.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return l
}
func NewLexerATNConfig3(c *LexerATNConfig, state ATNState, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
this.lexerActionExecutor = lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this
l.BaseATNConfig = NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
l.lexerActionExecutor = lexerActionExecutor
l.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return l
}
func NewLexerATNConfig2(c *LexerATNConfig, state ATNState, context PredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig(c, state, context, c.GetSemanticContext())
this.lexerActionExecutor = c.lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this
l.BaseATNConfig = NewBaseATNConfig(c, state, context, c.GetSemanticContext())
l.lexerActionExecutor = c.lexerActionExecutor
l.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return l
}
func NewLexerATNConfig1(state ATNState, alt int, context PredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
l.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
this.lexerActionExecutor = nil
this.passedThroughNonGreedyDecision = false
l.lexerActionExecutor = nil
l.passedThroughNonGreedyDecision = false
return this
return l
}
func (this *LexerATNConfig) Hash() string {
func (l *LexerATNConfig) Hash() string {
var f string
if this.passedThroughNonGreedyDecision {
if l.passedThroughNonGreedyDecision {
f = "1"
} else {
f = "0"
}
return strconv.Itoa(this.state.GetStateNumber()) + strconv.Itoa(this.alt) + fmt.Sprint(this.context) +
fmt.Sprint(this.semanticContext) + f + fmt.Sprint(this.lexerActionExecutor)
return strconv.Itoa(l.state.GetStateNumber()) + strconv.Itoa(l.alt) + fmt.Sprint(l.context) +
fmt.Sprint(l.semanticContext) + f + fmt.Sprint(l.lexerActionExecutor)
}
func (this *LexerATNConfig) equals(other interface{}) bool {
func (l *LexerATNConfig) equals(other interface{}) bool {
othert, ok := other.(*LexerATNConfig)
if this == other {
if l == other {
return true
} else if !ok {
return false
} else if this.passedThroughNonGreedyDecision != othert.passedThroughNonGreedyDecision {
} else if l.passedThroughNonGreedyDecision != othert.passedThroughNonGreedyDecision {
return false
}
var b bool
if this.lexerActionExecutor != nil {
b = !this.lexerActionExecutor.equals(othert.lexerActionExecutor)
if l.lexerActionExecutor != nil {
b = !l.lexerActionExecutor.equals(othert.lexerActionExecutor)
} else {
b = othert.lexerActionExecutor != nil
}
if b {
return false
} else {
return this.BaseATNConfig.equals(othert.BaseATNConfig)
}
return l.BaseATNConfig.equals(othert.BaseATNConfig)
}
func checkNonGreedyDecision(source *LexerATNConfig, target ATNState) bool {

View File

@ -113,25 +113,25 @@ func NewBaseATNConfigSet(fullCtx bool) *BaseATNConfigSet {
// <p>This method updates {@link //dipsIntoOuterContext} and
// {@link //hasSemanticContext} when necessary.</p>
// /
func (this *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
func (b *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
if this.readOnly {
if b.readOnly {
panic("This set is readonly")
}
if config.GetSemanticContext() != SemanticContextNone {
this.hasSemanticContext = true
b.hasSemanticContext = true
}
if config.GetReachesIntoOuterContext() > 0 {
this.dipsIntoOuterContext = true
b.dipsIntoOuterContext = true
}
var existing = this.configLookup.add(config).(ATNConfig)
var existing = b.configLookup.add(config).(ATNConfig)
if existing == config {
this.cachedHashString = "-1"
this.configs = append(this.configs, config) // track order here
b.cachedHashString = "-1"
b.configs = append(b.configs, config) // track order here
return true
}
// a previous (s,i,pi,_), merge with it and save result
var rootIsWildcard = !this.fullCtx
var rootIsWildcard = !b.fullCtx
var merged = merge(existing.GetContext(), config.GetContext(), rootIsWildcard, mergeCache)
// no need to check for existing.context, config.context in cache
// since only way to create Newgraphs is "call rule" and here. We
@ -146,26 +146,26 @@ func (this *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool
return true
}
func (this *BaseATNConfigSet) GetStates() *Set {
func (b *BaseATNConfigSet) GetStates() *Set {
var states = NewSet(nil, nil)
for i := 0; i < len(this.configs); i++ {
states.add(this.configs[i].GetState())
for i := 0; i < len(b.configs); i++ {
states.add(b.configs[i].GetState())
}
return states
}
func (this *BaseATNConfigSet) HasSemanticContext() bool {
return this.hasSemanticContext
func (b *BaseATNConfigSet) HasSemanticContext() bool {
return b.hasSemanticContext
}
func (this *BaseATNConfigSet) SetHasSemanticContext(v bool) {
this.hasSemanticContext = v
func (b *BaseATNConfigSet) SetHasSemanticContext(v bool) {
b.hasSemanticContext = v
}
func (this *BaseATNConfigSet) GetPredicates() []SemanticContext {
func (b *BaseATNConfigSet) GetPredicates() []SemanticContext {
var preds = make([]SemanticContext, 0)
for i := 0; i < len(this.configs); i++ {
c := this.configs[i].GetSemanticContext()
for i := 0; i < len(b.configs); i++ {
c := b.configs[i].GetSemanticContext()
if c != SemanticContextNone {
preds = append(preds, c)
}
@ -173,32 +173,32 @@ func (this *BaseATNConfigSet) GetPredicates() []SemanticContext {
return preds
}
func (this *BaseATNConfigSet) GetItems() []ATNConfig {
return this.configs
func (b *BaseATNConfigSet) GetItems() []ATNConfig {
return b.configs
}
func (this *BaseATNConfigSet) OptimizeConfigs(interpreter *BaseATNSimulator) {
if this.readOnly {
func (b *BaseATNConfigSet) OptimizeConfigs(interpreter *BaseATNSimulator) {
if b.readOnly {
panic("This set is readonly")
}
if this.configLookup.length() == 0 {
if b.configLookup.length() == 0 {
return
}
for i := 0; i < len(this.configs); i++ {
var config = this.configs[i]
for i := 0; i < len(b.configs); i++ {
var config = b.configs[i]
config.SetContext(interpreter.getCachedContext(config.GetContext()))
}
}
func (this *BaseATNConfigSet) AddAll(coll []ATNConfig) bool {
func (b *BaseATNConfigSet) AddAll(coll []ATNConfig) bool {
for i := 0; i < len(coll); i++ {
this.Add(coll[i], nil)
b.Add(coll[i], nil)
}
return false
}
func (this *BaseATNConfigSet) Equals(other interface{}) bool {
if this == other {
func (b *BaseATNConfigSet) Equals(other interface{}) bool {
if b == other {
return true
} else if _, ok := other.(*BaseATNConfigSet); !ok {
return false
@ -206,129 +206,129 @@ func (this *BaseATNConfigSet) Equals(other interface{}) bool {
other2 := other.(*BaseATNConfigSet)
return this.configs != nil &&
// this.configs.equals(other2.configs) && // TODO is this necessary?
this.fullCtx == other2.fullCtx &&
this.uniqueAlt == other2.uniqueAlt &&
this.conflictingAlts == other2.conflictingAlts &&
this.hasSemanticContext == other2.hasSemanticContext &&
this.dipsIntoOuterContext == other2.dipsIntoOuterContext
return b.configs != nil &&
// b.configs.equals(other2.configs) && // TODO is b necessary?
b.fullCtx == other2.fullCtx &&
b.uniqueAlt == other2.uniqueAlt &&
b.conflictingAlts == other2.conflictingAlts &&
b.hasSemanticContext == other2.hasSemanticContext &&
b.dipsIntoOuterContext == other2.dipsIntoOuterContext
}
func (this *BaseATNConfigSet) Hash() string {
if this.readOnly {
if this.cachedHashString == "-1" {
this.cachedHashString = this.hashConfigs()
func (b *BaseATNConfigSet) Hash() string {
if b.readOnly {
if b.cachedHashString == "-1" {
b.cachedHashString = b.hashConfigs()
}
return this.cachedHashString
} else {
return this.hashConfigs()
return b.cachedHashString
}
return b.hashConfigs()
}
func (this *BaseATNConfigSet) hashConfigs() string {
func (b *BaseATNConfigSet) hashConfigs() string {
var s = ""
for _, c := range this.configs {
for _, c := range b.configs {
s += fmt.Sprint(c)
}
return s
}
func (this *BaseATNConfigSet) Length() int {
return len(this.configs)
func (b *BaseATNConfigSet) Length() int {
return len(b.configs)
}
func (this *BaseATNConfigSet) IsEmpty() bool {
return len(this.configs) == 0
func (b *BaseATNConfigSet) IsEmpty() bool {
return len(b.configs) == 0
}
func (this *BaseATNConfigSet) Contains(item ATNConfig) bool {
if this.configLookup == nil {
func (b *BaseATNConfigSet) Contains(item ATNConfig) bool {
if b.configLookup == nil {
panic("This method is not implemented for readonly sets.")
}
return this.configLookup.contains(item)
return b.configLookup.contains(item)
}
func (this *BaseATNConfigSet) ContainsFast(item ATNConfig) bool {
if this.configLookup == nil {
func (b *BaseATNConfigSet) ContainsFast(item ATNConfig) bool {
if b.configLookup == nil {
panic("This method is not implemented for readonly sets.")
}
return this.configLookup.contains(item) // TODO containsFast is not implemented for Set
return b.configLookup.contains(item) // TODO containsFast is not implemented for Set
}
func (this *BaseATNConfigSet) Clear() {
if this.readOnly {
func (b *BaseATNConfigSet) Clear() {
if b.readOnly {
panic("This set is readonly")
}
this.configs = make([]ATNConfig, 0)
this.cachedHashString = "-1"
this.configLookup = NewSet(hashATNConfig, equalATNConfigs)
b.configs = make([]ATNConfig, 0)
b.cachedHashString = "-1"
b.configLookup = NewSet(hashATNConfig, equalATNConfigs)
}
func (this *BaseATNConfigSet) FullContext() bool {
return this.fullCtx
func (b *BaseATNConfigSet) FullContext() bool {
return b.fullCtx
}
func (this *BaseATNConfigSet) GetDipsIntoOuterContext() bool {
return this.dipsIntoOuterContext
func (b *BaseATNConfigSet) GetDipsIntoOuterContext() bool {
return b.dipsIntoOuterContext
}
func (this *BaseATNConfigSet) SetDipsIntoOuterContext(v bool) {
this.dipsIntoOuterContext = v
func (b *BaseATNConfigSet) SetDipsIntoOuterContext(v bool) {
b.dipsIntoOuterContext = v
}
func (this *BaseATNConfigSet) GetUniqueAlt() int {
return this.uniqueAlt
func (b *BaseATNConfigSet) GetUniqueAlt() int {
return b.uniqueAlt
}
func (this *BaseATNConfigSet) SetUniqueAlt(v int) {
this.uniqueAlt = v
func (b *BaseATNConfigSet) SetUniqueAlt(v int) {
b.uniqueAlt = v
}
func (this *BaseATNConfigSet) GetConflictingAlts() *BitSet {
return this.conflictingAlts
func (b *BaseATNConfigSet) GetConflictingAlts() *BitSet {
return b.conflictingAlts
}
func (this *BaseATNConfigSet) SetConflictingAlts(v *BitSet) {
this.conflictingAlts = v
func (b *BaseATNConfigSet) SetConflictingAlts(v *BitSet) {
b.conflictingAlts = v
}
func (this *BaseATNConfigSet) ReadOnly() bool {
return this.readOnly
func (b *BaseATNConfigSet) ReadOnly() bool {
return b.readOnly
}
func (this *BaseATNConfigSet) SetReadOnly(readOnly bool) {
this.readOnly = readOnly
func (b *BaseATNConfigSet) SetReadOnly(readOnly bool) {
b.readOnly = readOnly
if readOnly {
this.configLookup = nil // can't mod, no need for lookup cache
b.configLookup = nil // can't mod, no need for lookup cache
}
}
func (this *BaseATNConfigSet) String() string {
func (b *BaseATNConfigSet) String() string {
s := "["
for i, c := range this.configs {
for i, c := range b.configs {
s += c.String()
if i != len(this.configs)-1 {
if i != len(b.configs)-1 {
s += ", "
}
}
s += "]"
if this.hasSemanticContext {
s += ",hasSemanticContext=" + fmt.Sprint(this.hasSemanticContext)
if b.hasSemanticContext {
s += ",hasSemanticContext=" + fmt.Sprint(b.hasSemanticContext)
}
if this.uniqueAlt != ATNInvalidAltNumber {
s += ",uniqueAlt=" + fmt.Sprint(this.uniqueAlt)
if b.uniqueAlt != ATNInvalidAltNumber {
s += ",uniqueAlt=" + fmt.Sprint(b.uniqueAlt)
}
if this.conflictingAlts != nil {
s += ",conflictingAlts=" + this.conflictingAlts.String()
if b.conflictingAlts != nil {
s += ",conflictingAlts=" + b.conflictingAlts.String()
}
if this.dipsIntoOuterContext {
if b.dipsIntoOuterContext {
s += ",dipsIntoOuterContext"
}
@ -341,12 +341,12 @@ type OrderedATNConfigSet struct {
func NewOrderedATNConfigSet() *OrderedATNConfigSet {
this := new(OrderedATNConfigSet)
o := new(OrderedATNConfigSet)
this.BaseATNConfigSet = NewBaseATNConfigSet(false)
this.configLookup = NewSet(nil, nil)
o.BaseATNConfigSet = NewBaseATNConfigSet(false)
o.configLookup = NewSet(nil, nil)
return this
return o
}
func hashATNConfig(c interface{}) string {

View File

@ -44,11 +44,11 @@ func NewATNDeserializer(options *ATNDeserializationOptions) *ATNDeserializer {
options = ATNDeserializationOptionsdefaultOptions
}
this := new(ATNDeserializer)
a := new(ATNDeserializer)
this.deserializationOptions = options
a.deserializationOptions = options
return this
return a
}
func stringInSlice(a string, list []string) int {
@ -72,40 +72,40 @@ func stringInSlice(a string, list []string) int {
// serialized ATN at or after the feature identified by {@code feature} was
// introduced otherwise, {@code false}.
func (this *ATNDeserializer) isFeatureSupported(feature, actualUuid string) bool {
func (a *ATNDeserializer) isFeatureSupported(feature, actualUUID string) bool {
var idx1 = stringInSlice(feature, SupportedUUIDs)
if idx1 < 0 {
return false
}
var idx2 = stringInSlice(actualUuid, SupportedUUIDs)
var idx2 = stringInSlice(actualUUID, SupportedUUIDs)
return idx2 >= idx1
}
func (this *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
this.reset(utf16.Decode(data))
this.checkVersion()
this.checkUUID()
var atn = this.readATN()
this.readStates(atn)
this.readRules(atn)
this.readModes(atn)
var sets = this.readSets(atn)
this.readEdges(atn, sets)
this.readDecisions(atn)
this.readLexerActions(atn)
this.markPrecedenceDecisions(atn)
this.verifyATN(atn)
if this.deserializationOptions.generateRuleBypassTransitions && atn.grammarType == ATNTypeParser {
this.generateRuleBypassTransitions(atn)
a.reset(utf16.Decode(data))
a.checkVersion()
a.checkUUID()
var atn = a.readATN()
a.readStates(atn)
a.readRules(atn)
a.readModes(atn)
var sets = a.readSets(atn)
a.readEdges(atn, sets)
a.readDecisions(atn)
a.readLexerActions(atn)
a.markPrecedenceDecisions(atn)
a.verifyATN(atn)
if a.deserializationOptions.generateRuleBypassTransitions && atn.grammarType == ATNTypeParser {
a.generateRuleBypassTransitions(atn)
// re-verify after modification
this.verifyATN(atn)
a.verifyATN(atn)
}
return atn
}
func (this *ATNDeserializer) reset(data []rune) {
func (a *ATNDeserializer) reset(data []rune) {
temp := make([]rune, len(data))
@ -118,55 +118,55 @@ func (this *ATNDeserializer) reset(data []rune) {
}
}
this.data = temp
this.pos = 0
a.data = temp
a.pos = 0
}
func (this *ATNDeserializer) checkVersion() {
var version = this.readInt()
func (a *ATNDeserializer) checkVersion() {
var version = a.readInt()
if version != SerializedVersion {
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(SerializedVersion) + ").")
}
}
func (this *ATNDeserializer) checkUUID() {
var uuid = this.readUUID()
func (a *ATNDeserializer) checkUUID() {
var uuid = a.readUUID()
if stringInSlice(uuid, SupportedUUIDs) < 0 {
panic("Could not deserialize ATN with UUID: " + uuid + " (expected " + SerializedUUID + " or a legacy UUID).")
}
this.uuid = uuid
a.uuid = uuid
}
func (this *ATNDeserializer) readATN() *ATN {
var grammarType = this.readInt()
var maxTokenType = this.readInt()
func (a *ATNDeserializer) readATN() *ATN {
var grammarType = a.readInt()
var maxTokenType = a.readInt()
return NewATN(grammarType, maxTokenType)
}
func (this *ATNDeserializer) readStates(atn *ATN) {
func (a *ATNDeserializer) readStates(atn *ATN) {
var loopBackStateNumbers = make([]LoopEndStateIntPair, 0)
var endStateNumbers = make([]BlockStartStateIntPair, 0)
var nstates = this.readInt()
var nstates = a.readInt()
for i := 0; i < nstates; i++ {
var stype = this.readInt()
var stype = a.readInt()
// ignore bad type of states
if stype == ATNStateInvalidType {
atn.addState(nil)
continue
}
var ruleIndex = this.readInt()
var ruleIndex = a.readInt()
if ruleIndex == 0xFFFF {
ruleIndex = -1
}
var s = this.stateFactory(stype, ruleIndex)
var s = a.stateFactory(stype, ruleIndex)
if stype == ATNStateLoopEnd {
var loopBackStateNumber = this.readInt()
var loopBackStateNumber = a.readInt()
loopBackStateNumbers = append(loopBackStateNumbers, LoopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
} else if s2, ok := s.(BlockStartState); ok {
var endStateNumber = this.readInt()
var endStateNumber = a.readInt()
endStateNumbers = append(endStateNumbers, BlockStartStateIntPair{s2, endStateNumber})
}
atn.addState(s)
@ -183,32 +183,32 @@ func (this *ATNDeserializer) readStates(atn *ATN) {
pair.item0.setEndState(atn.states[pair.item1].(*BlockEndState))
}
var numNonGreedyStates = this.readInt()
var numNonGreedyStates = a.readInt()
for j := 0; j < numNonGreedyStates; j++ {
stateNumber := this.readInt()
stateNumber := a.readInt()
atn.states[stateNumber].(DecisionState).setNonGreedy(true)
}
var numPrecedenceStates = this.readInt()
var numPrecedenceStates = a.readInt()
for j := 0; j < numPrecedenceStates; j++ {
stateNumber := this.readInt()
stateNumber := a.readInt()
atn.states[stateNumber].(*RuleStartState).isPrecedenceRule = true
}
}
func (this *ATNDeserializer) readRules(atn *ATN) {
func (a *ATNDeserializer) readRules(atn *ATN) {
var nrules = this.readInt()
var nrules = a.readInt()
if atn.grammarType == ATNTypeLexer {
atn.ruleToTokenType = make([]int, nrules) // initIntArray(nrules, 0)
}
atn.ruleToStartState = make([]*RuleStartState, nrules) // initIntArray(nrules, 0)
for i := 0; i < nrules; i++ {
var s = this.readInt()
var s = a.readInt()
var startState = atn.states[s].(*RuleStartState)
atn.ruleToStartState[i] = startState
if atn.grammarType == ATNTypeLexer {
var tokenType = this.readInt()
var tokenType = a.readInt()
if tokenType == 0xFFFF {
tokenType = TokenEOF
}
@ -225,45 +225,45 @@ func (this *ATNDeserializer) readRules(atn *ATN) {
}
}
func (this *ATNDeserializer) readModes(atn *ATN) {
var nmodes = this.readInt()
func (a *ATNDeserializer) readModes(atn *ATN) {
var nmodes = a.readInt()
for i := 0; i < nmodes; i++ {
var s = this.readInt()
var s = a.readInt()
atn.modeToStartState = append(atn.modeToStartState, atn.states[s].(*TokensStartState))
}
}
func (this *ATNDeserializer) readSets(atn *ATN) []*IntervalSet {
func (a *ATNDeserializer) readSets(atn *ATN) []*IntervalSet {
var sets = make([]*IntervalSet, 0)
var m = this.readInt()
var m = a.readInt()
for i := 0; i < m; i++ {
var iset = NewIntervalSet()
sets = append(sets, iset)
var n = this.readInt()
var containsEof = this.readInt()
if containsEof != 0 {
var n = a.readInt()
var containsEOF = a.readInt()
if containsEOF != 0 {
iset.addOne(-1)
}
for j := 0; j < n; j++ {
var i1 = this.readInt()
var i2 = this.readInt()
var i1 = a.readInt()
var i2 = a.readInt()
iset.addRange(i1, i2)
}
}
return sets
}
func (this *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
var nedges = this.readInt()
var nedges = a.readInt()
for i := 0; i < nedges; i++ {
var src = this.readInt()
var trg = this.readInt()
var ttype = this.readInt()
var arg1 = this.readInt()
var arg2 = this.readInt()
var arg3 = this.readInt()
trans := this.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
var src = a.readInt()
var trg = a.readInt()
var ttype = a.readInt()
var arg1 = a.readInt()
var arg2 = a.readInt()
var arg3 = a.readInt()
trans := a.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
var srcState = atn.states[src]
srcState.AddTransition(trans, -1)
}
@ -319,47 +319,47 @@ func (this *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
}
}
func (this *ATNDeserializer) readDecisions(atn *ATN) {
var ndecisions = this.readInt()
func (a *ATNDeserializer) readDecisions(atn *ATN) {
var ndecisions = a.readInt()
for i := 0; i < ndecisions; i++ {
var s = this.readInt()
var s = a.readInt()
var decState = atn.states[s].(DecisionState)
atn.DecisionToState = append(atn.DecisionToState, decState)
decState.setDecision(i)
}
}
func (this *ATNDeserializer) readLexerActions(atn *ATN) {
func (a *ATNDeserializer) readLexerActions(atn *ATN) {
if atn.grammarType == ATNTypeLexer {
var count = this.readInt()
var count = a.readInt()
atn.lexerActions = make([]LexerAction, count) // initIntArray(count, nil)
for i := 0; i < count; i++ {
var actionType = this.readInt()
var data1 = this.readInt()
var actionType = a.readInt()
var data1 = a.readInt()
if data1 == 0xFFFF {
data1 = -1
}
var data2 = this.readInt()
var data2 = a.readInt()
if data2 == 0xFFFF {
data2 = -1
}
var lexerAction = this.lexerActionFactory(actionType, data1, data2)
var lexerAction = a.lexerActionFactory(actionType, data1, data2)
atn.lexerActions[i] = lexerAction
}
}
}
func (this *ATNDeserializer) generateRuleBypassTransitions(atn *ATN) {
func (a *ATNDeserializer) generateRuleBypassTransitions(atn *ATN) {
var count = len(atn.ruleToStartState)
for i := 0; i < count; i++ {
atn.ruleToTokenType[i] = atn.maxTokenType + i + 1
}
for i := 0; i < count; i++ {
this.generateRuleBypassTransition(atn, i)
a.generateRuleBypassTransition(atn, i)
}
}
func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
var bypassStart = NewBasicBlockStartState()
bypassStart.ruleIndex = idx
@ -375,15 +375,15 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
bypassStop.startState = bypassStart
var excludeTransition Transition = nil
var endState ATNState = nil
var excludeTransition Transition
var endState ATNState
if atn.ruleToStartState[idx].isPrecedenceRule {
// wrap from the beginning of the rule to the StarLoopEntryState
endState = nil
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
if this.stateIsEndStateFor(state, idx) != nil {
if a.stateIsEndStateFor(state, idx) != nil {
endState = state
excludeTransition = state.(*StarLoopEntryState).loopBackState.GetTransitions()[0]
break
@ -429,7 +429,7 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
bypassStart.AddTransition(NewEpsilonTransition(MatchState, -1), -1)
}
func (this *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNState {
func (a *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNState {
if state.GetRuleIndex() != idx {
return nil
}
@ -445,9 +445,9 @@ func (this *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNStat
if maybeLoopEndState.(*LoopEndState).epsilonOnlyTransitions && ok {
return state
} else {
return nil
}
return nil
}
//
@ -457,12 +457,12 @@ func (this *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNStat
//
// @param atn The ATN.
//
func (this *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
func (a *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
for _, state := range atn.states {
if _, ok := state.(*StarLoopEntryState); !ok {
continue
}
// We analyze the ATN to determine if this ATN decision state is the
// We analyze the ATN to determine if a ATN decision state is the
// decision for the closure block that determines whether a
// precedence rule should continue or complete.
//
@ -482,8 +482,8 @@ func (this *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
}
}
func (this *ATNDeserializer) verifyATN(atn *ATN) {
if !this.deserializationOptions.verifyATN {
func (a *ATNDeserializer) verifyATN(atn *ATN) {
if !a.deserializationOptions.verifyATN {
return
}
// verify assumptions
@ -493,52 +493,52 @@ func (this *ATNDeserializer) verifyATN(atn *ATN) {
if state == nil {
continue
}
this.checkCondition(state.GetEpsilonOnlyTransitions() || len(state.GetTransitions()) <= 1, "")
a.checkCondition(state.GetEpsilonOnlyTransitions() || len(state.GetTransitions()) <= 1, "")
switch s2 := state.(type) {
case *PlusBlockStartState:
this.checkCondition(s2.loopBackState != nil, "")
a.checkCondition(s2.loopBackState != nil, "")
case *StarLoopEntryState:
this.checkCondition(s2.loopBackState != nil, "")
this.checkCondition(len(s2.GetTransitions()) == 2, "")
a.checkCondition(s2.loopBackState != nil, "")
a.checkCondition(len(s2.GetTransitions()) == 2, "")
switch s2 := state.(type) {
case *StarBlockStartState:
_, ok2 := s2.GetTransitions()[1].getTarget().(*LoopEndState)
this.checkCondition(ok2, "")
this.checkCondition(!s2.nonGreedy, "")
a.checkCondition(ok2, "")
a.checkCondition(!s2.nonGreedy, "")
case *LoopEndState:
s3, ok2 := s2.GetTransitions()[1].getTarget().(*StarBlockStartState)
this.checkCondition(ok2, "")
this.checkCondition(s3.nonGreedy, "")
a.checkCondition(ok2, "")
a.checkCondition(s3.nonGreedy, "")
default:
panic("IllegalState")
}
case *StarLoopbackState:
this.checkCondition(len(state.GetTransitions()) == 1, "")
a.checkCondition(len(state.GetTransitions()) == 1, "")
_, ok2 := state.GetTransitions()[0].getTarget().(*StarLoopEntryState)
this.checkCondition(ok2, "")
a.checkCondition(ok2, "")
case *LoopEndState:
this.checkCondition(s2.loopBackState != nil, "")
a.checkCondition(s2.loopBackState != nil, "")
case *RuleStartState:
this.checkCondition(s2.stopState != nil, "")
a.checkCondition(s2.stopState != nil, "")
case *BaseBlockStartState:
this.checkCondition(s2.endState != nil, "")
a.checkCondition(s2.endState != nil, "")
case *BlockEndState:
this.checkCondition(s2.startState != nil, "")
a.checkCondition(s2.startState != nil, "")
case DecisionState:
this.checkCondition(len(s2.GetTransitions()) <= 1 || s2.getDecision() >= 0, "")
a.checkCondition(len(s2.GetTransitions()) <= 1 || s2.getDecision() >= 0, "")
default:
_, ok := s2.(*RuleStopState)
this.checkCondition(len(s2.GetTransitions()) <= 1 || ok, "")
a.checkCondition(len(s2.GetTransitions()) <= 1 || ok, "")
}
}
}
func (this *ATNDeserializer) checkCondition(condition bool, message string) {
func (a *ATNDeserializer) checkCondition(condition bool, message string) {
if !condition {
if message == "" {
message = "IllegalState"
@ -547,16 +547,16 @@ func (this *ATNDeserializer) checkCondition(condition bool, message string) {
}
}
func (this *ATNDeserializer) readInt() int {
v := this.data[this.pos]
this.pos += 1
func (a *ATNDeserializer) readInt() int {
v := a.data[a.pos]
a.pos++
return int(v)
}
//func (this *ATNDeserializer) readLong() int64 {
//func (a *ATNDeserializer) readLong() int64 {
// panic("Not implemented")
// var low = this.readInt32()
// var high = this.readInt32()
// var low = a.readInt32()
// var high = a.readInt32()
// return (low & 0x00000000FFFFFFFF) | (high << int32)
//}
@ -570,10 +570,10 @@ func createByteToHex() []string {
var byteToHex = createByteToHex()
func (this *ATNDeserializer) readUUID() string {
func (a *ATNDeserializer) readUUID() string {
var bb = make([]int, 16)
for i := 7; i >= 0; i-- {
var integer = this.readInt()
var integer = a.readInt()
bb[(2*i)+1] = integer & 0xFF
bb[2*i] = (integer >> 8) & 0xFF
}
@ -587,7 +587,7 @@ func (this *ATNDeserializer) readUUID() string {
byteToHex[bb[14]] + byteToHex[bb[15]]
}
func (this *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition {
func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition {
var target = atn.states[trg]
@ -597,9 +597,8 @@ func (this *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, ar
case TransitionRANGE:
if arg3 != 0 {
return NewRangeTransition(target, TokenEOF, arg2)
} else {
return NewRangeTransition(target, arg1, arg2)
}
return NewRangeTransition(target, arg1, arg2)
case TransitionRULE:
return NewRuleTransition(atn.states[arg1], arg2, arg3, target)
case TransitionPREDICATE:
@ -609,14 +608,13 @@ func (this *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, ar
case TransitionATOM:
if arg3 != 0 {
return NewAtomTransition(target, TokenEOF)
} else {
return NewAtomTransition(target, arg1)
}
return NewAtomTransition(target, arg1)
case TransitionACTION:
return NewActionTransition(target, arg1, arg2, arg3 != 0)
case TransitionSET:
return NewSetTransition(target, sets[arg1])
case TransitionNOT_SET:
case TransitionNOTSET:
return NewNotSetTransition(target, sets[arg1])
case TransitionWILDCARD:
return NewWildcardTransition(target)
@ -625,7 +623,7 @@ func (this *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, ar
panic("The specified transition type is not valid.")
}
func (this *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {
func (a *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {
var s ATNState
switch typeIndex {
@ -664,7 +662,7 @@ func (this *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {
return s
}
func (this *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) LexerAction {
func (a *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) LexerAction {
switch typeIndex {
case LexerActionTypeChannel:
return NewLexerChannelAction(data1)
@ -686,5 +684,4 @@ func (this *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) Lex
message := fmt.Sprintf("The specified lexer action typeIndex%d is not valid.", typeIndex)
panic(message)
}
return nil
}

View File

@ -7,20 +7,20 @@ type BaseATNSimulator struct {
func NewBaseATNSimulator(atn *ATN, sharedContextCache *PredictionContextCache) *BaseATNSimulator {
this := new(BaseATNSimulator)
b := new(BaseATNSimulator)
this.atn = atn
this.sharedContextCache = sharedContextCache
b.atn = atn
b.sharedContextCache = sharedContextCache
return this
return b
}
var ATNSimulatorError = NewDFAState(0x7FFFFFFF, NewBaseATNConfigSet(false))
func (this *BaseATNSimulator) getCachedContext(context PredictionContext) PredictionContext {
if this.sharedContextCache == nil {
func (b *BaseATNSimulator) getCachedContext(context PredictionContext) PredictionContext {
if b.sharedContextCache == nil {
return context
}
var visited = make(map[PredictionContext]PredictionContext)
return getCachedBasePredictionContext(context, this.sharedContextCache, visited)
return getCachedBasePredictionContext(context, b.sharedContextCache, visited)
}

View File

@ -125,33 +125,33 @@ func (as *BaseATNState) SetNextTokenWithinRule(v *IntervalSet) {
as.NextTokenWithinRule = v
}
func (this *BaseATNState) String() string {
return strconv.Itoa(this.stateNumber)
func (as *BaseATNState) String() string {
return strconv.Itoa(as.stateNumber)
}
func (this *BaseATNState) equals(other interface{}) bool {
func (as *BaseATNState) equals(other interface{}) bool {
if ot, ok := other.(ATNState); ok {
return this.stateNumber == ot.GetStateNumber()
} else {
return false
return as.stateNumber == ot.GetStateNumber()
}
}
func (this *BaseATNState) isNonGreedyExitState() bool {
return false
}
func (this *BaseATNState) AddTransition(trans Transition, index int) {
if len(this.transitions) == 0 {
this.epsilonOnlyTransitions = trans.getIsEpsilon()
} else if this.epsilonOnlyTransitions != trans.getIsEpsilon() {
this.epsilonOnlyTransitions = false
func (as *BaseATNState) isNonGreedyExitState() bool {
return false
}
func (as *BaseATNState) AddTransition(trans Transition, index int) {
if len(as.transitions) == 0 {
as.epsilonOnlyTransitions = trans.getIsEpsilon()
} else if as.epsilonOnlyTransitions != trans.getIsEpsilon() {
as.epsilonOnlyTransitions = false
}
if index == -1 {
this.transitions = append(this.transitions, trans)
as.transitions = append(as.transitions, trans)
} else {
this.transitions = append(this.transitions[:index], append([]Transition{trans}, this.transitions[index:]...)...)
// this.transitions.splice(index, 1, trans)
as.transitions = append(as.transitions[:index], append([]Transition{trans}, as.transitions[index:]...)...)
// as.transitions.splice(index, 1, trans)
}
}
@ -160,11 +160,11 @@ type BasicState struct {
}
func NewBasicState() *BasicState {
this := new(BasicState)
this.BaseATNState = NewBaseATNState()
b := new(BasicState)
b.BaseATNState = NewBaseATNState()
this.stateType = ATNStateBasic
return this
b.stateType = ATNStateBasic
return b
}
type DecisionState interface {
@ -186,14 +186,14 @@ type BaseDecisionState struct {
func NewBaseDecisionState() *BaseDecisionState {
this := new(BaseDecisionState)
b := new(BaseDecisionState)
this.BaseATNState = NewBaseATNState()
b.BaseATNState = NewBaseATNState()
this.decision = -1
this.nonGreedy = false
b.decision = -1
b.nonGreedy = false
return this
return b
}
func (s *BaseDecisionState) getDecision() int {
@ -228,12 +228,12 @@ type BaseBlockStartState struct {
func NewBlockStartState() *BaseBlockStartState {
this := new(BaseBlockStartState)
b := new(BaseBlockStartState)
this.BaseDecisionState = NewBaseDecisionState()
this.endState = nil
b.BaseDecisionState = NewBaseDecisionState()
b.endState = nil
return this
return b
}
func (s *BaseBlockStartState) getEndState() *BlockEndState {
@ -250,12 +250,12 @@ type BasicBlockStartState struct {
func NewBasicBlockStartState() *BasicBlockStartState {
this := new(BasicBlockStartState)
b := new(BasicBlockStartState)
this.BaseBlockStartState = NewBlockStartState()
b.BaseBlockStartState = NewBlockStartState()
this.stateType = ATNStateBlockStart
return this
b.stateType = ATNStateBlockStart
return b
}
// Terminal node of a simple {@code (a|b|c)} block.
@ -267,13 +267,13 @@ type BlockEndState struct {
func NewBlockEndState() *BlockEndState {
this := new(BlockEndState)
b := new(BlockEndState)
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateBlockEnd
this.startState = nil
b.BaseATNState = NewBaseATNState()
b.stateType = ATNStateBlockEnd
b.startState = nil
return this
return b
}
// The last node in the ATN for a rule, unless that rule is the start symbol.
@ -286,11 +286,11 @@ type RuleStopState struct {
}
func NewRuleStopState() *RuleStopState {
this := new(RuleStopState)
r := new(RuleStopState)
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateRuleStop
return this
r.BaseATNState = NewBaseATNState()
r.stateType = ATNStateRuleStop
return r
}
type RuleStartState struct {
@ -302,14 +302,14 @@ type RuleStartState struct {
func NewRuleStartState() *RuleStartState {
this := new(RuleStartState)
r := new(RuleStartState)
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateRuleStart
this.stopState = nil
this.isPrecedenceRule = false
r.BaseATNState = NewBaseATNState()
r.stateType = ATNStateRuleStart
r.stopState = nil
r.isPrecedenceRule = false
return this
return r
}
// Decision state for {@code A+} and {@code (A|B)+}. It has two transitions:
@ -321,12 +321,12 @@ type PlusLoopbackState struct {
func NewPlusLoopbackState() *PlusLoopbackState {
this := new(PlusLoopbackState)
p := new(PlusLoopbackState)
this.BaseDecisionState = NewBaseDecisionState()
p.BaseDecisionState = NewBaseDecisionState()
this.stateType = ATNStatePlusLoopBack
return this
p.stateType = ATNStatePlusLoopBack
return p
}
// Start of {@code (A|B|...)+} loop. Technically a decision state, but
@ -342,14 +342,14 @@ type PlusBlockStartState struct {
func NewPlusBlockStartState() *PlusBlockStartState {
this := new(PlusBlockStartState)
p := new(PlusBlockStartState)
this.BaseBlockStartState = NewBlockStartState()
p.BaseBlockStartState = NewBlockStartState()
this.stateType = ATNStatePlusBlockStart
this.loopBackState = nil
p.stateType = ATNStatePlusBlockStart
p.loopBackState = nil
return this
return p
}
// The block that begins a closure loop.
@ -359,13 +359,13 @@ type StarBlockStartState struct {
func NewStarBlockStartState() *StarBlockStartState {
this := new(StarBlockStartState)
s := new(StarBlockStartState)
this.BaseBlockStartState = NewBlockStartState()
s.BaseBlockStartState = NewBlockStartState()
this.stateType = ATNStateStarBlockStart
s.stateType = ATNStateStarBlockStart
return this
return s
}
type StarLoopbackState struct {
@ -374,12 +374,12 @@ type StarLoopbackState struct {
func NewStarLoopbackState() *StarLoopbackState {
this := new(StarLoopbackState)
s := new(StarLoopbackState)
this.BaseATNState = NewBaseATNState()
s.BaseATNState = NewBaseATNState()
this.stateType = ATNStateStarLoopBack
return this
s.stateType = ATNStateStarLoopBack
return s
}
type StarLoopEntryState struct {
@ -391,17 +391,17 @@ type StarLoopEntryState struct {
func NewStarLoopEntryState() *StarLoopEntryState {
this := new(StarLoopEntryState)
s := new(StarLoopEntryState)
this.BaseDecisionState = NewBaseDecisionState()
s.BaseDecisionState = NewBaseDecisionState()
this.stateType = ATNStateStarLoopEntry
this.loopBackState = nil
s.stateType = ATNStateStarLoopEntry
s.loopBackState = nil
// Indicates whether this state can benefit from a precedence DFA during SLL decision making.
this.precedenceRuleDecision = false
// Indicates whether s state can benefit from a precedence DFA during SLL decision making.
s.precedenceRuleDecision = false
return this
return s
}
// Mark the end of a * or + loop.
@ -413,14 +413,14 @@ type LoopEndState struct {
func NewLoopEndState() *LoopEndState {
this := new(LoopEndState)
l := new(LoopEndState)
this.BaseATNState = NewBaseATNState()
l.BaseATNState = NewBaseATNState()
this.stateType = ATNStateLoopEnd
this.loopBackState = nil
l.stateType = ATNStateLoopEnd
l.loopBackState = nil
return this
return l
}
// The Tokens rule start state linking to each lexer rule start state */
@ -430,10 +430,10 @@ type TokensStartState struct {
func NewTokensStartState() *TokensStartState {
this := new(TokensStartState)
t := new(TokensStartState)
this.BaseDecisionState = NewBaseDecisionState()
t.BaseDecisionState = NewBaseDecisionState()
this.stateType = ATNStateTokenStart
return this
t.stateType = ATNStateTokenStart
return t
}

View File

@ -47,7 +47,7 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
//
var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token {
func (c *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token {
if PortDebug {
fmt.Println("Token factory creating: " + text)
@ -58,7 +58,7 @@ func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype
t.column = column
if text != "" {
t.SetText(text)
} else if this.copyText && source.charStream != nil {
} else if c.copyText && source.charStream != nil {
t.SetText(source.charStream.GetTextFromInterval(NewInterval(start, stop)))
}
@ -66,7 +66,7 @@ func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype
}
func (this *CommonTokenFactory) createThin(ttype int, text string) Token {
func (c *CommonTokenFactory) createThin(ttype int, text string) Token {
if PortDebug {
fmt.Println("Token factory creating: " + text)

View File

@ -70,59 +70,59 @@ func NewCommonTokenStream(lexer Lexer, channel int) *CommonTokenStream {
return ts
}
func (bt *CommonTokenStream) GetAllTokens() []Token {
return bt.tokens
func (c *CommonTokenStream) GetAllTokens() []Token {
return c.tokens
}
func (bt *CommonTokenStream) Mark() int {
func (c *CommonTokenStream) Mark() int {
return 0
}
func (bt *CommonTokenStream) Release(marker int) {
func (c *CommonTokenStream) Release(marker int) {
// no resources to release
}
func (bt *CommonTokenStream) reset() {
bt.Seek(0)
func (c *CommonTokenStream) reset() {
c.Seek(0)
}
func (bt *CommonTokenStream) Seek(index int) {
bt.lazyInit()
bt.index = bt.adjustSeekIndex(index)
func (c *CommonTokenStream) Seek(index int) {
c.lazyInit()
c.index = c.adjustSeekIndex(index)
}
func (bt *CommonTokenStream) Get(index int) Token {
bt.lazyInit()
return bt.tokens[index]
func (c *CommonTokenStream) Get(index int) Token {
c.lazyInit()
return c.tokens[index]
}
func (bt *CommonTokenStream) Consume() {
var SkipEofCheck = false
if bt.index >= 0 {
if bt.fetchedEOF {
func (c *CommonTokenStream) Consume() {
var SkipEOFCheck = false
if c.index >= 0 {
if c.fetchedEOF {
// the last token in tokens is EOF. Skip check if p indexes any
// fetched token except the last.
SkipEofCheck = bt.index < len(bt.tokens)-1
SkipEOFCheck = c.index < len(c.tokens)-1
} else {
// no EOF token in tokens. Skip check if p indexes a fetched token.
SkipEofCheck = bt.index < len(bt.tokens)
SkipEOFCheck = c.index < len(c.tokens)
}
} else {
// not yet initialized
SkipEofCheck = false
SkipEOFCheck = false
}
if PortDebug {
fmt.Println("Consume 1")
}
if !SkipEofCheck && bt.LA(1) == TokenEOF {
if !SkipEOFCheck && c.LA(1) == TokenEOF {
panic("cannot consume EOF")
}
if bt.Sync(bt.index + 1) {
if c.Sync(c.index + 1) {
if PortDebug {
fmt.Println("Consume 2")
}
bt.index = bt.adjustSeekIndex(bt.index + 1)
c.index = c.adjustSeekIndex(c.index + 1)
}
}
@ -132,10 +132,10 @@ func (bt *CommonTokenStream) Consume() {
// {@code false}.
// @see //Get(int i)
// /
func (bt *CommonTokenStream) Sync(i int) bool {
var n = i - len(bt.tokens) + 1 // how many more elements we need?
func (c *CommonTokenStream) Sync(i int) bool {
var n = i - len(c.tokens) + 1 // how many more elements we need?
if n > 0 {
var fetched = bt.fetch(n)
var fetched = c.fetch(n)
if PortDebug {
fmt.Println("Sync done")
}
@ -148,20 +148,20 @@ func (bt *CommonTokenStream) Sync(i int) bool {
//
// @return The actual number of elements added to the buffer.
// /
func (bt *CommonTokenStream) fetch(n int) int {
if bt.fetchedEOF {
func (c *CommonTokenStream) fetch(n int) int {
if c.fetchedEOF {
return 0
}
for i := 0; i < n; i++ {
var t Token = bt.tokenSource.NextToken()
var t = c.tokenSource.NextToken()
if PortDebug {
fmt.Println("fetch loop")
}
t.SetTokenIndex(len(bt.tokens))
bt.tokens = append(bt.tokens, t)
t.SetTokenIndex(len(c.tokens))
c.tokens = append(c.tokens, t)
if t.GetTokenType() == TokenEOF {
bt.fetchedEOF = true
c.fetchedEOF = true
return i + 1
}
}
@ -173,18 +173,18 @@ func (bt *CommonTokenStream) fetch(n int) int {
}
// Get all tokens from start..stop inclusively///
func (bt *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []Token {
func (c *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []Token {
if start < 0 || stop < 0 {
return nil
}
bt.lazyInit()
c.lazyInit()
var subset = make([]Token, 0)
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
if stop >= len(c.tokens) {
stop = len(c.tokens) - 1
}
for i := start; i < stop; i++ {
var t = bt.tokens[i]
var t = c.tokens[i]
if t.GetTokenType() == TokenEOF {
break
}
@ -195,49 +195,49 @@ func (bt *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet)
return subset
}
func (bt *CommonTokenStream) LA(i int) int {
return bt.LT(i).GetTokenType()
func (c *CommonTokenStream) LA(i int) int {
return c.LT(i).GetTokenType()
}
func (bt *CommonTokenStream) lazyInit() {
if bt.index == -1 {
bt.setup()
func (c *CommonTokenStream) lazyInit() {
if c.index == -1 {
c.setup()
}
}
func (bt *CommonTokenStream) setup() {
bt.Sync(0)
bt.index = bt.adjustSeekIndex(0)
func (c *CommonTokenStream) setup() {
c.Sync(0)
c.index = c.adjustSeekIndex(0)
}
func (bt *CommonTokenStream) GetTokenSource() TokenSource {
return bt.tokenSource
func (c *CommonTokenStream) GetTokenSource() TokenSource {
return c.tokenSource
}
// Reset bt token stream by setting its token source.///
func (bt *CommonTokenStream) SetTokenSource(tokenSource TokenSource) {
bt.tokenSource = tokenSource
bt.tokens = make([]Token, 0)
bt.index = -1
// Reset c token stream by setting its token source.///
func (c *CommonTokenStream) SetTokenSource(tokenSource TokenSource) {
c.tokenSource = tokenSource
c.tokens = make([]Token, 0)
c.index = -1
}
// Given a starting index, return the index of the next token on channel.
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and EOF.
// /
func (bt *CommonTokenStream) NextTokenOnChannel(i, channel int) int {
bt.Sync(i)
if i >= len(bt.tokens) {
func (c *CommonTokenStream) NextTokenOnChannel(i, channel int) int {
c.Sync(i)
if i >= len(c.tokens) {
return -1
}
var token = bt.tokens[i]
for token.GetChannel() != bt.channel {
var token = c.tokens[i]
for token.GetChannel() != c.channel {
if token.GetTokenType() == TokenEOF {
return -1
}
i += 1
bt.Sync(i)
token = bt.tokens[i]
i++
c.Sync(i)
token = c.tokens[i]
}
return i
}
@ -245,9 +245,9 @@ func (bt *CommonTokenStream) NextTokenOnChannel(i, channel int) int {
// Given a starting index, return the index of the previous token on channel.
// Return i if tokens[i] is on channel. Return -1 if there are no tokens
// on channel between i and 0.
func (bt *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
for i >= 0 && bt.tokens[i].GetChannel() != channel {
i -= 1
func (c *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
for i >= 0 && c.tokens[i].GetChannel() != channel {
i--
}
return i
}
@ -255,45 +255,45 @@ func (bt *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
// Collect all tokens on specified channel to the right of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
// EOF. If channel is -1, find any non default channel token.
func (bt *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []Token {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
func (c *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []Token {
c.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(c.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
}
var nextOnChannel = bt.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
var from_ = tokenIndex + 1
var nextOnChannel = c.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
var from = tokenIndex + 1
// if none onchannel to right, nextOnChannel=-1 so set to = last token
var to int
if nextOnChannel == -1 {
to = len(bt.tokens) - 1
to = len(c.tokens) - 1
} else {
to = nextOnChannel
}
return bt.filterForChannel(from_, to, channel)
return c.filterForChannel(from, to, channel)
}
// Collect all tokens on specified channel to the left of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
// If channel is -1, find any non default channel token.
func (bt *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []Token {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
func (c *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []Token {
c.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(c.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
}
var prevOnChannel = bt.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
var prevOnChannel = c.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
if prevOnChannel == tokenIndex-1 {
return nil
}
// if none on channel to left, prevOnChannel=-1 then from=0
var from_ = prevOnChannel + 1
var from = prevOnChannel + 1
var to = tokenIndex - 1
return bt.filterForChannel(from_, to, channel)
return c.filterForChannel(from, to, channel)
}
func (bt *CommonTokenStream) filterForChannel(left, right, channel int) []Token {
func (c *CommonTokenStream) filterForChannel(left, right, channel int) []Token {
var hidden = make([]Token, 0)
for i := left; i < right+1; i++ {
var t = bt.tokens[i]
var t = c.tokens[i]
if channel == -1 {
if t.GetChannel() != LexerDefaultTokenChannel {
hidden = append(hidden, t)
@ -308,40 +308,40 @@ func (bt *CommonTokenStream) filterForChannel(left, right, channel int) []Token
return hidden
}
func (bt *CommonTokenStream) GetSourceName() string {
return bt.tokenSource.GetSourceName()
func (c *CommonTokenStream) GetSourceName() string {
return c.tokenSource.GetSourceName()
}
func (bt *CommonTokenStream) Size() int {
return len(bt.tokens)
func (c *CommonTokenStream) Size() int {
return len(c.tokens)
}
func (bt *CommonTokenStream) Index() int {
return bt.index
func (c *CommonTokenStream) Index() int {
return c.index
}
func (bt *CommonTokenStream) GetAllText() string {
return bt.GetTextFromInterval(nil)
func (c *CommonTokenStream) GetAllText() string {
return c.GetTextFromInterval(nil)
}
func (bt *CommonTokenStream) GetTextFromTokens(start, end Token) string {
func (c *CommonTokenStream) GetTextFromTokens(start, end Token) string {
if start == nil || end == nil {
return ""
}
return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
return c.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
}
func (bt *CommonTokenStream) GetTextFromRuleContext(interval RuleContext) string {
return bt.GetTextFromInterval(interval.GetSourceInterval())
func (c *CommonTokenStream) GetTextFromRuleContext(interval RuleContext) string {
return c.GetTextFromInterval(interval.GetSourceInterval())
}
func (bt *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
func (c *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
bt.lazyInit()
bt.Fill()
c.lazyInit()
c.Fill()
if interval == nil {
interval = NewInterval(0, len(bt.tokens)-1)
interval = NewInterval(0, len(c.tokens)-1)
}
var start = interval.start
@ -349,13 +349,13 @@ func (bt *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
if start < 0 || stop < 0 {
return ""
}
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
if stop >= len(c.tokens) {
stop = len(c.tokens) - 1
}
var s = ""
for i := start; i < stop+1; i++ {
var t = bt.tokens[i]
var t = c.tokens[i]
if t.GetTokenType() == TokenEOF {
break
}
@ -366,65 +366,65 @@ func (bt *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
}
// Get all tokens from lexer until EOF///
func (bt *CommonTokenStream) Fill() {
bt.lazyInit()
for bt.fetch(1000) == 1000 {
func (c *CommonTokenStream) Fill() {
c.lazyInit()
for c.fetch(1000) == 1000 {
continue
}
}
func (ts *CommonTokenStream) adjustSeekIndex(i int) int {
return ts.NextTokenOnChannel(i, ts.channel)
func (c *CommonTokenStream) adjustSeekIndex(i int) int {
return c.NextTokenOnChannel(i, c.channel)
}
func (ts *CommonTokenStream) LB(k int) Token {
func (c *CommonTokenStream) LB(k int) Token {
if k == 0 || ts.index-k < 0 {
if k == 0 || c.index-k < 0 {
return nil
}
var i = ts.index
var i = c.index
var n = 1
// find k good tokens looking backwards
for n <= k {
// Skip off-channel tokens
i = ts.previousTokenOnChannel(i-1, ts.channel)
n += 1
i = c.previousTokenOnChannel(i-1, c.channel)
n++
}
if i < 0 {
return nil
}
return ts.tokens[i]
return c.tokens[i]
}
func (ts *CommonTokenStream) LT(k int) Token {
ts.lazyInit()
func (c *CommonTokenStream) LT(k int) Token {
c.lazyInit()
if k == 0 {
return nil
}
if k < 0 {
return ts.LB(-k)
return c.LB(-k)
}
var i = ts.index
var i = c.index
var n = 1 // we know tokens[pos] is a good one
// find k good tokens
for n < k {
// Skip off-channel tokens, but make sure to not look past EOF
if ts.Sync(i + 1) {
i = ts.NextTokenOnChannel(i+1, ts.channel)
if c.Sync(i + 1) {
i = c.NextTokenOnChannel(i+1, c.channel)
}
n += 1
n++
}
return ts.tokens[i]
return c.tokens[i]
}
// Count EOF just once.///
func (ts *CommonTokenStream) getNumberOfOnChannelTokens() int {
func (c *CommonTokenStream) getNumberOfOnChannelTokens() int {
var n = 0
ts.Fill()
for i := 0; i < len(ts.tokens); i++ {
var t = ts.tokens[i]
if t.GetChannel() == ts.channel {
n += 1
c.Fill()
for i := 0; i < len(c.tokens); i++ {
var t = c.tokens[i]
if t.GetChannel() == c.channel {
n++
}
if t.GetTokenType() == TokenEOF {
break

View File

@ -5,28 +5,28 @@ import "sort"
type DFA struct {
atnStartState DecisionState
decision int
_states map[string]*DFAState
states map[string]*DFAState
s0 *DFAState
precedenceDfa bool
}
func NewDFA(atnStartState DecisionState, decision int) *DFA {
this := new(DFA)
d := new(DFA)
// From which ATN state did we create this DFA?
this.atnStartState = atnStartState
this.decision = decision
// From which ATN state did we create d DFA?
d.atnStartState = atnStartState
d.decision = decision
// A set of all DFA states. Use {@link Map} so we can get old state back
// ({@link Set} only allows you to see if it's there).
this._states = make(map[string]*DFAState)
this.s0 = nil
// {@code true} if this DFA is for a precedence decision otherwise,
d.states = make(map[string]*DFAState)
d.s0 = nil
// {@code true} if d DFA is for a precedence decision otherwise,
// {@code false}. This is the backing field for {@link //isPrecedenceDfa},
// {@link //setPrecedenceDfa}.
this.precedenceDfa = false
d.precedenceDfa = false
return this
return d
}
// Get the start state for a specific precedence value.
@ -35,18 +35,18 @@ func NewDFA(atnStartState DecisionState, decision int) *DFA {
// @return The start state corresponding to the specified precedence, or
// {@code nil} if no start state exists for the specified precedence.
//
// @panics IllegalStateException if this is not a precedence DFA.
// @panics IllegalStateException if d is not a precedence DFA.
// @see //isPrecedenceDfa()
func (this *DFA) getPrecedenceStartState(precedence int) *DFAState {
if !(this.precedenceDfa) {
func (d *DFA) getPrecedenceStartState(precedence int) *DFAState {
if !(d.precedenceDfa) {
panic("Only precedence DFAs may contain a precedence start state.")
}
// s0.edges is never nil for a precedence DFA
if precedence < 0 || precedence >= len(this.s0.edges) {
if precedence < 0 || precedence >= len(d.s0.edges) {
return nil
}
return this.s0.edges[precedence]
return d.s0.edges[precedence]
}
// Set the start state for a specific precedence value.
@ -55,11 +55,11 @@ func (this *DFA) getPrecedenceStartState(precedence int) *DFAState {
// @param startState The start state corresponding to the specified
// precedence.
//
// @panics IllegalStateException if this is not a precedence DFA.
// @panics IllegalStateException if d is not a precedence DFA.
// @see //isPrecedenceDfa()
//
func (this *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
if !(this.precedenceDfa) {
func (d *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
if !(d.precedenceDfa) {
panic("Only precedence DFAs may contain a precedence start state.")
}
if precedence < 0 {
@ -71,16 +71,16 @@ func (this *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
// s0.edges is never nil for a precedence DFA
// s0.edges is never null for a precedence DFA
if (precedence >= len(this.s0.edges)) {
if precedence >= len(d.s0.edges) {
// enlarge the slice
this.s0.edges = append( this.s0.edges, make([]*DFAState, precedence + 1 - len(this.s0.edges))...)
d.s0.edges = append(d.s0.edges, make([]*DFAState, precedence+1-len(d.s0.edges))...)
}
this.s0.edges[precedence] = startState
d.s0.edges[precedence] = startState
}
//
// Sets whether this is a precedence DFA. If the specified value differs
// Sets whether d is a precedence DFA. If the specified value differs
// from the current DFA configuration, the following actions are taken
// otherwise no changes are made to the current DFA.
//
@ -93,27 +93,27 @@ func (this *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
// <li>The {@link //precedenceDfa} field is updated</li>
// </ul>
//
// @param precedenceDfa {@code true} if this is a precedence DFA otherwise,
// @param precedenceDfa {@code true} if d is a precedence DFA otherwise,
// {@code false}
func (this *DFA) setPrecedenceDfa(precedenceDfa bool) {
if this.precedenceDfa != precedenceDfa {
this._states = make(map[string]*DFAState)
func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
if d.precedenceDfa != precedenceDfa {
d.states = make(map[string]*DFAState)
if precedenceDfa {
var precedenceState = NewDFAState(-1, NewBaseATNConfigSet(false))
precedenceState.edges = make([]*DFAState, 0)
precedenceState.isAcceptState = false
precedenceState.requiresFullContext = false
this.s0 = precedenceState
d.s0 = precedenceState
} else {
this.s0 = nil
d.s0 = nil
}
this.precedenceDfa = precedenceDfa
d.precedenceDfa = precedenceDfa
}
}
func (this *DFA) GetStates() map[string]*DFAState {
return this._states
func (d *DFA) GetStates() map[string]*DFAState {
return d.states
}
type DFAStateList []*DFAState
@ -122,13 +122,13 @@ func (a DFAStateList) Len() int { return len(a) }
func (a DFAStateList) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a DFAStateList) Less(i, j int) bool { return a[i].stateNumber < a[j].stateNumber }
// Return a list of all states in this DFA, ordered by state number.
func (this *DFA) sortedStates() []*DFAState {
// Return a list of all states in d DFA, ordered by state number.
func (d *DFA) sortedStates() []*DFAState {
// extract the values
vs := make([]*DFAState, len(this._states))
vs := make([]*DFAState, len(d.states))
i := 0
for _, v := range this._states {
for _, v := range d.states {
vs[i] = v
i++
}
@ -137,18 +137,18 @@ func (this *DFA) sortedStates() []*DFAState {
return vs
}
func (this *DFA) String(literalNames []string, symbolicNames []string) string {
if this.s0 == nil {
func (d *DFA) String(literalNames []string, symbolicNames []string) string {
if d.s0 == nil {
return ""
}
var serializer = NewDFASerializer(this, literalNames, symbolicNames)
var serializer = NewDFASerializer(d, literalNames, symbolicNames)
return serializer.String()
}
func (this *DFA) ToLexerString() string {
if this.s0 == nil {
func (d *DFA) ToLexerString() string {
if d.s0 == nil {
return ""
}
var serializer = NewLexerDFASerializer(this)
var serializer = NewLexerDFASerializer(d)
return serializer.String()
}

View File

@ -22,34 +22,34 @@ func NewDFASerializer(dfa *DFA, literalNames, symbolicNames []string) *DFASerial
symbolicNames = make([]string, 0)
}
this := new(DFASerializer)
d := new(DFASerializer)
this.dfa = dfa
this.literalNames = literalNames
this.symbolicNames = symbolicNames
d.dfa = dfa
d.literalNames = literalNames
d.symbolicNames = symbolicNames
return this
return d
}
func (this *DFASerializer) String() string {
func (d *DFASerializer) String() string {
if this.dfa.s0 == nil {
if d.dfa.s0 == nil {
return ""
}
var buf = ""
var states = this.dfa.sortedStates()
var states = d.dfa.sortedStates()
for _, s := range states {
if s.edges != nil {
var n = len(s.edges)
for j := 0; j < n; j++ {
var t = s.edges[j]
if t != nil && t.stateNumber != 0x7FFFFFFF {
buf += this.GetStateString(s)
buf += d.GetStateString(s)
buf += "-"
buf += this.getEdgeLabel(j)
buf += d.getEdgeLabel(j)
buf += "->"
buf += this.GetStateString(t)
buf += d.GetStateString(t)
buf += "\n"
}
}
@ -62,19 +62,19 @@ func (this *DFASerializer) String() string {
return buf
}
func (this *DFASerializer) getEdgeLabel(i int) string {
func (d *DFASerializer) getEdgeLabel(i int) string {
if i == 0 {
return "EOF"
} else if this.literalNames != nil && i-1 < len(this.literalNames) {
return this.literalNames[i-1]
} else if this.symbolicNames != nil && i-1 < len(this.symbolicNames) {
return this.symbolicNames[i-1]
} else {
return strconv.Itoa(i - 1)
} else if d.literalNames != nil && i-1 < len(d.literalNames) {
return d.literalNames[i-1]
} else if d.symbolicNames != nil && i-1 < len(d.symbolicNames) {
return d.symbolicNames[i-1]
}
return strconv.Itoa(i - 1)
}
func (this *DFASerializer) GetStateString(s *DFAState) string {
func (d *DFASerializer) GetStateString(s *DFAState) string {
var a, b string
@ -90,12 +90,12 @@ func (this *DFASerializer) GetStateString(s *DFAState) string {
if s.isAcceptState {
if s.predicates != nil {
return baseStateStr + "=>" + fmt.Sprint(s.predicates)
} else {
return baseStateStr + "=>" + fmt.Sprint(s.prediction)
}
} else {
return baseStateStr
return baseStateStr + "=>" + fmt.Sprint(s.prediction)
}
return baseStateStr
}
type LexerDFASerializer struct {
@ -104,25 +104,25 @@ type LexerDFASerializer struct {
func NewLexerDFASerializer(dfa *DFA) *LexerDFASerializer {
this := new(LexerDFASerializer)
l := new(LexerDFASerializer)
this.DFASerializer = NewDFASerializer(dfa, nil, nil)
l.DFASerializer = NewDFASerializer(dfa, nil, nil)
return this
return l
}
func (this *LexerDFASerializer) getEdgeLabel(i int) string {
func (l *LexerDFASerializer) getEdgeLabel(i int) string {
return "'" + string(i) + "'"
}
func (this *LexerDFASerializer) String() string {
func (l *LexerDFASerializer) String() string {
if this.dfa.s0 == nil {
if l.dfa.s0 == nil {
return ""
}
var buf = ""
var states = this.dfa.sortedStates()
var states = l.dfa.sortedStates()
for i := 0; i < len(states); i++ {
var s = states[i]
if s.edges != nil {
@ -130,11 +130,11 @@ func (this *LexerDFASerializer) String() string {
for j := 0; j < n; j++ {
var t = s.edges[j]
if t != nil && t.stateNumber != 0x7FFFFFFF {
buf += this.GetStateString(s)
buf += l.GetStateString(s)
buf += "-"
buf += this.getEdgeLabel(j)
buf += l.getEdgeLabel(j)
buf += "->"
buf += this.GetStateString(t)
buf += l.GetStateString(t)
buf += "\n"
}
}

View File

@ -13,16 +13,16 @@ type PredPrediction struct {
}
func NewPredPrediction(pred SemanticContext, alt int) *PredPrediction {
this := new(PredPrediction)
p := new(PredPrediction)
this.alt = alt
this.pred = pred
p.alt = alt
p.pred = pred
return this
return p
}
func (this *PredPrediction) String() string {
return "(" + fmt.Sprint(this.pred) + ", " + fmt.Sprint(this.alt) + ")"
func (p *PredPrediction) String() string {
return "(" + fmt.Sprint(p.pred) + ", " + fmt.Sprint(p.alt) + ")"
}
// A DFA state represents a set of possible ATN configurations.
@ -67,30 +67,30 @@ func NewDFAState(stateNumber int, configs ATNConfigSet) *DFAState {
configs = NewBaseATNConfigSet(false)
}
this := new(DFAState)
d := new(DFAState)
this.stateNumber = stateNumber
this.configs = configs
d.stateNumber = stateNumber
d.configs = configs
// {@code edges[symbol]} points to target of symbol. Shift up by 1 so (-1)
// {@link Token//EOF} maps to {@code edges[0]}.
this.edges = nil
this.isAcceptState = false
d.edges = nil
d.isAcceptState = false
// if accept state, what ttype do we Match or alt do we predict?
// This is set to {@link ATN//INVALID_ALT_NUMBER} when {@link
// //predicates}{@code !=nil} or
// {@link //requiresFullContext}.
this.prediction = 0
this.lexerActionExecutor = nil
// Indicates that this state was created during SLL prediction that
d.prediction = 0
d.lexerActionExecutor = nil
// Indicates that d state was created during SLL prediction that
// discovered a conflict between the configurations in the state. Future
// {@link ParserATNSimulator//execATN} invocations immediately jumped doing
// full context prediction if this field is true.
this.requiresFullContext = false
// During SLL parsing, this is a list of predicates associated with the
// full context prediction if d field is true.
d.requiresFullContext = false
// During SLL parsing, d is a list of predicates associated with the
// ATN configurations of the DFA state. When we have predicates,
// {@link //requiresFullContext} is {@code false} since full context
// prediction evaluates predicates
// on-the-fly. If this is not nil, then {@link //prediction} is
// on-the-fly. If d is not nil, then {@link //prediction} is
// {@link ATN//INVALID_ALT_NUMBER}.
//
// <p>We only use these for non-{@link //requiresFullContext} but
@ -100,28 +100,28 @@ func NewDFAState(stateNumber int, configs ATNConfigSet) *DFAState {
//
// <p>This list is computed by {@link
// ParserATNSimulator//predicateDFAState}.</p>
this.predicates = nil
return this
d.predicates = nil
return d
}
// Get the set of all alts mentioned by all ATN configurations in this
// Get the set of all alts mentioned by all ATN configurations in d
// DFA state.
func (this *DFAState) GetAltSet() *Set {
func (d *DFAState) GetAltSet() *Set {
var alts = NewSet(nil, nil)
if this.configs != nil {
for _, c := range this.configs.GetItems() {
if d.configs != nil {
for _, c := range d.configs.GetItems() {
alts.add(c.GetAlt())
}
}
if alts.length() == 0 {
return nil
} else {
return alts
}
return alts
}
func (this *DFAState) setPrediction(v int) {
this.prediction = v
func (d *DFAState) setPrediction(v int) {
d.prediction = v
}
// Two {@link DFAState} instances are equal if their ATN configuration sets
@ -133,34 +133,34 @@ func (this *DFAState) setPrediction(v int) {
//
// <p>Cannot test the DFA state numbers here because in
// {@link ParserATNSimulator//addDFAState} we need to know if any other state
// exists that has this exact set of ATN configurations. The
// exists that has d exact set of ATN configurations. The
// {@link //stateNumber} is irrelevant.</p>
func (this *DFAState) equals(other interface{}) bool {
func (d *DFAState) equals(other interface{}) bool {
if this == other {
if d == other {
return true
} else if _, ok := other.(*DFAState); !ok {
return false
}
return this.configs.Equals(other.(*DFAState).configs)
return d.configs.Equals(other.(*DFAState).configs)
}
func (this *DFAState) String() string {
return strconv.Itoa(this.stateNumber) + ":" + this.Hash()
func (d *DFAState) String() string {
return strconv.Itoa(d.stateNumber) + ":" + d.Hash()
}
func (this *DFAState) Hash() string {
func (d *DFAState) Hash() string {
var s string
if this.isAcceptState {
if this.predicates != nil {
s = "=>" + fmt.Sprint(this.predicates)
if d.isAcceptState {
if d.predicates != nil {
s = "=>" + fmt.Sprint(d.predicates)
} else {
s = "=>" + fmt.Sprint(this.prediction)
s = "=>" + fmt.Sprint(d.prediction)
}
}
return fmt.Sprint(this.configs) + s
return fmt.Sprint(d.configs) + s
}

View File

@ -20,7 +20,7 @@ import (
// full-context prediction resolved an SLL conflict to a unique alternative,
// <em>and</em> the minimum alternative of the SLL conflict was found to not be
// a truly viable alternative. Two-stage parsing cannot be used for inputs where
// this situation occurs.</li>
// d situation occurs.</li>
// </ul>
type DiagnosticErrorListener struct {
@ -38,37 +38,37 @@ func NewDiagnosticErrorListener(exactOnly bool) *DiagnosticErrorListener {
return n
}
func (this *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
if this.exactOnly && !exact {
func (d *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
if d.exactOnly && !exact {
return
}
var msg = "reportAmbiguity d=" +
this.getDecisionDescription(recognizer, dfa) +
d.getDecisionDescription(recognizer, dfa) +
": ambigAlts=" +
this.getConflictingAlts(ambigAlts, configs).String() +
d.getConflictingAlts(ambigAlts, configs).String() +
", input='" +
recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
recognizer.NotifyErrorListeners(msg, nil, nil)
}
func (this *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
func (d *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
var msg = "reportAttemptingFullContext d=" +
this.getDecisionDescription(recognizer, dfa) +
d.getDecisionDescription(recognizer, dfa) +
", input='" +
recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
recognizer.NotifyErrorListeners(msg, nil, nil)
}
func (this *DiagnosticErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
func (d *DiagnosticErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
var msg = "reportContextSensitivity d=" +
this.getDecisionDescription(recognizer, dfa) +
d.getDecisionDescription(recognizer, dfa) +
", input='" +
recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
recognizer.NotifyErrorListeners(msg, nil, nil)
}
func (this *DiagnosticErrorListener) getDecisionDescription(recognizer Parser, dfa *DFA) string {
func (d *DiagnosticErrorListener) getDecisionDescription(recognizer Parser, dfa *DFA) string {
var decision = dfa.decision
var ruleIndex = dfa.atnStartState.GetRuleIndex()
@ -94,7 +94,7 @@ func (this *DiagnosticErrorListener) getDecisionDescription(recognizer Parser, d
// @return Returns {@code ReportedAlts} if it is not {@code nil}, otherwise
// returns the set of alternatives represented in {@code configs}.
//
func (this *DiagnosticErrorListener) getConflictingAlts(ReportedAlts *BitSet, set ATNConfigSet) *BitSet {
func (d *DiagnosticErrorListener) getConflictingAlts(ReportedAlts *BitSet, set ATNConfigSet) *BitSet {
if ReportedAlts != nil {
return ReportedAlts
}

View File

@ -2,8 +2,8 @@ package antlr
import (
"fmt"
"strconv"
"os"
"strconv"
)
// Provides an empty default implementation of {@link ANTLRErrorListener}. The
@ -24,25 +24,25 @@ func NewDefaultErrorListener() *DefaultErrorListener {
return new(DefaultErrorListener)
}
func (this *DefaultErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
func (d *DefaultErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
if PortDebug {
fmt.Println("SyntaxError!")
}
}
func (this *DefaultErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
func (d *DefaultErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
if PortDebug {
fmt.Println("ReportAmbiguity!")
}
}
func (this *DefaultErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
func (d *DefaultErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
if PortDebug {
fmt.Println("ReportAttemptingFullContext!")
}
}
func (this *DefaultErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
func (d *DefaultErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
if PortDebug {
fmt.Println("ReportContextSensitivity!")
}
@ -73,8 +73,8 @@ var ConsoleErrorListenerINSTANCE = NewConsoleErrorListener()
// line <em>line</em>:<em>charPositionInLine</em> <em>msg</em>
// </pre>
//
func (this *ConsoleErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
fmt.Fprintln(os.Stderr, "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column) + " " + msg)
func (c *ConsoleErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
fmt.Fprintln(os.Stderr, "line "+strconv.Itoa(line)+":"+strconv.Itoa(column)+" "+msg)
}
type ProxyErrorListener struct {
@ -91,26 +91,26 @@ func NewProxyErrorListener(delegates []ErrorListener) *ProxyErrorListener {
return l
}
func (this *ProxyErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
for _, d := range this.delegates {
func (p *ProxyErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
for _, d := range p.delegates {
d.SyntaxError(recognizer, offendingSymbol, line, column, msg, e)
}
}
func (this *ProxyErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
for _, d := range this.delegates {
func (p *ProxyErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
for _, d := range p.delegates {
d.ReportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
}
}
func (this *ProxyErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
for _, d := range this.delegates {
func (p *ProxyErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
for _, d := range p.delegates {
d.ReportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs)
}
}
func (this *ProxyErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
for _, d := range this.delegates {
func (p *ProxyErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
for _, d := range p.delegates {
d.ReportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs)
}
}

View File

@ -51,8 +51,8 @@ func NewDefaultErrorStrategy() *DefaultErrorStrategy {
// <p>The default implementation simply calls {@link //endErrorCondition} to
// ensure that the handler is not in error recovery mode.</p>
func (this *DefaultErrorStrategy) reset(recognizer Parser) {
this.endErrorCondition(recognizer)
func (d *DefaultErrorStrategy) reset(recognizer Parser) {
d.endErrorCondition(recognizer)
}
//
@ -61,12 +61,12 @@ func (this *DefaultErrorStrategy) reset(recognizer Parser) {
//
// @param recognizer the parser instance
//
func (this *DefaultErrorStrategy) beginErrorCondition(recognizer Parser) {
this.errorRecoveryMode = true
func (d *DefaultErrorStrategy) beginErrorCondition(recognizer Parser) {
d.errorRecoveryMode = true
}
func (this *DefaultErrorStrategy) inErrorRecoveryMode(recognizer Parser) bool {
return this.errorRecoveryMode
func (d *DefaultErrorStrategy) inErrorRecoveryMode(recognizer Parser) bool {
return d.errorRecoveryMode
}
//
@ -75,10 +75,10 @@ func (this *DefaultErrorStrategy) inErrorRecoveryMode(recognizer Parser) bool {
//
// @param recognizer
//
func (this *DefaultErrorStrategy) endErrorCondition(recognizer Parser) {
this.errorRecoveryMode = false
this.lastErrorStates = nil
this.lastErrorIndex = -1
func (d *DefaultErrorStrategy) endErrorCondition(recognizer Parser) {
d.errorRecoveryMode = false
d.lastErrorStates = nil
d.lastErrorIndex = -1
}
//
@ -86,8 +86,8 @@ func (this *DefaultErrorStrategy) endErrorCondition(recognizer Parser) {
//
// <p>The default implementation simply calls {@link //endErrorCondition}.</p>
//
func (this *DefaultErrorStrategy) ReportMatch(recognizer Parser) {
this.endErrorCondition(recognizer)
func (d *DefaultErrorStrategy) ReportMatch(recognizer Parser) {
d.endErrorCondition(recognizer)
}
//
@ -109,13 +109,13 @@ func (this *DefaultErrorStrategy) ReportMatch(recognizer Parser) {
// the exception</li>
// </ul>
//
func (this *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionException) {
func (d *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionException) {
// if we've already Reported an error and have not Matched a token
// yet successfully, don't Report any errors.
if this.inErrorRecoveryMode(recognizer) {
if d.inErrorRecoveryMode(recognizer) {
return // don't Report spurious errors
}
this.beginErrorCondition(recognizer)
d.beginErrorCondition(recognizer)
switch t := e.(type) {
default:
@ -123,11 +123,11 @@ func (this *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionEx
// fmt.Println(e.stack)
recognizer.NotifyErrorListeners(e.GetMessage(), e.GetOffendingToken(), e)
case *NoViableAltException:
this.ReportNoViableAlternative(recognizer, t)
d.ReportNoViableAlternative(recognizer, t)
case *InputMisMatchException:
this.ReportInputMisMatch(recognizer, t)
d.ReportInputMisMatch(recognizer, t)
case *FailedPredicateException:
this.ReportFailedPredicate(recognizer, t)
d.ReportFailedPredicate(recognizer, t)
}
}
@ -137,28 +137,28 @@ func (this *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionEx
// until we find one in the reSynchronization set--loosely the set of tokens
// that can follow the current rule.</p>
//
func (this *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
func (d *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
if this.lastErrorIndex == recognizer.GetInputStream().Index() &&
this.lastErrorStates != nil && this.lastErrorStates.contains(recognizer.GetState()) {
if d.lastErrorIndex == recognizer.GetInputStream().Index() &&
d.lastErrorStates != nil && d.lastErrorStates.contains(recognizer.GetState()) {
// uh oh, another error at same token index and previously-Visited
// state in ATN must be a case where LT(1) is in the recovery
// token set so nothing got consumed. Consume a single token
// at least to prevent an infinite loop this is a failsafe.
// at least to prevent an infinite loop d is a failsafe.
recognizer.Consume()
}
this.lastErrorIndex = recognizer.GetInputStream().Index()
if this.lastErrorStates == nil {
this.lastErrorStates = NewIntervalSet()
d.lastErrorIndex = recognizer.GetInputStream().Index()
if d.lastErrorStates == nil {
d.lastErrorStates = NewIntervalSet()
}
this.lastErrorStates.addOne(recognizer.GetState())
var followSet = this.getErrorRecoverySet(recognizer)
this.consumeUntil(recognizer, followSet)
d.lastErrorStates.addOne(recognizer.GetState())
var followSet = d.getErrorRecoverySet(recognizer)
d.consumeUntil(recognizer, followSet)
}
// The default implementation of {@link ANTLRErrorStrategy//Sync} makes sure
// that the current lookahead symbol is consistent with what were expecting
// at this point in the ATN. You can call this anytime but ANTLR only
// at d point in the ATN. You can call d anytime but ANTLR only
// generates code to check before subrules/loops and each iteration.
//
// <p>Implements Jim Idle's magic Sync mechanism in closures and optional
@ -198,12 +198,12 @@ func (this *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionExcept
//
// <p>This functionality cost a little bit of effort because the parser has to
// compare token set at the start of the loop and at each iteration. If for
// some reason speed is suffering for you, you can turn off this
// functionality by simply overriding this method as a blank { }.</p>
// some reason speed is suffering for you, you can turn off d
// functionality by simply overriding d method as a blank { }.</p>
//
func (this *DefaultErrorStrategy) Sync(recognizer Parser) {
func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
// If already recovering, don't try to Sync
if this.inErrorRecoveryMode(recognizer) {
if d.inErrorRecoveryMode(recognizer) {
return
}
@ -247,19 +247,18 @@ func (this *DefaultErrorStrategy) Sync(recognizer Parser) {
fallthrough
case ATNStateStarLoopEntry:
// Report error and recover if possible
if this.singleTokenDeletion(recognizer) != nil {
if d.singleTokenDeletion(recognizer) != nil {
return
} else {
panic(NewInputMisMatchException(recognizer))
}
panic(NewInputMisMatchException(recognizer))
case ATNStatePlusLoopBack:
fallthrough
case ATNStateStarLoopBack:
this.ReportUnwantedToken(recognizer)
d.ReportUnwantedToken(recognizer)
var expecting = NewIntervalSet()
expecting.addSet(recognizer.GetExpectedTokens())
var whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer))
this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
var whatFollowsLoopIterationOrRule = expecting.addSet(d.getErrorRecoverySet(recognizer))
d.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
default:
// do nothing if we can't identify the exact kind of ATN state
}
@ -273,7 +272,7 @@ func (this *DefaultErrorStrategy) Sync(recognizer Parser) {
// @param recognizer the parser instance
// @param e the recognition exception
//
func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *NoViableAltException) {
func (d *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *NoViableAltException) {
var tokens = recognizer.GetTokenStream()
var input string
if tokens != nil {
@ -285,7 +284,7 @@ func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e
} else {
input = "<unknown input>"
}
var msg = "no viable alternative at input " + this.escapeWSAndQuote(input)
var msg = "no viable alternative at input " + d.escapeWSAndQuote(input)
recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
}
@ -313,16 +312,16 @@ func (this *DefaultErrorStrategy) ReportInputMisMatch(recognizer Parser, e *Inpu
// @param recognizer the parser instance
// @param e the recognition exception
//
func (this *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *FailedPredicateException) {
func (d *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *FailedPredicateException) {
var ruleName = recognizer.GetRuleNames()[recognizer.GetParserRuleContext().GetRuleIndex()]
var msg = "rule " + ruleName + " " + e.message
recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
}
// This method is called to Report a syntax error which requires the removal
// of a token from the input stream. At the time this method is called, the
// of a token from the input stream. At the time d method is called, the
// erroneous symbol is current {@code LT(1)} symbol and has not yet been
// removed from the input stream. When this method returns,
// removed from the input stream. When d method returns,
// {@code recognizer} is in error recovery mode.
//
// <p>This method is called when {@link //singleTokenDeletion} identifies
@ -336,22 +335,22 @@ func (this *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *Fa
//
// @param recognizer the parser instance
//
func (this *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
if this.inErrorRecoveryMode(recognizer) {
func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
if d.inErrorRecoveryMode(recognizer) {
return
}
this.beginErrorCondition(recognizer)
d.beginErrorCondition(recognizer)
var t = recognizer.GetCurrentToken()
var tokenName = this.GetTokenErrorDisplay(t)
var expecting = this.getExpectedTokens(recognizer)
var tokenName = d.GetTokenErrorDisplay(t)
var expecting = d.getExpectedTokens(recognizer)
var msg = "extraneous input " + tokenName + " expecting " +
expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
recognizer.NotifyErrorListeners(msg, t, nil)
}
// This method is called to Report a syntax error which requires the
// insertion of a missing token into the input stream. At the time this
// method is called, the missing token has not yet been inserted. When this
// insertion of a missing token into the input stream. At the time d
// method is called, the missing token has not yet been inserted. When d
// method returns, {@code recognizer} is in error recovery mode.
//
// <p>This method is called when {@link //singleTokenInsertion} identifies
@ -365,21 +364,21 @@ func (this *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
//
// @param recognizer the parser instance
//
func (this *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
if this.inErrorRecoveryMode(recognizer) {
func (d *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
if d.inErrorRecoveryMode(recognizer) {
return
}
this.beginErrorCondition(recognizer)
d.beginErrorCondition(recognizer)
var t = recognizer.GetCurrentToken()
var expecting = this.getExpectedTokens(recognizer)
var expecting = d.getExpectedTokens(recognizer)
var msg = "missing " + expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false) +
" at " + this.GetTokenErrorDisplay(t)
" at " + d.GetTokenErrorDisplay(t)
recognizer.NotifyErrorListeners(msg, t, nil)
}
// <p>The default implementation attempts to recover from the mismatched input
// by using single token insertion and deletion as described below. If the
// recovery attempt fails, this method panics an
// recovery attempt fails, d method panics an
// {@link InputMisMatchException}.</p>
//
// <p><strong>EXTRA TOKEN</strong> (single token deletion)</p>
@ -413,7 +412,7 @@ func (this *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
// stat &rarr expr &rarr atom
// </pre>
//
// and it will be trying to Match the {@code ')'} at this point in the
// and it will be trying to Match the {@code ')'} at d point in the
// derivation:
//
// <pre>
@ -426,9 +425,9 @@ func (this *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
// is in the set of tokens that can follow the {@code ')'} token reference
// in rule {@code atom}. It can assume that you forgot the {@code ')'}.
//
func (this *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
func (d *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
// SINGLE TOKEN DELETION
var MatchedSymbol = this.singleTokenDeletion(recognizer)
var MatchedSymbol = d.singleTokenDeletion(recognizer)
if MatchedSymbol != nil {
// we have deleted the extra token.
// now, move past ttype token as if all were ok
@ -436,8 +435,8 @@ func (this *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
return MatchedSymbol
}
// SINGLE TOKEN INSERTION
if this.singleTokenInsertion(recognizer) {
return this.getMissingSymbol(recognizer)
if d.singleTokenInsertion(recognizer) {
return d.getMissingSymbol(recognizer)
}
// even that didn't work must panic the exception
panic(NewInputMisMatchException(recognizer))
@ -452,15 +451,15 @@ func (this *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
//
// <p>This method determines whether or not single-token insertion is viable by
// checking if the {@code LA(1)} input symbol could be successfully Matched
// if it were instead the {@code LA(2)} symbol. If this method returns
// if it were instead the {@code LA(2)} symbol. If d method returns
// {@code true}, the caller is responsible for creating and inserting a
// token with the correct type to produce this behavior.</p>
// token with the correct type to produce d behavior.</p>
//
// @param recognizer the parser instance
// @return {@code true} if single-token insertion is a viable recovery
// strategy for the current mismatched input, otherwise {@code false}
//
func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
func (d *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
var currentSymbolType = recognizer.GetTokenStream().LA(1)
// if current token is consistent with what could come after current
// ATN state, then we know we're missing a token error recovery
@ -470,11 +469,11 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
var next = currentState.GetTransitions()[0].getTarget()
var expectingAtLL2 = atn.NextTokens(next, recognizer.GetParserRuleContext())
if expectingAtLL2.contains(currentSymbolType) {
this.ReportMissingToken(recognizer)
d.ReportMissingToken(recognizer)
return true
} else {
return false
}
return false
}
// This method implements the single-token deletion inline error recovery
@ -484,7 +483,7 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
// {@code recognizer} will <em>not</em> be in error recovery mode since the
// returned token was a successful Match.
//
// <p>If the single-token deletion is successful, this method calls
// <p>If the single-token deletion is successful, d method calls
// {@link //ReportUnwantedToken} to Report the error, followed by
// {@link Parser//consume} to actually "delete" the extraneous token. Then,
// before returning {@link //ReportMatch} is called to signal a successful
@ -495,11 +494,11 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
// deletion successfully recovers from the mismatched input, otherwise
// {@code nil}
//
func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
func (d *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
var NextTokenType = recognizer.GetTokenStream().LA(2)
var expecting = this.getExpectedTokens(recognizer)
var expecting = d.getExpectedTokens(recognizer)
if expecting.contains(NextTokenType) {
this.ReportUnwantedToken(recognizer)
d.ReportUnwantedToken(recognizer)
// print("recoverFromMisMatchedToken deleting " \
// + str(recognizer.GetTokenStream().LT(1)) \
// + " since " + str(recognizer.GetTokenStream().LT(2)) \
@ -507,11 +506,11 @@ func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
recognizer.Consume() // simply delete extra token
// we want to return the token we're actually Matching
var MatchedSymbol = recognizer.GetCurrentToken()
this.ReportMatch(recognizer) // we know current token is correct
d.ReportMatch(recognizer) // we know current token is correct
return MatchedSymbol
} else {
return nil
}
return nil
}
// Conjure up a missing token during error recovery.
@ -522,7 +521,7 @@ func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
// that there has been an identifier Matched previously and that
// $x points at that token. If that token is missing, but
// the next token in the stream is what we want we assume that
// this token is missing and we keep going. Because we
// d token is missing and we keep going. Because we
// have to return some token to replace the missing token,
// we have to conjure one up. This method gives the user control
// over the tokens returned for missing tokens. Mostly,
@ -531,11 +530,11 @@ func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
// action in the parser or tree parser works. It simply creates
// a CommonToken of the appropriate type. The text will be the token.
// If you change what tokens must be created by the lexer,
// override this method to create the appropriate tokens.
// override d method to create the appropriate tokens.
//
func (this *DefaultErrorStrategy) getMissingSymbol(recognizer Parser) Token {
func (d *DefaultErrorStrategy) getMissingSymbol(recognizer Parser) Token {
var currentSymbol = recognizer.GetCurrentToken()
var expecting = this.getExpectedTokens(recognizer)
var expecting = d.getExpectedTokens(recognizer)
var expectedTokenType = expecting.first()
var tokenText string
@ -543,7 +542,7 @@ func (this *DefaultErrorStrategy) getMissingSymbol(recognizer Parser) Token {
tokenText = "<missing EOF>"
} else {
ln := recognizer.GetLiteralNames()
if expectedTokenType > 0 && expectedTokenType < len(ln) {
if expectedTokenType > 0 && expectedTokenType < len(ln) {
tokenText = "<missing " + recognizer.GetLiteralNames()[expectedTokenType] + ">"
} else {
tokenText = "<missing undefined>" // TODO matches the JS impl
@ -563,7 +562,7 @@ func (this *DefaultErrorStrategy) getMissingSymbol(recognizer Parser) Token {
return tf.Create(current.GetSource(), expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.GetLine(), current.GetColumn())
}
func (this *DefaultErrorStrategy) getExpectedTokens(recognizer Parser) *IntervalSet {
func (d *DefaultErrorStrategy) getExpectedTokens(recognizer Parser) *IntervalSet {
return recognizer.GetExpectedTokens()
}
@ -575,7 +574,7 @@ func (this *DefaultErrorStrategy) getExpectedTokens(recognizer Parser) *Interval
// your token objects because you don't have to go modify your lexer
// so that it creates a NewJava type.
//
func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string {
func (d *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string {
if t == nil {
return "<no token>"
}
@ -587,10 +586,10 @@ func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string {
s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
}
}
return this.escapeWSAndQuote(s)
return d.escapeWSAndQuote(s)
}
func (this *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
func (d *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
s = strings.Replace(s, "\t", "\\t", -1)
s = strings.Replace(s, "\n", "\\n", -1)
s = strings.Replace(s, "\r", "\\r", -1)
@ -599,7 +598,7 @@ func (this *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
// Compute the error recovery set for the current rule. During
// rule invocation, the parser pushes the set of tokens that can
// follow that rule reference on the stack this amounts to
// follow that rule reference on the stack d amounts to
// computing FIRST of what follows the rule reference in the
// enclosing rule. See LinearApproximator.FIRST().
// This local follow set only includes tokens
@ -656,7 +655,7 @@ func (this *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
// reSync to one of those tokens. Note that FOLLOW(c)='^' and if
// we reSync'd to that token, we'd consume until EOF. We need to
// Sync to context-sensitive FOLLOWs for a, b, and c: {']','^'}.
// In this case, for input "[]", LA(1) is ']' and in the set, so we would
// In d case, for input "[]", LA(1) is ']' and in the set, so we would
// not consume anything. After printing an error, rule c would
// return normally. Rule b would not find the required '^' though.
// At this point, it gets a mismatched token error and panics an
@ -689,7 +688,7 @@ func (this *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
// Like Grosch I implement context-sensitive FOLLOW sets that are combined
// at run-time upon error to avoid overhead during parsing.
//
func (this *DefaultErrorStrategy) getErrorRecoverySet(recognizer Parser) *IntervalSet {
func (d *DefaultErrorStrategy) getErrorRecoverySet(recognizer Parser) *IntervalSet {
var atn = recognizer.GetInterpreter().atn
var ctx = recognizer.GetParserRuleContext()
var recoverSet = NewIntervalSet()
@ -706,7 +705,7 @@ func (this *DefaultErrorStrategy) getErrorRecoverySet(recognizer Parser) *Interv
}
// Consume tokens until one Matches the given token set.//
func (this *DefaultErrorStrategy) consumeUntil(recognizer Parser, set *IntervalSet) {
func (d *DefaultErrorStrategy) consumeUntil(recognizer Parser, set *IntervalSet) {
var ttype = recognizer.GetTokenStream().LA(1)
for ttype != TokenEOF && !set.contains(ttype) {
recognizer.Consume()
@ -748,11 +747,11 @@ type BailErrorStrategy struct {
func NewBailErrorStrategy() *BailErrorStrategy {
this := new(BailErrorStrategy)
b := new(BailErrorStrategy)
this.DefaultErrorStrategy = NewDefaultErrorStrategy()
b.DefaultErrorStrategy = NewDefaultErrorStrategy()
return this
return b
}
// Instead of recovering from exception {@code e}, re-panic it wrapped
@ -760,7 +759,7 @@ func NewBailErrorStrategy() *BailErrorStrategy {
// rule func catches. Use {@link Exception//getCause()} to get the
// original {@link RecognitionException}.
//
func (this *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
func (b *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
var context = recognizer.GetParserRuleContext()
for context != nil {
context.SetException(e)
@ -772,11 +771,11 @@ func (this *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException
// Make sure we don't attempt to recover inline if the parser
// successfully recovers, it won't panic an exception.
//
func (this *BailErrorStrategy) RecoverInline(recognizer Parser) {
this.Recover(recognizer, NewInputMisMatchException(recognizer))
func (b *BailErrorStrategy) RecoverInline(recognizer Parser) {
b.Recover(recognizer, NewInputMisMatchException(recognizer))
}
// Make sure we don't attempt to recover from problems in subrules.//
func (this *BailErrorStrategy) Sync(recognizer Parser) {
func (b *BailErrorStrategy) Sync(recognizer Parser) {
// pass
}

View File

@ -56,40 +56,40 @@ func NewBaseRecognitionException(message string, recognizer Recognizer, input In
return t
}
func (this *BaseRecognitionException) GetMessage() string {
return this.message
func (b *BaseRecognitionException) GetMessage() string {
return b.message
}
func (this *BaseRecognitionException) GetOffendingToken() Token {
return this.offendingToken
func (b *BaseRecognitionException) GetOffendingToken() Token {
return b.offendingToken
}
func (this *BaseRecognitionException) GetInputStream() IntStream {
return this.input
func (b *BaseRecognitionException) GetInputStream() IntStream {
return b.input
}
// <p>If the state number is not known, this method returns -1.</p>
// <p>If the state number is not known, b method returns -1.</p>
//
// Gets the set of input symbols which could potentially follow the
// previously Matched symbol at the time this exception was panicn.
// previously Matched symbol at the time b exception was panicn.
//
// <p>If the set of expected tokens is not known and could not be computed,
// this method returns {@code nil}.</p>
// b method returns {@code nil}.</p>
//
// @return The set of token types that could potentially follow the current
// state in the ATN, or {@code nil} if the information is not available.
// /
func (this *BaseRecognitionException) getExpectedTokens() *IntervalSet {
if this.recognizer != nil {
return this.recognizer.GetATN().getExpectedTokens(this.offendingState, this.ctx)
} else {
return nil
func (b *BaseRecognitionException) getExpectedTokens() *IntervalSet {
if b.recognizer != nil {
return b.recognizer.GetATN().getExpectedTokens(b.offendingState, b.ctx)
}
return nil
}
func (this *BaseRecognitionException) String() string {
return this.message
func (b *BaseRecognitionException) String() string {
return b.message
}
type LexerNoViableAltException struct {
@ -101,20 +101,20 @@ type LexerNoViableAltException struct {
func NewLexerNoViableAltException(lexer Lexer, input CharStream, startIndex int, deadEndConfigs ATNConfigSet) *LexerNoViableAltException {
this := new(LexerNoViableAltException)
l := new(LexerNoViableAltException)
this.BaseRecognitionException = NewBaseRecognitionException("", lexer, input, nil)
l.BaseRecognitionException = NewBaseRecognitionException("", lexer, input, nil)
this.startIndex = startIndex
this.deadEndConfigs = deadEndConfigs
l.startIndex = startIndex
l.deadEndConfigs = deadEndConfigs
return this
return l
}
func (this *LexerNoViableAltException) String() string {
func (l *LexerNoViableAltException) String() string {
var symbol = ""
if this.startIndex >= 0 && this.startIndex < this.input.Size() {
symbol = this.input.(CharStream).GetTextFromInterval(NewInterval(this.startIndex, this.startIndex))
if l.startIndex >= 0 && l.startIndex < l.input.Size() {
symbol = l.input.(CharStream).GetTextFromInterval(NewInterval(l.startIndex, l.startIndex))
}
return "LexerNoViableAltException" + symbol
}
@ -151,20 +151,20 @@ func NewNoViableAltException(recognizer Parser, input TokenStream, startToken To
input = recognizer.GetInputStream().(TokenStream)
}
this := new(NoViableAltException)
this.BaseRecognitionException = NewBaseRecognitionException("", recognizer, input, ctx)
n := new(NoViableAltException)
n.BaseRecognitionException = NewBaseRecognitionException("", recognizer, input, ctx)
// Which configurations did we try at input.Index() that couldn't Match
// input.LT(1)?//
this.deadEndConfigs = deadEndConfigs
n.deadEndConfigs = deadEndConfigs
// The token object at the start index the input stream might
// not be buffering tokens so get a reference to it. (At the
// time the error occurred, of course the stream needs to keep a
// buffer all of the tokens but later we might not have access to those.)
this.startToken = startToken
this.offendingToken = offendingToken
n.startToken = startToken
n.offendingToken = offendingToken
return this
return n
}
type InputMisMatchException struct {
@ -176,12 +176,12 @@ type InputMisMatchException struct {
//
func NewInputMisMatchException(recognizer Parser) *InputMisMatchException {
this := new(InputMisMatchException)
this.BaseRecognitionException = NewBaseRecognitionException("", recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
i := new(InputMisMatchException)
i.BaseRecognitionException = NewBaseRecognitionException("", recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
this.offendingToken = recognizer.GetCurrentToken()
i.offendingToken = recognizer.GetCurrentToken()
return this
return i
}
@ -200,31 +200,31 @@ type FailedPredicateException struct {
func NewFailedPredicateException(recognizer Parser, predicate string, message string) *FailedPredicateException {
this := new(FailedPredicateException)
f := new(FailedPredicateException)
this.BaseRecognitionException = NewBaseRecognitionException(this.formatMessage(predicate, message), recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
f.BaseRecognitionException = NewBaseRecognitionException(f.formatMessage(predicate, message), recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
var s = recognizer.GetInterpreter().atn.states[recognizer.GetState()]
var trans = s.GetTransitions()[0]
if trans2, ok := trans.(*PredicateTransition); ok {
this.ruleIndex = trans2.ruleIndex
this.predicateIndex = trans2.predIndex
f.ruleIndex = trans2.ruleIndex
f.predicateIndex = trans2.predIndex
} else {
this.ruleIndex = 0
this.predicateIndex = 0
f.ruleIndex = 0
f.predicateIndex = 0
}
this.predicate = predicate
this.offendingToken = recognizer.GetCurrentToken()
f.predicate = predicate
f.offendingToken = recognizer.GetCurrentToken()
return this
return f
}
func (this *FailedPredicateException) formatMessage(predicate, message string) string {
func (f *FailedPredicateException) formatMessage(predicate, message string) string {
if message != "" {
return message
} else {
return "failed predicate: {" + predicate + "}?"
}
return "failed predicate: {" + predicate + "}?"
}
type ParseCancellationException struct {

View File

@ -30,7 +30,7 @@ func (is *InputStream) Consume() {
// assert is.LA(1) == TokenEOF
panic("cannot consume EOF")
}
is.index += 1
is.index++
}
func (is *InputStream) LA(offset int) int {
@ -39,7 +39,7 @@ func (is *InputStream) LA(offset int) int {
return 0 // nil
}
if offset < 0 {
offset += 1 // e.g., translate LA(-1) to use offset=0
offset++ // e.g., translate LA(-1) to use offset=0
}
var pos = is.index + offset - 1
@ -88,24 +88,24 @@ func (is *InputStream) GetText(start int, stop int) string {
}
if start >= is.size {
return ""
} else {
return string(is.data[start : stop+1])
}
return string(is.data[start : stop+1])
}
func (is *InputStream) GetTextFromTokens(start, stop Token) string {
if ( start!=nil && stop !=nil ) {
return is.GetTextFromInterval(NewInterval(start.GetTokenIndex(), stop.GetTokenIndex()));
if start != nil && stop != nil {
return is.GetTextFromInterval(NewInterval(start.GetTokenIndex(), stop.GetTokenIndex()))
}
return "";
return ""
}
func (is *InputStream) GetTextFromInterval(i *Interval) string {
return is.GetText(i.start, i.stop)
}
func (f *InputStream) GetSourceName() string {
func (*InputStream) GetSourceName() string {
return "Obtained from string"
}

View File

@ -27,9 +27,9 @@ func (i *Interval) contains(item int) bool {
func (i *Interval) String() string {
if i.start == i.stop-1 {
return strconv.Itoa(i.start)
} else {
return strconv.Itoa(i.start) + ".." + strconv.Itoa(i.stop-1)
}
return strconv.Itoa(i.start) + ".." + strconv.Itoa(i.stop-1)
}
func (i *Interval) length() int {
@ -54,9 +54,9 @@ func NewIntervalSet() *IntervalSet {
func (i *IntervalSet) first() int {
if len(i.intervals) == 0 {
return TokenInvalidType
} else {
return i.intervals[0].start
}
return i.intervals[0].start
}
func (i *IntervalSet) addOne(v int) {
@ -67,33 +67,33 @@ func (i *IntervalSet) addRange(l, h int) {
i.addInterval(NewInterval(l, h+1))
}
func (is *IntervalSet) addInterval(v *Interval) {
func (i *IntervalSet) addInterval(v *Interval) {
if PortDebug {
fmt.Println("addInterval" + v.String())
}
if is.intervals == nil {
is.intervals = make([]*Interval, 0)
is.intervals = append(is.intervals, v)
if i.intervals == nil {
i.intervals = make([]*Interval, 0)
i.intervals = append(i.intervals, v)
} else {
// find insert pos
for k := 0; k < len(is.intervals); k++ {
var i = is.intervals[k]
// distinct range -> insert
if v.stop < i.start {
// is.intervals = splice(k, 0, v)
is.intervals = append(is.intervals[0:k], append([]*Interval{v}, is.intervals[k:]...)...)
for k := 0; k < len(i.intervals); k++ {
var interval = i.intervals[k]
// ditinct range -> insert
if v.stop < interval.start {
// i.intervals = splice(k, 0, v)
i.intervals = append(i.intervals[0:k], append([]*Interval{v}, i.intervals[k:]...)...)
return
} else if v.stop == i.start {
is.intervals[k].start = v.start
} else if v.stop == interval.start {
i.intervals[k].start = v.start
return
} else if v.start <= i.stop {
is.intervals[k] = NewInterval(intMin(i.start, v.start), intMax(i.stop, v.stop))
is.reduce(k)
} else if v.start <= interval.stop {
i.intervals[k] = NewInterval(intMin(interval.start, v.start), intMax(interval.stop, v.stop))
i.reduce(k)
return
}
}
// greater than any existing
is.intervals = append(is.intervals, v)
// greater than any exiting
i.intervals = append(i.intervals, v)
}
}
@ -129,11 +129,11 @@ func (i *IntervalSet) reduce(k int) {
}
}
func (is *IntervalSet) complement(start int, stop int) *IntervalSet {
func (i *IntervalSet) complement(start int, stop int) *IntervalSet {
var result = NewIntervalSet()
result.addInterval(NewInterval(start, stop+1))
for i := 0; i < len(is.intervals); i++ {
result.removeRange(is.intervals[i])
for j := 0; j < len(i.intervals); j++ {
result.removeRange(i.intervals[j])
}
return result
}
@ -141,78 +141,77 @@ func (is *IntervalSet) complement(start int, stop int) *IntervalSet {
func (i *IntervalSet) contains(item int) bool {
if i.intervals == nil {
return false
} else {
for k := 0; k < len(i.intervals); k++ {
if i.intervals[k].contains(item) {
return true
}
}
return false
}
for k := 0; k < len(i.intervals); k++ {
if i.intervals[k].contains(item) {
return true
}
}
return false
}
func (is *IntervalSet) length() int {
func (i *IntervalSet) length() int {
len := 0
for _, v := range is.intervals {
for _, v := range i.intervals {
len += v.length()
}
return len
}
func (is *IntervalSet) removeRange(v *Interval) {
func (i *IntervalSet) removeRange(v *Interval) {
if v.start == v.stop-1 {
is.removeOne(v.start)
} else if is.intervals != nil {
i.removeOne(v.start)
} else if i.intervals != nil {
k := 0
for n := 0; n < len(is.intervals); n++ {
var i = is.intervals[k]
for n := 0; n < len(i.intervals); n++ {
var ni = i.intervals[k]
// intervals are ordered
if v.stop <= i.start {
if v.stop <= ni.start {
return
} else if v.start > i.start && v.stop < i.stop {
is.intervals[k] = NewInterval(i.start, v.start)
var x = NewInterval(v.stop, i.stop)
// is.intervals.splice(k, 0, x)
is.intervals = append(is.intervals[0:k], append([]*Interval{x}, is.intervals[k:]...)...)
} else if v.start > ni.start && v.stop < ni.stop {
i.intervals[k] = NewInterval(ni.start, v.start)
var x = NewInterval(v.stop, ni.stop)
// i.intervals.splice(k, 0, x)
i.intervals = append(i.intervals[0:k], append([]*Interval{x}, i.intervals[k:]...)...)
return
} else if v.start <= i.start && v.stop >= i.stop {
// is.intervals.splice(k, 1)
is.intervals = append(is.intervals[0:k], is.intervals[k+1:]...)
} else if v.start <= ni.start && v.stop >= ni.stop {
// i.intervals.splice(k, 1)
i.intervals = append(i.intervals[0:k], i.intervals[k+1:]...)
k = k - 1 // need another pass
} else if v.start < i.stop {
is.intervals[k] = NewInterval(i.start, v.start)
} else if v.stop < i.stop {
is.intervals[k] = NewInterval(v.stop, i.stop)
} else if v.start < ni.stop {
i.intervals[k] = NewInterval(ni.start, v.start)
} else if v.stop < ni.stop {
i.intervals[k] = NewInterval(v.stop, ni.stop)
}
k += 1
k++
}
}
}
func (is *IntervalSet) removeOne(v int) {
if is.intervals != nil {
for k := 0; k < len(is.intervals); k++ {
var i = is.intervals[k]
// intervals is ordered
if v < i.start {
func (i *IntervalSet) removeOne(v int) {
if i.intervals != nil {
for k := 0; k < len(i.intervals); k++ {
var ki = i.intervals[k]
// intervals i ordered
if v < ki.start {
return
} else if v == i.start && v == i.stop-1 {
// is.intervals.splice(k, 1);
is.intervals = append(is.intervals[0:k], is.intervals[k+1:]...)
} else if v == ki.start && v == ki.stop-1 {
// i.intervals.splice(k, 1);
i.intervals = append(i.intervals[0:k], i.intervals[k+1:]...)
return
} else if v == i.start {
is.intervals[k] = NewInterval(i.start+1, i.stop)
} else if v == ki.start {
i.intervals[k] = NewInterval(ki.start+1, ki.stop)
return
} else if v == i.stop-1 {
is.intervals[k] = NewInterval(i.start, i.stop-1)
} else if v == ki.stop-1 {
i.intervals[k] = NewInterval(ki.start, ki.stop-1)
return
} else if v < i.stop-1 {
var x = NewInterval(i.start, v)
i.start = v + 1
// is.intervals.splice(k, 0, x);
is.intervals = append(is.intervals[0:k], append([]*Interval{x}, is.intervals[k:]...)...)
} else if v < ki.stop-1 {
var x = NewInterval(ki.start, v)
ki.start = v + 1
// i.intervals.splice(k, 0, x);
i.intervals = append(i.intervals[0:k], append([]*Interval{x}, i.intervals[k:]...)...)
return
}
}
@ -231,16 +230,16 @@ func (i *IntervalSet) StringVerbose(literalNames []string, symbolicNames []strin
return i.toTokenString(literalNames, symbolicNames)
} else if elemsAreChar {
return i.toCharString()
} else {
return i.toIndexString()
}
return i.toIndexString()
}
func (is *IntervalSet) toCharString() string {
var names = make([]string, len(is.intervals))
func (i *IntervalSet) toCharString() string {
var names = make([]string, len(i.intervals))
for i := 0; i < len(is.intervals); i++ {
var v = is.intervals[i]
for j := 0; j < len(i.intervals); j++ {
var v = i.intervals[j]
if v.stop == v.start+1 {
if v.start == TokenEOF {
names = append(names, "<EOF>")
@ -253,16 +252,16 @@ func (is *IntervalSet) toCharString() string {
}
if len(names) > 1 {
return "{" + strings.Join(names, ", ") + "}"
} else {
return names[0]
}
return names[0]
}
func (is *IntervalSet) toIndexString() string {
func (i *IntervalSet) toIndexString() string {
var names = make([]string, 0)
for i := 0; i < len(is.intervals); i++ {
var v = is.intervals[i]
for j := 0; j < len(i.intervals); j++ {
var v = i.intervals[j]
if v.stop == v.start+1 {
if v.start == TokenEOF {
names = append(names, "<EOF>")
@ -275,23 +274,23 @@ func (is *IntervalSet) toIndexString() string {
}
if len(names) > 1 {
return "{" + strings.Join(names, ", ") + "}"
} else {
return names[0]
}
return names[0]
}
func (is *IntervalSet) toTokenString(literalNames []string, symbolicNames []string) string {
func (i *IntervalSet) toTokenString(literalNames []string, symbolicNames []string) string {
var names = make([]string, 0)
for _,v := range is.intervals {
for _, v := range i.intervals {
for j := v.start; j < v.stop; j++ {
names = append(names, is.elementName(literalNames, symbolicNames, j))
names = append(names, i.elementName(literalNames, symbolicNames, j))
}
}
if len(names) > 1 {
return "{" + strings.Join(names, ", ") + "}"
} else {
return names[0]
}
return names[0]
}
func (i *IntervalSet) elementName(literalNames []string, symbolicNames []string, a int) string {
@ -302,8 +301,8 @@ func (i *IntervalSet) elementName(literalNames []string, symbolicNames []string,
} else {
if a < len(literalNames) && literalNames[a] != "" {
return literalNames[a]
} else {
return symbolicNames[a]
}
return symbolicNames[a]
}
}

View File

@ -25,23 +25,22 @@ type Lexer interface {
type BaseLexer struct {
*BaseRecognizer
Interpreter *LexerATNSimulator
Interpreter *LexerATNSimulator
TokenStartCharIndex int
TokenStartLine int
TokenStartColumn int
ActionType int
_input CharStream
_factory TokenFactory
_tokenFactorySourcePair *TokenSourceCharStreamPair
_token Token
_hitEOF bool
_channel int
_type int
_modeStack IntStack
_mode int
_text string
TokenStartLine int
TokenStartColumn int
ActionType int
input CharStream
factory TokenFactory
tokenFactorySourcePair *TokenSourceCharStreamPair
token Token
hitEOF bool
channel int
thetype int
modeStack IntStack
mode int
text string
}
func NewBaseLexer(input CharStream) *BaseLexer {
@ -50,9 +49,9 @@ func NewBaseLexer(input CharStream) *BaseLexer {
lexer.BaseRecognizer = NewBaseRecognizer()
lexer._input = input
lexer._factory = CommonTokenFactoryDEFAULT
lexer._tokenFactorySourcePair = &TokenSourceCharStreamPair{lexer, input}
lexer.input = input
lexer.factory = CommonTokenFactoryDEFAULT
lexer.tokenFactorySourcePair = &TokenSourceCharStreamPair{lexer, input}
lexer.Interpreter = nil // child classes must populate it
@ -63,7 +62,7 @@ func NewBaseLexer(input CharStream) *BaseLexer {
// emissions, then set l to the last token to be Matched or
// something nonnil so that the auto token emit mechanism will not
// emit another token.
lexer._token = nil
lexer.token = nil
// What character index in the stream did the current token start at?
// Needed, for example, to get the text for current token. Set at
@ -78,21 +77,21 @@ func NewBaseLexer(input CharStream) *BaseLexer {
// Once we see EOF on char stream, next token will be EOF.
// If you have DONE : EOF then you see DONE EOF.
lexer._hitEOF = false
lexer.hitEOF = false
// The channel number for the current token///
lexer._channel = TokenDefaultChannel
lexer.channel = TokenDefaultChannel
// The token type for the current token///
lexer._type = TokenInvalidType
lexer.thetype = TokenInvalidType
lexer._modeStack = make([]int, 0)
lexer._mode = LexerDefaultMode
lexer.modeStack = make([]int, 0)
lexer.mode = LexerDefaultMode
// You can set the text for the current token to override what is in
// the input char buffer. Use setText() or can set l instance var.
// /
lexer._text = ""
lexer.text = ""
return lexer
}
@ -110,110 +109,110 @@ const (
LexerMaxCharValue = '\uFFFE'
)
func (l *BaseLexer) reset() {
func (b *BaseLexer) reset() {
// wack Lexer state variables
if l._input != nil {
l._input.Seek(0) // rewind the input
if b.input != nil {
b.input.Seek(0) // rewind the input
}
l._token = nil
l._type = TokenInvalidType
l._channel = TokenDefaultChannel
l.TokenStartCharIndex = -1
l.TokenStartColumn = -1
l.TokenStartLine = -1
l._text = ""
b.token = nil
b.thetype = TokenInvalidType
b.channel = TokenDefaultChannel
b.TokenStartCharIndex = -1
b.TokenStartColumn = -1
b.TokenStartLine = -1
b.text = ""
l._hitEOF = false
l._mode = LexerDefaultMode
l._modeStack = make([]int, 0)
b.hitEOF = false
b.mode = LexerDefaultMode
b.modeStack = make([]int, 0)
l.Interpreter.reset()
b.Interpreter.reset()
}
func (l *BaseLexer) GetInterpreter() *LexerATNSimulator {
return l.Interpreter
func (b *BaseLexer) GetInterpreter() *LexerATNSimulator {
return b.Interpreter
}
func (l *BaseLexer) GetInputStream() CharStream {
return l._input
func (b *BaseLexer) GetInputStream() CharStream {
return b.input
}
func (l *BaseLexer) GetSourceName() string {
return l.GrammarFileName
func (b *BaseLexer) GetSourceName() string {
return b.GrammarFileName
}
func (l *BaseLexer) setChannel(v int) {
l._channel = v
func (b *BaseLexer) setChannel(v int) {
b.channel = v
}
func (l *BaseLexer) GetTokenFactory() TokenFactory {
return l._factory
func (b *BaseLexer) GetTokenFactory() TokenFactory {
return b.factory
}
func (l *BaseLexer) setTokenFactory(f TokenFactory) {
l._factory = f
func (b *BaseLexer) setTokenFactory(f TokenFactory) {
b.factory = f
}
func (l *BaseLexer) safeMatch() (ret int) {
func (b *BaseLexer) safeMatch() (ret int) {
// previously in catch block
defer func() {
if e := recover(); e != nil {
if re, ok := e.(RecognitionException); ok {
l.notifyListeners(re) // Report error
l.Recover(re)
b.notifyListeners(re) // Report error
b.Recover(re)
ret = LexerSkip // default
}
}
}()
return l.Interpreter.Match(l._input, l._mode)
return b.Interpreter.Match(b.input, b.mode)
}
// Return a token from l source i.e., Match a token on the char stream.
func (l *BaseLexer) NextToken() Token {
if l._input == nil {
func (b *BaseLexer) NextToken() Token {
if b.input == nil {
panic("NextToken requires a non-nil input stream.")
}
var tokenStartMarker = l._input.Mark()
var tokenStartMarker = b.input.Mark()
// previously in finally block
defer func() {
// make sure we release marker after Match or
// unbuffered char stream will keep buffering
l._input.Release(tokenStartMarker)
b.input.Release(tokenStartMarker)
}()
for true {
if l._hitEOF {
l.emitEOF()
return l._token
if b.hitEOF {
b.emitEOF()
return b.token
}
l._token = nil
l._channel = TokenDefaultChannel
l.TokenStartCharIndex = l._input.Index()
l.TokenStartColumn = l.Interpreter.column
l.TokenStartLine = l.Interpreter.line
l._text = ""
b.token = nil
b.channel = TokenDefaultChannel
b.TokenStartCharIndex = b.input.Index()
b.TokenStartColumn = b.Interpreter.column
b.TokenStartLine = b.Interpreter.line
b.text = ""
var continueOuter = false
for true {
l._type = TokenInvalidType
b.thetype = TokenInvalidType
var ttype = LexerSkip
ttype = l.safeMatch()
ttype = b.safeMatch()
if l._input.LA(1) == TokenEOF {
l._hitEOF = true
if b.input.LA(1) == TokenEOF {
b.hitEOF = true
}
if l._type == TokenInvalidType {
l._type = ttype
if b.thetype == TokenInvalidType {
b.thetype = ttype
}
if l._type == LexerSkip {
if b.thetype == LexerSkip {
continueOuter = true
break
}
if l._type != LexerMore {
if b.thetype != LexerMore {
break
}
if PortDebug {
@ -227,10 +226,10 @@ func (l *BaseLexer) NextToken() Token {
if continueOuter {
continue
}
if l._token == nil {
l.emit()
if b.token == nil {
b.emit()
}
return l._token
return b.token
}
return nil
@ -238,52 +237,48 @@ func (l *BaseLexer) NextToken() Token {
// Instruct the lexer to Skip creating a token for current lexer rule
// and look for another token. NextToken() knows to keep looking when
// a lexer rule finishes with token set to SKIP_TOKEN. Recall that
// a lexer rule finishes with token set to SKIPTOKEN. Recall that
// if token==nil at end of any token rule, it creates one for you
// and emits it.
// /
func (l *BaseLexer) Skip() {
l._type = LexerSkip
func (b *BaseLexer) Skip() {
b.thetype = LexerSkip
}
func (l *BaseLexer) More() {
l._type = LexerMore
func (b *BaseLexer) More() {
b.thetype = LexerMore
}
func (l *BaseLexer) mode(m int) {
l._mode = m
}
func (l *BaseLexer) pushMode(m int) {
func (b *BaseLexer) pushMode(m int) {
if LexerATNSimulatorDebug {
fmt.Println("pushMode " + strconv.Itoa(m))
}
l._modeStack.Push(l._mode)
l.mode(m)
b.modeStack.Push(b.mode)
b.mode = m
}
func (l *BaseLexer) popMode() int {
if len(l._modeStack) == 0 {
func (b *BaseLexer) popMode() int {
if len(b.modeStack) == 0 {
panic("Empty Stack")
}
if LexerATNSimulatorDebug {
fmt.Println("popMode back to " + fmt.Sprint(l._modeStack[0:len(l._modeStack)-1]))
fmt.Println("popMode back to " + fmt.Sprint(b.modeStack[0:len(b.modeStack)-1]))
}
i, _ := l._modeStack.Pop()
l.mode(i)
return l._mode
i, _ := b.modeStack.Pop()
b.mode = i
return b.mode
}
func (l *BaseLexer) inputStream() CharStream {
return l._input
func (b *BaseLexer) inputStream() CharStream {
return b.input
}
func (l *BaseLexer) setInputStream(input CharStream) {
l._input = nil
l._tokenFactorySourcePair = &TokenSourceCharStreamPair{l, l._input}
l.reset()
l._input = input
l._tokenFactorySourcePair = &TokenSourceCharStreamPair{l, l._input}
func (b *BaseLexer) setInputStream(input CharStream) {
b.input = nil
b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input}
b.reset()
b.input = input
b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input}
}
// By default does not support multiple emits per NextToken invocation
@ -291,8 +286,8 @@ func (l *BaseLexer) setInputStream(input CharStream) {
// and GetToken (to push tokens into a list and pull from that list
// rather than a single variable as l implementation does).
// /
func (l *BaseLexer) emitToken(token Token) {
l._token = token
func (b *BaseLexer) emitToken(token Token) {
b.token = token
}
// The standard method called to automatically emit a token at the
@ -301,94 +296,94 @@ func (l *BaseLexer) emitToken(token Token) {
// use that to set the token's text. Override l method to emit
// custom Token objects or provide a Newfactory.
// /
func (l *BaseLexer) emit() Token {
func (b *BaseLexer) emit() Token {
if PortDebug {
fmt.Println("emit")
}
var t = l._factory.Create(l._tokenFactorySourcePair, l._type, l._text, l._channel, l.TokenStartCharIndex, l.getCharIndex()-1, l.TokenStartLine, l.TokenStartColumn)
l.emitToken(t)
var t = b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.getCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
b.emitToken(t)
return t
}
func (l *BaseLexer) emitEOF() Token {
cpos := l.GetCharPositionInLine()
lpos := l.GetLine()
func (b *BaseLexer) emitEOF() Token {
cpos := b.GetCharPositionInLine()
lpos := b.GetLine()
if PortDebug {
fmt.Println("emitEOF")
}
var eof = l._factory.Create(l._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, l._input.Index(), l._input.Index()-1, lpos, cpos)
l.emitToken(eof)
var eof = b.factory.Create(b.tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b.input.Index(), b.input.Index()-1, lpos, cpos)
b.emitToken(eof)
return eof
}
func (l *BaseLexer) GetCharPositionInLine() int {
return l.Interpreter.column
func (b *BaseLexer) GetCharPositionInLine() int {
return b.Interpreter.column
}
func (l *BaseLexer) GetLine() int {
return l.Interpreter.line
func (b *BaseLexer) GetLine() int {
return b.Interpreter.line
}
func (l *BaseLexer) getType() int {
return l._type
func (b *BaseLexer) getType() int {
return b.thetype
}
func (l *BaseLexer) setType(t int) {
l._type = t
func (b *BaseLexer) setType(t int) {
b.thetype = t
}
// What is the index of the current character of lookahead?///
func (l *BaseLexer) getCharIndex() int {
return l._input.Index()
func (b *BaseLexer) getCharIndex() int {
return b.input.Index()
}
// Return the text Matched so far for the current token or any text override.
//Set the complete text of l token it wipes any previous changes to the text.
func (l *BaseLexer) GetText() string {
if l._text != "" {
return l._text
} else {
return l.Interpreter.GetText(l._input)
func (b *BaseLexer) GetText() string {
if b.text != "" {
return b.text
}
return b.Interpreter.GetText(b.input)
}
func (l *BaseLexer) SetText(text string) {
l._text = text
func (b *BaseLexer) SetText(text string) {
b.text = text
}
func (this *BaseLexer) GetATN() *ATN {
return this.Interpreter.atn
func (b *BaseLexer) GetATN() *ATN {
return b.Interpreter.atn
}
// Return a list of all Token objects in input char stream.
// Forces load of all tokens. Does not include EOF token.
// /
func (l *BaseLexer) getAllTokens() []Token {
func (b *BaseLexer) getAllTokens() []Token {
if PortDebug {
fmt.Println("getAllTokens")
}
var tokens = make([]Token, 0)
var t = l.NextToken()
var t = b.NextToken()
for t.GetTokenType() != TokenEOF {
tokens = append(tokens, t)
if PortDebug {
fmt.Println("getAllTokens")
}
t = l.NextToken()
t = b.NextToken()
}
return tokens
}
func (l *BaseLexer) notifyListeners(e RecognitionException) {
var start = l.TokenStartCharIndex
var stop = l._input.Index()
var text = l._input.GetTextFromInterval(NewInterval(start, stop))
func (b *BaseLexer) notifyListeners(e RecognitionException) {
var start = b.TokenStartCharIndex
var stop = b.input.Index()
var text = b.input.GetTextFromInterval(NewInterval(start, stop))
var msg = "token recognition error at: '" + text + "'"
var listener = l.GetErrorListenerDispatch()
listener.SyntaxError(l, nil, l.TokenStartLine, l.TokenStartColumn, msg, e)
var listener = b.GetErrorListenerDispatch()
listener.SyntaxError(b, nil, b.TokenStartLine, b.TokenStartColumn, msg, e)
}
func (l *BaseLexer) getErrorDisplayForChar(c rune) string {
func (b *BaseLexer) getErrorDisplayForChar(c rune) string {
if c == TokenEOF {
return "<EOF>"
} else if c == '\n' {
@ -402,8 +397,8 @@ func (l *BaseLexer) getErrorDisplayForChar(c rune) string {
}
}
func (l *BaseLexer) getCharErrorDisplay(c rune) string {
return "'" + l.getErrorDisplayForChar(c) + "'"
func (b *BaseLexer) getCharErrorDisplay(c rune) string {
return "'" + b.getErrorDisplayForChar(c) + "'"
}
// Lexers can normally Match any char in it's vocabulary after Matching
@ -411,14 +406,14 @@ func (l *BaseLexer) getCharErrorDisplay(c rune) string {
// it all works out. You can instead use the rule invocation stack
// to do sophisticated error recovery if you are in a fragment rule.
// /
func (l *BaseLexer) Recover(re RecognitionException) {
if l._input.LA(1) != TokenEOF {
func (b *BaseLexer) Recover(re RecognitionException) {
if b.input.LA(1) != TokenEOF {
if _, ok := re.(*LexerNoViableAltException); ok {
// Skip a char and try again
l.Interpreter.consume(l._input)
b.Interpreter.consume(b.input)
} else {
// TODO: Do we lose character or line position information?
l._input.Consume()
b.input.Consume()
}
}
}

View File

@ -35,30 +35,30 @@ func NewBaseLexerAction(action int) *BaseLexerAction {
return la
}
func (this *BaseLexerAction) execute(lexer Lexer) {
func (b *BaseLexerAction) execute(lexer Lexer) {
panic("Not implemented")
}
func (this *BaseLexerAction) getActionType() int {
return this.actionType
func (b *BaseLexerAction) getActionType() int {
return b.actionType
}
func (this *BaseLexerAction) getIsPositionDependent() bool {
return this.isPositionDependent
func (b *BaseLexerAction) getIsPositionDependent() bool {
return b.isPositionDependent
}
func (this *BaseLexerAction) Hash() string {
return strconv.Itoa(this.actionType)
func (b *BaseLexerAction) Hash() string {
return strconv.Itoa(b.actionType)
}
func (this *BaseLexerAction) equals(other LexerAction) bool {
return this == other
func (b *BaseLexerAction) equals(other LexerAction) bool {
return b == other
}
//
// Implements the {@code Skip} lexer action by calling {@link Lexer//Skip}.
//
// <p>The {@code Skip} command does not have any parameters, so this action is
// <p>The {@code Skip} command does not have any parameters, so l action is
// implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
type LexerSkipAction struct {
*BaseLexerAction
@ -70,14 +70,14 @@ func NewLexerSkipAction() *LexerSkipAction {
return la
}
// Provides a singleton instance of this parameterless lexer action.
// Provides a singleton instance of l parameterless lexer action.
var LexerSkipActionINSTANCE = NewLexerSkipAction()
func (this *LexerSkipAction) execute(lexer Lexer) {
func (l *LexerSkipAction) execute(lexer Lexer) {
lexer.Skip()
}
func (this *LexerSkipAction) String() string {
func (l *LexerSkipAction) String() string {
return "skip"
}
@ -86,36 +86,36 @@ func (this *LexerSkipAction) String() string {
type LexerTypeAction struct {
*BaseLexerAction
_type int
thetype int
}
func NewLexerTypeAction(_type int) *LexerTypeAction {
this := new(LexerTypeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeType)
this._type = _type
return this
func NewLexerTypeAction(thetype int) *LexerTypeAction {
l := new(LexerTypeAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeType)
l.thetype = thetype
return l
}
func (this *LexerTypeAction) execute(lexer Lexer) {
lexer.setType(this._type)
func (l *LexerTypeAction) execute(lexer Lexer) {
lexer.setType(l.thetype)
}
func (this *LexerTypeAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this._type)
func (l *LexerTypeAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.thetype)
}
func (this *LexerTypeAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerTypeAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerTypeAction); !ok {
return false
} else {
return this._type == other.(*LexerTypeAction)._type
return l.thetype == other.(*LexerTypeAction).thetype
}
}
func (this *LexerTypeAction) String() string {
return "actionType(" + strconv.Itoa(this._type) + ")"
func (l *LexerTypeAction) String() string {
return "actionType(" + strconv.Itoa(l.thetype) + ")"
}
// Implements the {@code pushMode} lexer action by calling
@ -128,40 +128,40 @@ type LexerPushModeAction struct {
func NewLexerPushModeAction(mode int) *LexerPushModeAction {
this := new(LexerPushModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypePushMode)
l := new(LexerPushModeAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypePushMode)
this.mode = mode
return this
l.mode = mode
return l
}
// <p>This action is implemented by calling {@link Lexer//pushMode} with the
// value provided by {@link //getMode}.</p>
func (this *LexerPushModeAction) execute(lexer Lexer) {
lexer.pushMode(this.mode)
func (l *LexerPushModeAction) execute(lexer Lexer) {
lexer.pushMode(l.mode)
}
func (this *LexerPushModeAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.mode)
func (l *LexerPushModeAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.mode)
}
func (this *LexerPushModeAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerPushModeAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerPushModeAction); !ok {
return false
} else {
return this.mode == other.(*LexerPushModeAction).mode
return l.mode == other.(*LexerPushModeAction).mode
}
}
func (this *LexerPushModeAction) String() string {
return "pushMode(" + strconv.Itoa(this.mode) + ")"
func (l *LexerPushModeAction) String() string {
return "pushMode(" + strconv.Itoa(l.mode) + ")"
}
// Implements the {@code popMode} lexer action by calling {@link Lexer//popMode}.
//
// <p>The {@code popMode} command does not have any parameters, so this action is
// <p>The {@code popMode} command does not have any parameters, so l action is
// implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
type LexerPopModeAction struct {
*BaseLexerAction
@ -169,27 +169,27 @@ type LexerPopModeAction struct {
func NewLexerPopModeAction() *LexerPopModeAction {
this := new(LexerPopModeAction)
l := new(LexerPopModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypePopMode)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypePopMode)
return this
return l
}
var LexerPopModeActionINSTANCE = NewLexerPopModeAction()
// <p>This action is implemented by calling {@link Lexer//popMode}.</p>
func (this *LexerPopModeAction) execute(lexer Lexer) {
func (l *LexerPopModeAction) execute(lexer Lexer) {
lexer.popMode()
}
func (this *LexerPopModeAction) String() string {
func (l *LexerPopModeAction) String() string {
return "popMode"
}
// Implements the {@code more} lexer action by calling {@link Lexer//more}.
//
// <p>The {@code more} command does not have any parameters, so this action is
// <p>The {@code more} command does not have any parameters, so l action is
// implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
type LexerMoreAction struct {
@ -197,20 +197,20 @@ type LexerMoreAction struct {
}
func NewLexerMoreAction() *LexerModeAction {
this := new(LexerModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMore)
l := new(LexerModeAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMore)
return this
return l
}
var LexerMoreActionINSTANCE = NewLexerMoreAction()
// <p>This action is implemented by calling {@link Lexer//popMode}.</p>
func (this *LexerMoreAction) execute(lexer Lexer) {
func (l *LexerMoreAction) execute(lexer Lexer) {
lexer.More()
}
func (this *LexerMoreAction) String() string {
func (l *LexerMoreAction) String() string {
return "more"
}
@ -223,34 +223,34 @@ type LexerModeAction struct {
}
func NewLexerModeAction(mode int) *LexerModeAction {
this := new(LexerModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMode)
this.mode = mode
return this
l := new(LexerModeAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMode)
l.mode = mode
return l
}
// <p>This action is implemented by calling {@link Lexer//mode} with the
// value provided by {@link //getMode}.</p>
func (this *LexerModeAction) execute(lexer Lexer) {
lexer.mode(this.mode)
func (l *LexerModeAction) execute(lexer Lexer) {
lexer.mode(l.mode)
}
func (this *LexerModeAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.mode)
func (l *LexerModeAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.mode)
}
func (this *LexerModeAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerModeAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerModeAction); !ok {
return false
} else {
return this.mode == other.(*LexerModeAction).mode
return l.mode == other.(*LexerModeAction).mode
}
}
func (this *LexerModeAction) String() string {
return "mode(" + strconv.Itoa(this.mode) + ")"
func (l *LexerModeAction) String() string {
return "mode(" + strconv.Itoa(l.mode) + ")"
}
// Executes a custom lexer action by calling {@link Recognizer//action} with the
@ -276,31 +276,31 @@ type LexerCustomAction struct {
}
func NewLexerCustomAction(ruleIndex, actionIndex int) *LexerCustomAction {
this := new(LexerCustomAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeCustom)
this.ruleIndex = ruleIndex
this.actionIndex = actionIndex
this.isPositionDependent = true
return this
l := new(LexerCustomAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeCustom)
l.ruleIndex = ruleIndex
l.actionIndex = actionIndex
l.isPositionDependent = true
return l
}
// <p>Custom actions are implemented by calling {@link Lexer//action} with the
// appropriate rule and action indexes.</p>
func (this *LexerCustomAction) execute(lexer Lexer) {
lexer.Action(nil, this.ruleIndex, this.actionIndex)
func (l *LexerCustomAction) execute(lexer Lexer) {
lexer.Action(nil, l.ruleIndex, l.actionIndex)
}
func (this *LexerCustomAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.ruleIndex) + strconv.Itoa(this.actionIndex)
func (l *LexerCustomAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.ruleIndex) + strconv.Itoa(l.actionIndex)
}
func (this *LexerCustomAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerCustomAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerCustomAction); !ok {
return false
} else {
return this.ruleIndex == other.(*LexerCustomAction).ruleIndex && this.actionIndex == other.(*LexerCustomAction).actionIndex
return l.ruleIndex == other.(*LexerCustomAction).ruleIndex && l.actionIndex == other.(*LexerCustomAction).actionIndex
}
}
@ -315,34 +315,34 @@ type LexerChannelAction struct {
}
func NewLexerChannelAction(channel int) *LexerChannelAction {
this := new(LexerChannelAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeChannel)
this.channel = channel
return this
l := new(LexerChannelAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeChannel)
l.channel = channel
return l
}
// <p>This action is implemented by calling {@link Lexer//setChannel} with the
// value provided by {@link //getChannel}.</p>
func (this *LexerChannelAction) execute(lexer Lexer) {
lexer.setChannel(this.channel)
func (l *LexerChannelAction) execute(lexer Lexer) {
lexer.setChannel(l.channel)
}
func (this *LexerChannelAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.channel)
func (l *LexerChannelAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.channel)
}
func (this *LexerChannelAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerChannelAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerChannelAction); !ok {
return false
} else {
return this.channel == other.(*LexerChannelAction).channel
return l.channel == other.(*LexerChannelAction).channel
}
}
func (this *LexerChannelAction) String() string {
return "channel(" + strconv.Itoa(this.channel) + ")"
func (l *LexerChannelAction) String() string {
return "channel(" + strconv.Itoa(l.channel) + ")"
}
// This implementation of {@link LexerAction} is used for tracking input offsets
@ -375,33 +375,33 @@ type LexerIndexedCustomAction struct {
func NewLexerIndexedCustomAction(offset int, lexerAction LexerAction) *LexerIndexedCustomAction {
this := new(LexerIndexedCustomAction)
this.BaseLexerAction = NewBaseLexerAction(lexerAction.getActionType())
l := new(LexerIndexedCustomAction)
l.BaseLexerAction = NewBaseLexerAction(lexerAction.getActionType())
this.offset = offset
this.lexerAction = lexerAction
this.isPositionDependent = true
l.offset = offset
l.lexerAction = lexerAction
l.isPositionDependent = true
return this
return l
}
// <p>This method calls {@link //execute} on the result of {@link //getAction}
// using the provided {@code lexer}.</p>
func (this *LexerIndexedCustomAction) execute(lexer Lexer) {
func (l *LexerIndexedCustomAction) execute(lexer Lexer) {
// assume the input stream position was properly set by the calling code
this.lexerAction.execute(lexer)
l.lexerAction.execute(lexer)
}
func (this *LexerIndexedCustomAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.offset) + this.lexerAction.Hash()
func (l *LexerIndexedCustomAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.offset) + l.lexerAction.Hash()
}
func (this *LexerIndexedCustomAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerIndexedCustomAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerIndexedCustomAction); !ok {
return false
} else {
return this.offset == other.(*LexerIndexedCustomAction).offset && this.lexerAction == other.(*LexerIndexedCustomAction).lexerAction
return l.offset == other.(*LexerIndexedCustomAction).offset && l.lexerAction == other.(*LexerIndexedCustomAction).lexerAction
}
}

View File

@ -18,9 +18,9 @@ func NewLexerActionExecutor(lexerActions []LexerAction) *LexerActionExecutor {
lexerActions = make([]LexerAction, 0)
}
this := new(LexerActionExecutor)
l := new(LexerActionExecutor)
this.lexerActions = lexerActions
l.lexerActions = lexerActions
// Caches the result of {@link //hashCode} since the hash code is an element
// of the performance-critical {@link LexerATNConfig//hashCode} operation.
@ -30,9 +30,9 @@ func NewLexerActionExecutor(lexerActions []LexerAction) *LexerActionExecutor {
s += a.Hash()
}
this.cachedHashString = s // "".join([str(la) for la in
l.cachedHashString = s // "".join([str(la) for la in
return this
return l
}
// Creates a {@link LexerActionExecutor} which executes the actions for
@ -87,30 +87,30 @@ func LexerActionExecutorappend(lexerActionExecutor *LexerActionExecutor, lexerAc
// @return A {@link LexerActionExecutor} which stores input stream offsets
// for all position-dependent lexer actions.
// /
func (this *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionExecutor {
var updatedLexerActions []LexerAction = nil
for i := 0; i < len(this.lexerActions); i++ {
_, ok := this.lexerActions[i].(*LexerIndexedCustomAction)
if this.lexerActions[i].getIsPositionDependent() && !ok {
func (l *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionExecutor {
var updatedLexerActions []LexerAction
for i := 0; i < len(l.lexerActions); i++ {
_, ok := l.lexerActions[i].(*LexerIndexedCustomAction)
if l.lexerActions[i].getIsPositionDependent() && !ok {
if updatedLexerActions == nil {
updatedLexerActions = make([]LexerAction, 0)
for _, a := range this.lexerActions {
for _, a := range l.lexerActions {
updatedLexerActions = append(updatedLexerActions, a)
}
}
updatedLexerActions[i] = NewLexerIndexedCustomAction(offset, this.lexerActions[i])
updatedLexerActions[i] = NewLexerIndexedCustomAction(offset, l.lexerActions[i])
}
}
if updatedLexerActions == nil {
return this
} else {
return NewLexerActionExecutor(updatedLexerActions)
return l
}
return NewLexerActionExecutor(updatedLexerActions)
}
// Execute the actions encapsulated by this executor within the context of a
// Execute the actions encapsulated by l executor within the context of a
// particular {@link Lexer}.
//
// <p>This method calls {@link IntStream//seek} to set the position of the
@ -121,14 +121,14 @@ func (this *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionEx
//
// @param lexer The lexer instance.
// @param input The input stream which is the source for the current token.
// When this method is called, the current {@link IntStream//index} for
// When l method is called, the current {@link IntStream//index} for
// {@code input} should be the start of the following token, i.e. 1
// character past the end of the current token.
// @param startIndex The token start index. This value may be passed to
// {@link IntStream//seek} to set the {@code input} position to the beginning
// of the token.
// /
func (this *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex int) {
func (l *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex int) {
var requiresSeek = false
var stopIndex = input.Index()
@ -138,8 +138,8 @@ func (this *LexerActionExecutor) execute(lexer Lexer, input CharStream, startInd
}
}()
for i := 0; i < len(this.lexerActions); i++ {
var lexerAction LexerAction = this.lexerActions[i]
for i := 0; i < len(l.lexerActions); i++ {
var lexerAction = l.lexerActions[i]
if la, ok := lexerAction.(*LexerIndexedCustomAction); ok {
var offset = la.offset
input.Seek(startIndex + offset)
@ -153,17 +153,17 @@ func (this *LexerActionExecutor) execute(lexer Lexer, input CharStream, startInd
}
}
func (this *LexerActionExecutor) Hash() string {
return this.cachedHashString
func (l *LexerActionExecutor) Hash() string {
return l.cachedHashString
}
func (this *LexerActionExecutor) equals(other interface{}) bool {
if this == other {
func (l *LexerActionExecutor) equals(other interface{}) bool {
if l == other {
return true
} else if _, ok := other.(*LexerActionExecutor); !ok {
return false
} else {
return this.cachedHashString == other.(*LexerActionExecutor).cachedHashString &&
&this.lexerActions == &other.(*LexerActionExecutor).lexerActions
return l.cachedHashString == other.(*LexerActionExecutor).cachedHashString &&
&l.lexerActions == &other.(*LexerActionExecutor).lexerActions
}
}

View File

@ -37,14 +37,14 @@ type SimState struct {
func NewSimState() *SimState {
this := new(SimState)
resetSimState(this)
return this
s := new(SimState)
resetSimState(s)
return s
}
func (this *SimState) reset() {
resetSimState(this)
func (s *SimState) reset() {
resetSimState(s)
}
type LexerATNSimulator struct {
@ -52,65 +52,65 @@ type LexerATNSimulator struct {
recog Lexer
predictionMode int
DecisionToDFA []*DFA
DecisionToDFA []*DFA
mergeCache DoubleDict
startIndex int
line int
column int
mode int
prevAccept *SimState
Match_calls int
MatchCalls int
}
func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *LexerATNSimulator {
this := new(LexerATNSimulator)
l := new(LexerATNSimulator)
this.BaseATNSimulator = NewBaseATNSimulator(atn, sharedContextCache)
l.BaseATNSimulator = NewBaseATNSimulator(atn, sharedContextCache)
this.DecisionToDFA = decisionToDFA
this.recog = recog
l.DecisionToDFA = decisionToDFA
l.recog = recog
// The current token's starting index into the character stream.
// Shared across DFA to ATN simulation in case the ATN fails and the
// DFA did not have a previous accept state. In this case, we use the
// DFA did not have a previous accept state. In l case, we use the
// ATN-generated exception object.
this.startIndex = -1
l.startIndex = -1
// line number 1..n within the input///
this.line = 1
l.line = 1
// The index of the character relative to the beginning of the line
// 0..n-1///
this.column = 0
this.mode = LexerDefaultMode
l.column = 0
l.mode = LexerDefaultMode
// Used during DFA/ATN exec to record the most recent accept configuration
// info
this.prevAccept = NewSimState()
l.prevAccept = NewSimState()
// done
return this
return l
}
var LexerATNSimulatorDebug = false
var LexerATNSimulatorDFADebug = false
var LexerATNSimulatorMIN_DFA_EDGE = 0
var LexerATNSimulatorMAX_DFA_EDGE = 127 // forces unicode to stay in ATN
var LexerATNSimulatorMinDFAEdge = 0
var LexerATNSimulatorMaxDFAEdge = 127 // forces unicode to stay in ATN
var LexerATNSimulatorMatch_calls = 0
var LexerATNSimulatorMatchCalls = 0
func (this *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
this.column = simulator.column
this.line = simulator.line
this.mode = simulator.mode
this.startIndex = simulator.startIndex
func (l *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
l.column = simulator.column
l.line = simulator.line
l.mode = simulator.mode
l.startIndex = simulator.startIndex
}
func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
if PortDebug {
fmt.Println("Match")
}
this.Match_calls += 1
this.mode = mode
l.MatchCalls++
l.mode = mode
var mark = input.Mark()
defer func() {
@ -120,65 +120,66 @@ func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
input.Release(mark)
}()
this.startIndex = input.Index()
this.prevAccept.reset()
l.startIndex = input.Index()
l.prevAccept.reset()
var dfa = this.DecisionToDFA[mode]
var dfa = l.DecisionToDFA[mode]
if dfa.s0 == nil {
if PortDebug {
fmt.Println("MatchATN")
}
return this.MatchATN(input)
} else {
if PortDebug {
fmt.Println("execATN")
}
return this.execATN(input, dfa.s0)
return l.MatchATN(input)
}
if PortDebug {
fmt.Println("execATN")
}
return l.execATN(input, dfa.s0)
}
func (this *LexerATNSimulator) reset() {
this.prevAccept.reset()
this.startIndex = -1
this.line = 1
this.column = 0
this.mode = LexerDefaultMode
func (l *LexerATNSimulator) reset() {
l.prevAccept.reset()
l.startIndex = -1
l.line = 1
l.column = 0
l.mode = LexerDefaultMode
}
func (this *LexerATNSimulator) MatchATN(input CharStream) int {
var startState = this.atn.modeToStartState[this.mode]
func (l *LexerATNSimulator) MatchATN(input CharStream) int {
var startState = l.atn.modeToStartState[l.mode]
if LexerATNSimulatorDebug {
fmt.Println("MatchATN mode " + strconv.Itoa(this.mode) + " start: " + startState.String())
fmt.Println("MatchATN mode " + strconv.Itoa(l.mode) + " start: " + startState.String())
}
var old_mode = this.mode
var s0_closure = this.computeStartState(input, startState)
var suppressEdge = s0_closure.hasSemanticContext
s0_closure.hasSemanticContext = false
var oldMode = l.mode
var s0Closure = l.computeStartState(input, startState)
var suppressEdge = s0Closure.hasSemanticContext
s0Closure.hasSemanticContext = false
var next = this.addDFAState(s0_closure)
var next = l.addDFAState(s0Closure)
if !suppressEdge {
this.DecisionToDFA[this.mode].s0 = next
l.DecisionToDFA[l.mode].s0 = next
}
var predict = this.execATN(input, next)
var predict = l.execATN(input, next)
if LexerATNSimulatorDebug {
fmt.Println("DFA after MatchATN: " + this.DecisionToDFA[old_mode].ToLexerString())
fmt.Println("DFA after MatchATN: " + l.DecisionToDFA[oldMode].ToLexerString())
}
return predict
}
func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
if LexerATNSimulatorDebug {
fmt.Println("start state closure=" + ds0.configs.String())
}
if ds0.isAcceptState {
// allow zero-length tokens
this.captureSimState(this.prevAccept, input, ds0)
l.captureSimState(l.prevAccept, input, ds0)
}
var t = input.LA(1)
var s = ds0 // s is current/from DFA state
@ -206,26 +207,26 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
// A character will take us back to an existing DFA state
// that already has lots of edges out of it. e.g., .* in comments.
// print("Target for:" + str(s) + " and:" + str(t))
var target = this.getExistingTargetState(s, t)
// if PortDebug {
// fmt.Println(target)
// }
var target = l.getExistingTargetState(s, t)
// if PortDebug {
// fmt.Println(target)
// }
if target == nil {
target = this.computeTargetState(input, s, t)
target = l.computeTargetState(input, s, t)
// print("Computed:" + str(target))
}
if target == ATNSimulatorError {
break
}
// If this is a consumable input element, make sure to consume before
// If l is a consumable input element, make sure to consume before
// capturing the accept state so the input index, line, and char
// position accurately reflect the state of the interpreter at the
// end of the token.
if t != TokenEOF {
this.consume(input)
l.consume(input)
}
if target.isAcceptState {
this.captureSimState(this.prevAccept, input, target)
l.captureSimState(l.prevAccept, input, target)
if t == TokenEOF {
break
}
@ -237,24 +238,24 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
if PortDebug {
fmt.Println("DONE WITH execATN loop")
}
return this.failOrAccept(this.prevAccept, input, s.configs, t)
return l.failOrAccept(l.prevAccept, input, s.configs, t)
}
// Get an existing target state for an edge in the DFA. If the target state
// for the edge has not yet been computed or is otherwise not available,
// this method returns {@code nil}.
// l method returns {@code nil}.
//
// @param s The current DFA state
// @param t The next input symbol
// @return The existing target DFA state for the given input symbol
// {@code t}, or {@code nil} if the target state for this edge is not
// {@code t}, or {@code nil} if the target state for l edge is not
// already cached
func (this *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
if s.edges == nil || t < LexerATNSimulatorMIN_DFA_EDGE || t > LexerATNSimulatorMAX_DFA_EDGE {
func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
if s.edges == nil || t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
return nil
}
var target = s.edges[t-LexerATNSimulatorMIN_DFA_EDGE]
var target = s.edges[t-LexerATNSimulatorMinDFAEdge]
if target == nil {
target = nil
}
@ -272,51 +273,52 @@ func (this *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFASt
// @param t The next input symbol
//
// @return The computed target DFA state for the given input symbol
// {@code t}. If {@code t} does not lead to a valid DFA state, this method
// {@code t}. If {@code t} does not lead to a valid DFA state, this method
// returns {@link //ERROR}.
func (this *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t int) *DFAState {
func (l *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t int) *DFAState {
var reach = NewOrderedATNConfigSet()
// if we don't find an existing DFA state
// Fill reach starting from closure, following t transitions
this.getReachableConfigSet(input, s.configs, reach.BaseATNConfigSet, t)
l.getReachableConfigSet(input, s.configs, reach.BaseATNConfigSet, t)
if len(reach.configs) == 0 { // we got nowhere on t from s
if !reach.hasSemanticContext {
// we got nowhere on t, don't panic out this knowledge it'd
			// we got nowhere on t, but don't throw out this knowledge; it'd
// cause a failover from DFA later.
this.addDFAEdge(s, t, ATNSimulatorError, nil)
l.addDFAEdge(s, t, ATNSimulatorError, nil)
}
// stop when we can't Match any more char
return ATNSimulatorError
}
// Add an edge from s to target DFA found/created for reach
return this.addDFAEdge(s, t, nil, reach.BaseATNConfigSet)
return l.addDFAEdge(s, t, nil, reach.BaseATNConfigSet)
}
func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach ATNConfigSet, t int) int {
if this.prevAccept.dfaState != nil {
func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach ATNConfigSet, t int) int {
if l.prevAccept.dfaState != nil {
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor
this.accept(input, lexerActionExecutor, this.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
l.accept(input, lexerActionExecutor, l.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
if PortDebug {
fmt.Println(prevAccept.dfaState.prediction)
}
return prevAccept.dfaState.prediction
} else {
// if no accept and EOF is first char, return EOF
if t == TokenEOF && input.Index() == this.startIndex {
return TokenEOF
}
panic(NewLexerNoViableAltException(this.recog, input, this.startIndex, reach))
}
// if no accept and EOF is first char, return EOF
if t == TokenEOF && input.Index() == l.startIndex {
return TokenEOF
}
panic(NewLexerNoViableAltException(l.recog, input, l.startIndex, reach))
}
// Given a starting configuration set, figure out all ATN configurations
// we can reach upon input {@code t}. Parameter {@code reach} is a return
// parameter.
func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNConfigSet, reach ATNConfigSet, t int) {
// this is used to Skip processing for configs which have a lower priority
func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNConfigSet, reach ATNConfigSet, t int) {
	// this is used to Skip processing for configs which have a lower priority
// than a config that already reached an accept state for the same rule
var SkipAlt = ATNInvalidAltNumber
@ -332,21 +334,21 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure A
if LexerATNSimulatorDebug {
fmt.Printf("testing %s at %s\n", this.GetTokenName(t), cfg.String()) // this.recog, true))
fmt.Printf("testing %s at %s\n", l.GetTokenName(t), cfg.String()) // l.recog, true))
}
for _, trans := range cfg.GetState().GetTransitions() {
var target = this.getReachableTarget(trans, t)
var target = l.getReachableTarget(trans, t)
if target != nil {
var lexerActionExecutor = cfg.(*LexerATNConfig).lexerActionExecutor
if lexerActionExecutor != nil {
lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.Index() - this.startIndex)
lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.Index() - l.startIndex)
}
var treatEofAsEpsilon = (t == TokenEOF)
var treatEOFAsEpsilon = (t == TokenEOF)
var config = NewLexerATNConfig3(cfg.(*LexerATNConfig), target, lexerActionExecutor)
if this.closure(input, config, reach,
currentAltReachedAcceptState, true, treatEofAsEpsilon) {
// any remaining configs for this alt have a lower priority
if l.closure(input, config, reach,
currentAltReachedAcceptState, true, treatEOFAsEpsilon) {
					// any remaining configs for this alt have a lower priority
// than the one that just reached an accept state.
SkipAlt = cfg.GetAlt()
}
@ -355,28 +357,28 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure A
}
}
func (this *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
func (l *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
if LexerATNSimulatorDebug {
fmt.Printf("ACTION %s\n", lexerActionExecutor)
}
// seek to after last char in token
input.Seek(index)
this.line = line
this.column = charPos
if lexerActionExecutor != nil && this.recog != nil {
lexerActionExecutor.execute(this.recog, input, startIndex)
l.line = line
l.column = charPos
if lexerActionExecutor != nil && l.recog != nil {
lexerActionExecutor.execute(l.recog, input, startIndex)
}
}
func (this *LexerATNSimulator) getReachableTarget(trans Transition, t int) ATNState {
func (l *LexerATNSimulator) getReachableTarget(trans Transition, t int) ATNState {
if trans.Matches(t, 0, 0xFFFE) {
return trans.getTarget()
} else {
return nil
}
return nil
}
func (this *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *OrderedATNConfigSet {
func (l *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *OrderedATNConfigSet {
if PortDebug {
fmt.Println("Num transitions" + strconv.Itoa(len(p.GetTransitions())))
@ -386,33 +388,33 @@ func (this *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *
for i := 0; i < len(p.GetTransitions()); i++ {
var target = p.GetTransitions()[i].getTarget()
var cfg = NewLexerATNConfig6(target, i+1, BasePredictionContextEMPTY)
this.closure(input, cfg, configs, false, false, false)
l.closure(input, cfg, configs, false, false, false)
}
return configs
}
// Since the alternatives within any lexer decision are ordered by
// preference, this method stops pursuing the closure as soon as an accept
// preference, this method stops pursuing the closure as soon as an accept
// state is reached. After the first accept state is reached by depth-first
// search from {@code config}, all other (potentially reachable) states for
// this rule would have a lower priority.
// this rule would have a lower priority.
//
// @return {@code true} if an accept state is reached, otherwise
// {@code false}.
func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, configs ATNConfigSet,
currentAltReachedAcceptState, speculative, treatEofAsEpsilon bool) bool {
func (l *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, configs ATNConfigSet,
currentAltReachedAcceptState, speculative, treatEOFAsEpsilon bool) bool {
if LexerATNSimulatorDebug {
fmt.Println("closure(" + config.String() + ")") // config.String(this.recog, true) + ")")
fmt.Println("closure(" + config.String() + ")") // config.String(l.recog, true) + ")")
}
_, ok := config.state.(*RuleStopState)
if ok {
if LexerATNSimulatorDebug {
if this.recog != nil {
fmt.Printf("closure at %s rule stop %s\n", this.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
if l.recog != nil {
fmt.Printf("closure at %s rule stop %s\n", l.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
} else {
fmt.Printf("closure at rule stop %s\n", config)
}
@ -422,18 +424,18 @@ func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig,
if config.context == nil || config.context.isEmpty() {
configs.Add(config, nil)
return true
} else {
configs.Add(NewLexerATNConfig2(config, config.state, BasePredictionContextEMPTY), nil)
currentAltReachedAcceptState = true
}
configs.Add(NewLexerATNConfig2(config, config.state, BasePredictionContextEMPTY), nil)
currentAltReachedAcceptState = true
}
if config.context != nil && !config.context.isEmpty() {
for i := 0; i < config.context.length(); i++ {
if config.context.getReturnState(i) != BasePredictionContextEMPTY_RETURN_STATE {
if config.context.getReturnState(i) != BasePredictionContextEmptyReturnState {
var newContext = config.context.GetParent(i) // "pop" return state
var returnState = this.atn.states[config.context.getReturnState(i)]
var returnState = l.atn.states[config.context.getReturnState(i)]
cfg := NewLexerATNConfig2(config, returnState, newContext)
currentAltReachedAcceptState = this.closure(input, cfg, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
currentAltReachedAcceptState = l.closure(input, cfg, configs, currentAltReachedAcceptState, speculative, treatEOFAsEpsilon)
}
}
}
@ -447,18 +449,18 @@ func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig,
}
for j := 0; j < len(config.state.GetTransitions()); j++ {
var trans = config.state.GetTransitions()[j]
cfg := this.getEpsilonTarget(input, config, trans, configs, speculative, treatEofAsEpsilon)
cfg := l.getEpsilonTarget(input, config, trans, configs, speculative, treatEOFAsEpsilon)
if cfg != nil {
currentAltReachedAcceptState = this.closure(input, cfg, configs,
currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
currentAltReachedAcceptState = l.closure(input, cfg, configs,
currentAltReachedAcceptState, speculative, treatEOFAsEpsilon)
}
}
return currentAltReachedAcceptState
}
// side-effect: can alter configs.hasSemanticContext
func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNConfig, trans Transition,
configs ATNConfigSet, speculative, treatEofAsEpsilon bool) *LexerATNConfig {
func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNConfig, trans Transition,
configs ATNConfigSet, speculative, treatEOFAsEpsilon bool) *LexerATNConfig {
var cfg *LexerATNConfig
@ -472,13 +474,13 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
panic("Precedence predicates are not supported in lexers.")
} else if trans.getSerializationType() == TransitionPREDICATE {
// Track traversing semantic predicates. If we traverse,
// we cannot add a DFA state for this "reach" computation
		// we cannot add a DFA state for this "reach" computation
// because the DFA would not test the predicate again in the
// future. Rather than creating collections of semantic predicates
// like v3 and testing them on prediction, v4 will test them on the
// fly all the time using the ATN not the DFA. This is slower but
// semantically it's not used that often. One of the key elements to
// this predicate mechanism is not adding DFA states that see
		// this predicate mechanism is not adding DFA states that see
// predicates immediately afterwards in the ATN. For example,
// a : ID {p1}? | ID {p2}?
@ -486,7 +488,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
// should create the start state for rule 'a' (to save start state
// competition), but should not create target of ID state. The
// collection of ATN states the following ID references includes
// states reached by traversing predicates. Since this is when we
		// states reached by traversing predicates. Since this is when we
		// test them, we cannot cache the DFA state target of ID.
pt := trans.(*PredicateTransition)
@ -495,7 +497,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
fmt.Println("EVAL rule " + strconv.Itoa(trans.(*PredicateTransition).ruleIndex) + ":" + strconv.Itoa(pt.predIndex))
}
configs.SetHasSemanticContext(true)
if this.evaluatePredicate(input, pt.ruleIndex, pt.predIndex, speculative) {
if l.evaluatePredicate(input, pt.ruleIndex, pt.predIndex, speculative) {
cfg = NewLexerATNConfig4(config, trans.getTarget())
}
} else if trans.getSerializationType() == TransitionACTION {
@ -505,14 +507,14 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
// TODO: if the entry rule is invoked recursively, some
// actions may be executed during the recursive call. The
// problem can appear when hasEmptyPath() is true but
// isEmpty() is false. In this case, the config needs to be
			// isEmpty() is false. In this case, the config needs to be
// split into two contexts - one with just the empty path
// and another with everything but the empty path.
// Unfortunately, the current algorithm does not allow
// getEpsilonTarget to return two configurations, so
// additional modifications are needed before we can support
// the split operation.
var lexerActionExecutor = LexerActionExecutorappend(config.lexerActionExecutor, this.atn.lexerActions[trans.(*ActionTransition).actionIndex])
var lexerActionExecutor = LexerActionExecutorappend(config.lexerActionExecutor, l.atn.lexerActions[trans.(*ActionTransition).actionIndex])
cfg = NewLexerATNConfig3(config, trans.getTarget(), lexerActionExecutor)
} else {
// ignore actions in referenced rules
@ -523,7 +525,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
} else if trans.getSerializationType() == TransitionATOM ||
trans.getSerializationType() == TransitionRANGE ||
trans.getSerializationType() == TransitionSET {
if treatEofAsEpsilon {
if treatEOFAsEpsilon {
if trans.Matches(TokenEOF, 0, 0xFFFF) {
cfg = NewLexerATNConfig4(config, trans.getTarget())
}
@ -534,7 +536,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
// Evaluate a predicate specified in the lexer.
//
// <p>If {@code speculative} is {@code true}, this method was called before
// <p>If {@code speculative} is {@code true}, this method was called before
// {@link //consume} for the Matched character. This method should call
// {@link //consume} before evaluating the predicate to ensure position
// sensitive values, including {@link Lexer//GetText}, {@link Lexer//GetLine},
@ -552,41 +554,41 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
// @return {@code true} if the specified predicate evaluates to
// {@code true}.
// /
func (this *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predIndex int, speculative bool) bool {
func (l *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predIndex int, speculative bool) bool {
// assume true if no recognizer was provided
if this.recog == nil {
if l.recog == nil {
return true
}
if !speculative {
return this.recog.Sempred(nil, ruleIndex, predIndex)
return l.recog.Sempred(nil, ruleIndex, predIndex)
}
var savedcolumn = this.column
var savedLine = this.line
var savedcolumn = l.column
var savedLine = l.line
var index = input.Index()
var marker = input.Mark()
defer func() {
this.column = savedcolumn
this.line = savedLine
l.column = savedcolumn
l.line = savedLine
input.Seek(index)
input.Release(marker)
}()
this.consume(input)
return this.recog.Sempred(nil, ruleIndex, predIndex)
l.consume(input)
return l.recog.Sempred(nil, ruleIndex, predIndex)
}
func (this *LexerATNSimulator) captureSimState(settings *SimState, input CharStream, dfaState *DFAState) {
func (l *LexerATNSimulator) captureSimState(settings *SimState, input CharStream, dfaState *DFAState) {
settings.index = input.Index()
settings.line = this.line
settings.column = this.column
settings.line = l.line
settings.column = l.column
settings.dfaState = dfaState
}
func (this *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState, cfgs ATNConfigSet) *DFAState {
func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfgs ATNConfigSet) *DFAState {
if to == nil && cfgs != nil {
// leading to this call, ATNConfigSet.hasSemanticContext is used as a
// marker indicating dynamic predicate evaluation makes this edge
		// leading to this call, ATNConfigSet.hasSemanticContext is used as a
		// marker indicating dynamic predicate evaluation makes this edge
// dependent on the specific input sequence, so the static edge in the
// DFA should be omitted. The target DFAState is still created since
// execATN has the ability to reSynchronize with the DFA state cache
@ -599,37 +601,37 @@ func (this *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState,
var suppressEdge = cfgs.HasSemanticContext()
cfgs.SetHasSemanticContext(false)
to = this.addDFAState(cfgs)
to = l.addDFAState(cfgs)
if suppressEdge {
return to
}
}
// add the edge
if tk < LexerATNSimulatorMIN_DFA_EDGE || tk > LexerATNSimulatorMAX_DFA_EDGE {
if tk < LexerATNSimulatorMinDFAEdge || tk > LexerATNSimulatorMaxDFAEdge {
// Only track edges within the DFA bounds
return to
}
if LexerATNSimulatorDebug {
fmt.Println("EDGE " + from_.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
}
if from_.edges == nil {
if from.edges == nil {
// make room for tokens 1..n and -1 masquerading as index 0
from_.edges = make([]*DFAState, LexerATNSimulatorMAX_DFA_EDGE-LexerATNSimulatorMIN_DFA_EDGE+1)
from.edges = make([]*DFAState, LexerATNSimulatorMaxDFAEdge-LexerATNSimulatorMinDFAEdge+1)
}
from_.edges[tk-LexerATNSimulatorMIN_DFA_EDGE] = to // connect
from.edges[tk-LexerATNSimulatorMinDFAEdge] = to // connect
return to
}
// Add a NewDFA state if there isn't one with this set of
// Add a NewDFA state if there isn't one with this set of
// configurations already. This method also detects the first
// configuration containing an ATN rule stop state. Later, when
// traversing the DFA, we will know which rule to accept.
func (this *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
var proposed = NewDFAState(-1, configs)
var firstConfigWithRuleStopState ATNConfig = nil
var firstConfigWithRuleStopState ATNConfig
for _, cfg := range configs.GetItems() {
@ -643,10 +645,10 @@ func (this *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
if firstConfigWithRuleStopState != nil {
proposed.isAcceptState = true
proposed.lexerActionExecutor = firstConfigWithRuleStopState.(*LexerATNConfig).lexerActionExecutor
proposed.setPrediction(this.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()])
proposed.setPrediction(l.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()])
}
var hash = proposed.Hash()
var dfa = this.DecisionToDFA[this.mode]
var dfa = l.DecisionToDFA[l.mode]
var existing = dfa.GetStates()[hash]
if existing != nil {
return existing
@ -659,34 +661,34 @@ func (this *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
return newState
}
func (this *LexerATNSimulator) getDFA(mode int) *DFA {
return this.DecisionToDFA[mode]
func (l *LexerATNSimulator) getDFA(mode int) *DFA {
return l.DecisionToDFA[mode]
}
// Get the text Matched so far for the current token.
func (this *LexerATNSimulator) GetText(input CharStream) string {
func (l *LexerATNSimulator) GetText(input CharStream) string {
// index is first lookahead char, don't include.
return input.GetTextFromInterval(NewInterval(this.startIndex, input.Index()-1))
return input.GetTextFromInterval(NewInterval(l.startIndex, input.Index()-1))
}
func (this *LexerATNSimulator) consume(input CharStream) {
func (l *LexerATNSimulator) consume(input CharStream) {
var curChar = input.LA(1)
if curChar == int('\n') {
this.line += 1
this.column = 0
l.line++
l.column = 0
} else {
this.column += 1
l.column++
}
input.Consume()
}
func (this *LexerATNSimulator) GetTokenName(tt int) string {
func (l *LexerATNSimulator) GetTokenName(tt int) string {
if PortDebug {
fmt.Println(tt)
}
if tt == -1 {
return "EOF"
} else {
return "'" + string(tt) + "'"
}
return "'" + string(tt) + "'"
}

View File

@ -18,7 +18,7 @@ func NewLL1Analyzer(atn *ATN) *LL1Analyzer {
// a predicate during analysis if {@code seeThruPreds==false}.
///
const (
LL1AnalyzerHIT_PRED = TokenInvalidType
LL1AnalyzerHitPred = TokenInvalidType
)
//*
@ -40,10 +40,10 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
look[alt] = NewIntervalSet()
var lookBusy = NewSet(nil, nil)
var seeThruPreds = false // fail to get lookahead upon pred
la._LOOK(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
la.look1(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
// Wipe out lookahead for la alternative if we found nothing
// or we had a predicate when we !seeThruPreds
if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHIT_PRED) {
if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHitPred) {
look[alt] = nil
}
}
@ -68,7 +68,7 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
// @return The set of tokens that can follow {@code s} in the ATN in the
// specified {@code ctx}.
///
func (la *LL1Analyzer) LOOK(s, stopState ATNState, ctx RuleContext) *IntervalSet {
func (la *LL1Analyzer) Look(s, stopState ATNState, ctx RuleContext) *IntervalSet {
var r = NewIntervalSet()
var seeThruPreds = true // ignore preds get all lookahead
var lookContext PredictionContext
@ -85,7 +85,7 @@ func (la *LL1Analyzer) LOOK(s, stopState ATNState, ctx RuleContext) *IntervalSet
fmt.Println(seeThruPreds)
fmt.Println("=====")
}
la._LOOK(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
la.look1(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
if PortDebug {
fmt.Println(r)
}
@ -116,13 +116,13 @@ func (la *LL1Analyzer) LOOK(s, stopState ATNState, ctx RuleContext) *IntervalSet
// {@code NewBitSet()} for la argument.
// @param seeThruPreds {@code true} to true semantic predicates as
// implicitly {@code true} and "see through them", otherwise {@code false}
// to treat semantic predicates as opaque and add {@link //HIT_PRED} to the
// to treat semantic predicates as opaque and add {@link //HitPred} to the
// result if one is encountered.
// @param addEOF Add {@link Token//EOF} to the result if the end of the
// outermost context is reached. This parameter has no effect if {@code ctx}
// is {@code nil}.
func (la *LL1Analyzer) __LOOK(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) {
func (la *LL1Analyzer) look2(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) {
returnState := la.atn.states[ctx.getReturnState(i)]
@ -135,11 +135,11 @@ func (la *LL1Analyzer) __LOOK(s, stopState ATNState, ctx PredictionContext, look
}()
calledRuleStack.remove(returnState.GetRuleIndex())
la._LOOK(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la.look1(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
}
func (la *LL1Analyzer) _LOOK(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
func (la *LL1Analyzer) look1(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
c := NewBaseATNConfig6(s, 0, ctx)
@ -182,7 +182,7 @@ func (la *LL1Analyzer) _LOOK(s, stopState ATNState, ctx PredictionContext, look
for i := 0; i < ctx.length(); i++ {
returnState := la.atn.states[ctx.getReturnState(i)]
la.__LOOK(returnState, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
la.look2(returnState, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
}
return
@ -205,7 +205,7 @@ func (la *LL1Analyzer) _LOOK(s, stopState ATNState, ctx PredictionContext, look
newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
la.___LOOK(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
la.look3(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
if PortDebug {
fmt.Println(look)
@ -216,15 +216,15 @@ func (la *LL1Analyzer) _LOOK(s, stopState ATNState, ctx PredictionContext, look
fmt.Println("DEBUG 9")
}
if seeThruPreds {
la._LOOK(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la.look1(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
look.addOne(LL1AnalyzerHIT_PRED)
look.addOne(LL1AnalyzerHitPred)
}
} else if t.getIsEpsilon() {
if PortDebug {
fmt.Println("DEBUG 10")
}
la._LOOK(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la.look1(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else if _, ok := t.(*WildcardTransition); ok {
if PortDebug {
fmt.Println("DEBUG 11")
@ -248,7 +248,7 @@ func (la *LL1Analyzer) _LOOK(s, stopState ATNState, ctx PredictionContext, look
}
}
func (la *LL1Analyzer) ___LOOK(stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
func (la *LL1Analyzer) look3(stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
@ -257,6 +257,6 @@ func (la *LL1Analyzer) ___LOOK(stopState ATNState, ctx PredictionContext, look *
}()
calledRuleStack.add(t1.getTarget().GetRuleIndex())
la._LOOK(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la.look1(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
}

View File

@ -34,13 +34,13 @@ type BaseParser struct {
Interpreter *ParserATNSimulator
BuildParseTrees bool
_input TokenStream
_errHandler ErrorStrategy
_precedenceStack IntStack
_ctx ParserRuleContext
input TokenStream
errHandler ErrorStrategy
precedenceStack IntStack
ctx ParserRuleContext
_tracer *TraceListener
_parseListeners []ParseTreeListener
tracer *TraceListener
parseListeners []ParseTreeListener
_SyntaxErrors int
}
@ -53,15 +53,15 @@ func NewBaseParser(input TokenStream) *BaseParser {
p.BaseRecognizer = NewBaseRecognizer()
// The input stream.
p._input = nil
p.input = nil
// The error handling strategy for the parser. The default value is a new
// instance of {@link DefaultErrorStrategy}.
p._errHandler = NewDefaultErrorStrategy()
p._precedenceStack = make([]int, 0)
p._precedenceStack.Push(0)
p.errHandler = NewDefaultErrorStrategy()
p.precedenceStack = make([]int, 0)
p.precedenceStack.Push(0)
// The {@link ParserRuleContext} object for the currently executing rule.
// p.is always non-nil during the parsing process.
p._ctx = nil
p.ctx = nil
// Specifies whether or not the parser should construct a parse tree during
// the parsing process. The default value is {@code true}.
p.BuildParseTrees = true
@ -70,10 +70,10 @@ func NewBaseParser(input TokenStream) *BaseParser {
// later call to {@link //setTrace}{@code (false)}. The listener itself is
// implemented as a parser listener so p.field is not directly used by
// other parser methods.
p._tracer = nil
p.tracer = nil
// The list of {@link ParseTreeListener} listeners registered to receive
// events during the parse.
p._parseListeners = nil
p.parseListeners = nil
// The number of syntax errors Reported during parsing. p.value is
// incremented each time {@link //NotifyErrorListeners} is called.
p._SyntaxErrors = 0
@ -92,26 +92,26 @@ var bypassAltsAtnCache = make(map[string]int)
// reset the parser's state//
func (p *BaseParser) reset() {
if p._input != nil {
p._input.Seek(0)
if p.input != nil {
p.input.Seek(0)
}
p._errHandler.reset(p)
p._ctx = nil
p.errHandler.reset(p)
p.ctx = nil
p._SyntaxErrors = 0
p.SetTrace(nil)
p._precedenceStack = make([]int, 0)
p._precedenceStack.Push(0)
p.precedenceStack = make([]int, 0)
p.precedenceStack.Push(0)
if p.Interpreter != nil {
p.Interpreter.reset()
}
}
func (p *BaseParser) GetErrorHandler() ErrorStrategy {
return p._errHandler
return p.errHandler
}
func (p *BaseParser) SetErrorHandler(e ErrorStrategy) {
p._errHandler = e
p.errHandler = e
}
// Match current input symbol against {@code ttype}. If the symbol type
@ -143,15 +143,15 @@ func (p *BaseParser) Match(ttype int) Token {
}
if t.GetTokenType() == ttype {
p._errHandler.ReportMatch(p)
p.errHandler.ReportMatch(p)
p.Consume()
} else {
t = p._errHandler.RecoverInline(p)
t = p.errHandler.RecoverInline(p)
if p.BuildParseTrees && t.GetTokenIndex() == -1 {
// we must have conjured up a Newtoken during single token
// insertion
// if it's not the current symbol
p._ctx.AddErrorNode(t)
p.ctx.AddErrorNode(t)
}
}
@ -181,33 +181,33 @@ func (p *BaseParser) Match(ttype int) Token {
func (p *BaseParser) MatchWildcard() Token {
var t = p.GetCurrentToken()
if t.GetTokenType() > 0 {
p._errHandler.ReportMatch(p)
p.errHandler.ReportMatch(p)
p.Consume()
} else {
t = p._errHandler.RecoverInline(p)
t = p.errHandler.RecoverInline(p)
if p.BuildParseTrees && t.GetTokenIndex() == -1 {
// we must have conjured up a Newtoken during single token
// insertion
// if it's not the current symbol
p._ctx.AddErrorNode(t)
p.ctx.AddErrorNode(t)
}
}
return t
}
func (p *BaseParser) GetParserRuleContext() ParserRuleContext {
return p._ctx
return p.ctx
}
func (p *BaseParser) SetParserRuleContext(v ParserRuleContext) {
p._ctx = v
func (p *BaseParser) SetParserRuleContext(v ParserRuleContext) {
p.ctx = v
}
func (p *BaseParser) GetParseListeners() []ParseTreeListener {
if p._parseListeners == nil {
if p.parseListeners == nil {
return make([]ParseTreeListener, 0)
}
return p._parseListeners
return p.parseListeners
}
// Registers {@code listener} to receive events during the parsing process.
@ -242,10 +242,10 @@ func (p *BaseParser) AddParseListener(listener ParseTreeListener) {
if listener == nil {
panic("listener")
}
if p._parseListeners == nil {
p._parseListeners = make([]ParseTreeListener, 0)
if p.parseListeners == nil {
p.parseListeners = make([]ParseTreeListener, 0)
}
p._parseListeners = append(p._parseListeners, listener)
p.parseListeners = append(p.parseListeners, listener)
}
//
@ -257,10 +257,10 @@ func (p *BaseParser) AddParseListener(listener ParseTreeListener) {
//
func (p *BaseParser) RemoveParseListener(listener ParseTreeListener) {
if p._parseListeners != nil {
if p.parseListeners != nil {
idx := -1
for i, v := range p._parseListeners {
for i, v := range p.parseListeners {
if v == listener {
idx = i
break
@ -272,24 +272,24 @@ func (p *BaseParser) RemoveParseListener(listener ParseTreeListener) {
}
// remove the listener from the slice
p._parseListeners = append(p._parseListeners[0:idx], p._parseListeners[idx+1:]...)
p.parseListeners = append(p.parseListeners[0:idx], p.parseListeners[idx+1:]...)
if len(p._parseListeners) == 0 {
p._parseListeners = nil
if len(p.parseListeners) == 0 {
p.parseListeners = nil
}
}
}
// Remove all parse listeners.
func (p *BaseParser) removeParseListeners() {
p._parseListeners = nil
p.parseListeners = nil
}
// Notify any parse listeners of an enter rule event.
func (p *BaseParser) TriggerEnterRuleEvent() {
if p._parseListeners != nil {
var ctx = p._ctx
for _, listener := range p._parseListeners {
if p.parseListeners != nil {
var ctx = p.ctx
for _, listener := range p.parseListeners {
listener.EnterEveryRule(ctx)
ctx.EnterRule(listener)
}
@ -302,34 +302,34 @@ func (p *BaseParser) TriggerEnterRuleEvent() {
// @see //addParseListener
//
func (p *BaseParser) TriggerExitRuleEvent() {
if p._parseListeners != nil {
if p.parseListeners != nil {
// reverse order walk of listeners
ctx := p._ctx
l := len(p._parseListeners) - 1
ctx := p.ctx
l := len(p.parseListeners) - 1
for i := range p._parseListeners {
listener := p._parseListeners[l-i]
for i := range p.parseListeners {
listener := p.parseListeners[l-i]
ctx.ExitRule(listener)
listener.ExitEveryRule(ctx)
}
}
}
func (this *BaseParser) GetInterpreter() *ParserATNSimulator {
return this.Interpreter
func (p *BaseParser) GetInterpreter() *ParserATNSimulator {
return p.Interpreter
}
func (this *BaseParser) GetATN() *ATN {
return this.Interpreter.atn
func (p *BaseParser) GetATN() *ATN {
return p.Interpreter.atn
}
func (p *BaseParser) GetTokenFactory() TokenFactory {
return p._input.GetTokenSource().GetTokenFactory()
return p.input.GetTokenSource().GetTokenFactory()
}
// Tell our token source and error strategy about a Newway to create tokens.//
func (p *BaseParser) setTokenFactory(factory TokenFactory) {
p._input.GetTokenSource().setTokenFactory(factory)
p.input.GetTokenSource().setTokenFactory(factory)
}
// The ATN with bypass alternatives is expensive to create so we create it
@ -397,28 +397,28 @@ func (p *BaseParser) SetInputStream(input TokenStream) {
}
func (p *BaseParser) GetTokenStream() TokenStream {
return p._input
return p.input
}
// Set the token stream and reset the parser.//
func (p *BaseParser) SetTokenStream(input TokenStream) {
p._input = nil
p.input = nil
p.reset()
p._input = input
p.input = input
}
// Match needs to return the current input symbol, which gets put
// into the label for the associated token ref e.g., x=ID.
//
func (p *BaseParser) GetCurrentToken() Token {
return p._input.LT(1)
return p.input.LT(1)
}
func (p *BaseParser) NotifyErrorListeners(msg string, offendingToken Token, err RecognitionException) {
if offendingToken == nil {
offendingToken = p.GetCurrentToken()
}
p._SyntaxErrors += 1
p._SyntaxErrors++
var line = offendingToken.GetLine()
var column = offendingToken.GetColumn()
listener := p.GetErrorListenerDispatch()
@ -436,20 +436,20 @@ func (p *BaseParser) Consume() Token {
fmt.Println("Done consuming")
}
}
var hasListener = p._parseListeners != nil && len(p._parseListeners) > 0
var hasListener = p.parseListeners != nil && len(p.parseListeners) > 0
if p.BuildParseTrees || hasListener {
if p._errHandler.inErrorRecoveryMode(p) {
var node = p._ctx.AddErrorNode(o)
if p._parseListeners != nil {
for _, l := range p._parseListeners {
if p.errHandler.inErrorRecoveryMode(p) {
var node = p.ctx.AddErrorNode(o)
if p.parseListeners != nil {
for _, l := range p.parseListeners {
l.VisitErrorNode(node)
}
}
} else {
node := p._ctx.AddTokenNode(o)
if p._parseListeners != nil {
for _, l := range p._parseListeners {
node := p.ctx.AddTokenNode(o)
if p.parseListeners != nil {
for _, l := range p.parseListeners {
l.VisitTerminal(node)
}
}
@ -462,47 +462,47 @@ func (p *BaseParser) Consume() Token {
func (p *BaseParser) addContextToParseTree() {
// add current context to parent if we have a parent
if p._ctx.GetParent() != nil {
p._ctx.GetParent().(ParserRuleContext).AddChild(p._ctx)
if p.ctx.GetParent() != nil {
p.ctx.GetParent().(ParserRuleContext).AddChild(p.ctx)
}
}
func (p *BaseParser) EnterRule(localctx ParserRuleContext, state, ruleIndex int) {
p.SetState(state)
p._ctx = localctx
p._ctx.SetStart(p._input.LT(1))
p.ctx = localctx
p.ctx.SetStart(p.input.LT(1))
if p.BuildParseTrees {
p.addContextToParseTree()
}
if p._parseListeners != nil {
if p.parseListeners != nil {
p.TriggerEnterRuleEvent()
}
}
func (p *BaseParser) ExitRule() {
p._ctx.SetStop(p._input.LT(-1))
// trigger event on _ctx, before it reverts to parent
if p._parseListeners != nil {
p.ctx.SetStop(p.input.LT(-1))
// trigger event on ctx, before it reverts to parent
if p.parseListeners != nil {
p.TriggerExitRuleEvent()
}
p.SetState(p._ctx.GetInvokingState())
if p._ctx.GetParent() != nil {
p._ctx = p._ctx.GetParent().(ParserRuleContext)
p.SetState(p.ctx.GetInvokingState())
if p.ctx.GetParent() != nil {
p.ctx = p.ctx.GetParent().(ParserRuleContext)
} else {
p._ctx = nil
p.ctx = nil
}
}
func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) {
// if we have Newlocalctx, make sure we replace existing ctx
// that is previous child of parse tree
if p.BuildParseTrees && p._ctx != localctx {
if p._ctx.GetParent() != nil {
p._ctx.GetParent().(ParserRuleContext).RemoveLastChild()
p._ctx.GetParent().(ParserRuleContext).AddChild(localctx)
if p.BuildParseTrees && p.ctx != localctx {
if p.ctx.GetParent() != nil {
p.ctx.GetParent().(ParserRuleContext).RemoveLastChild()
p.ctx.GetParent().(ParserRuleContext).AddChild(localctx)
}
}
p._ctx = localctx
p.ctx = localctx
}
// Get the precedence level for the top-most precedence rule.
@ -511,19 +511,19 @@ func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) {
// the parser context is not nested within a precedence rule.
func (p *BaseParser) GetPrecedence() int {
if len(p._precedenceStack) == 0 {
if len(p.precedenceStack) == 0 {
return -1
} else {
return p._precedenceStack[len(p._precedenceStack)-1]
}
return p.precedenceStack[len(p.precedenceStack)-1]
}
func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, ruleIndex, precedence int) {
p.SetState(state)
p._precedenceStack.Push(precedence)
p._ctx = localctx
p._ctx.SetStart(p._input.LT(1))
if p._parseListeners != nil {
p.precedenceStack.Push(precedence)
p.ctx = localctx
p.ctx.SetStart(p.input.LT(1))
if p.parseListeners != nil {
p.TriggerEnterRuleEvent() // simulates rule entry for
// left-recursive rules
}
@ -533,34 +533,34 @@ func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, ruleI
// Like {@link //EnterRule} but for recursive rules.
func (p *BaseParser) PushNewRecursionContext(localctx ParserRuleContext, state, ruleIndex int) {
var previous = p._ctx
var previous = p.ctx
previous.SetParent(localctx)
previous.SetInvokingState(state)
previous.SetStop(p._input.LT(-1))
previous.SetStop(p.input.LT(-1))
p._ctx = localctx
p._ctx.SetStart(previous.GetStart())
p.ctx = localctx
p.ctx.SetStart(previous.GetStart())
if p.BuildParseTrees {
p._ctx.AddChild(previous)
p.ctx.AddChild(previous)
}
if p._parseListeners != nil {
if p.parseListeners != nil {
p.TriggerEnterRuleEvent() // simulates rule entry for
// left-recursive rules
}
}
func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) {
p._precedenceStack.Pop()
p._ctx.SetStop(p._input.LT(-1))
var retCtx = p._ctx // save current ctx (return value)
// unroll so _ctx is as it was before call to recursive method
if p._parseListeners != nil {
for p._ctx != parentCtx {
p.precedenceStack.Pop()
p.ctx.SetStop(p.input.LT(-1))
var retCtx = p.ctx // save current ctx (return value)
// unroll so ctx is as it was before call to recursive method
if p.parseListeners != nil {
for p.ctx != parentCtx {
p.TriggerExitRuleEvent()
p._ctx = p._ctx.GetParent().(ParserRuleContext)
p.ctx = p.ctx.GetParent().(ParserRuleContext)
}
} else {
p._ctx = parentCtx
p.ctx = parentCtx
}
// hook into tree
retCtx.SetParent(parentCtx)
@ -571,7 +571,7 @@ func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) {
}
func (p *BaseParser) GetInvokingContext(ruleIndex int) ParserRuleContext {
var ctx = p._ctx
var ctx = p.ctx
for ctx != nil {
if ctx.GetRuleIndex() == ruleIndex {
return ctx
@ -582,7 +582,7 @@ func (p *BaseParser) GetInvokingContext(ruleIndex int) ParserRuleContext {
}
func (p *BaseParser) Precpred(localctx RuleContext, precedence int) bool {
return precedence >= p._precedenceStack[len(p._precedenceStack)-1]
return precedence >= p.precedenceStack[len(p.precedenceStack)-1]
}
func (p *BaseParser) inContext(context ParserRuleContext) bool {
@ -605,8 +605,8 @@ func (p *BaseParser) inContext(context ParserRuleContext) bool {
// the ATN, otherwise {@code false}.
func (p *BaseParser) IsExpectedToken(symbol int) bool {
var atn *ATN = p.Interpreter.atn
var ctx = p._ctx
var atn = p.Interpreter.atn
var ctx = p.ctx
var s = atn.states[p.state]
var following = atn.NextTokens(s, nil)
if following.contains(symbol) {
@ -626,9 +626,9 @@ func (p *BaseParser) IsExpectedToken(symbol int) bool {
}
if following.contains(TokenEpsilon) && symbol == TokenEOF {
return true
} else {
return false
}
return false
}
// Computes the set of input symbols which could follow the current parser
@ -638,7 +638,7 @@ func (p *BaseParser) IsExpectedToken(symbol int) bool {
// @see ATN//getExpectedTokens(int, RuleContext)
//
func (p *BaseParser) GetExpectedTokens() *IntervalSet {
return p.Interpreter.atn.getExpectedTokens(p.state, p._ctx)
return p.Interpreter.atn.getExpectedTokens(p.state, p.ctx)
}
func (p *BaseParser) GetExpectedTokensWithinCurrentRule() *IntervalSet {
@ -652,9 +652,9 @@ func (p *BaseParser) GetRuleIndex(ruleName string) int {
var ruleIndex, ok = p.GetRuleIndexMap()[ruleName]
if ok {
return ruleIndex
} else {
return -1
}
return -1
}
// Return List&ltString&gt of the rule names in your parser instance
@ -664,27 +664,27 @@ func (p *BaseParser) GetRuleIndex(ruleName string) int {
//
// this very useful for error messages.
func (this *BaseParser) GetRuleInvocationStack(p ParserRuleContext) []string {
if p == nil {
p = this._ctx
func (p *BaseParser) GetRuleInvocationStack(c ParserRuleContext) []string {
if c == nil {
c = p.ctx
}
var stack = make([]string, 0)
for p != nil {
for c != nil {
// compute what follows who invoked us
var ruleIndex = p.GetRuleIndex()
var ruleIndex = c.GetRuleIndex()
if ruleIndex < 0 {
stack = append(stack, "n/a")
} else {
stack = append(stack, this.GetRuleNames()[ruleIndex])
stack = append(stack, p.GetRuleNames()[ruleIndex])
}
vp := p.GetParent()
vp := c.GetParent()
if vp == nil {
break
}
p = vp.(ParserRuleContext)
c = vp.(ParserRuleContext)
}
return stack
}
@ -698,8 +698,8 @@ func (p *BaseParser) GetDFAStrings() string {
func (p *BaseParser) DumpDFA() {
var seenOne = false
for _, dfa := range p.Interpreter.DecisionToDFA {
if ( len(dfa.GetStates()) > 0) {
if (seenOne) {
if len(dfa.GetStates()) > 0 {
if seenOne {
fmt.Println()
}
fmt.Println("Decision " + strconv.Itoa(dfa.decision) + ":")
@ -718,13 +718,13 @@ func (p *BaseParser) GetSourceName() string {
//
func (p *BaseParser) SetTrace(trace *TraceListener) {
if trace == nil {
p.RemoveParseListener(p._tracer)
p._tracer = nil
p.RemoveParseListener(p.tracer)
p.tracer = nil
} else {
if p._tracer != nil {
p.RemoveParseListener(p._tracer)
if p.tracer != nil {
p.RemoveParseListener(p.tracer)
}
p._tracer = NewTraceListener(p)
p.AddParseListener(p._tracer)
p.tracer = NewTraceListener(p)
p.AddParseListener(p.tracer)
}
}

View File

@ -3,7 +3,7 @@ package antlr
import (
"reflect"
"strconv"
// "fmt"
// "fmt"
)
type ParserRuleContext interface {
@ -32,7 +32,7 @@ type BaseParserRuleContext struct {
start, stop Token
exception RecognitionException
children []Tree
children []Tree
}
func NewBaseParserRuleContext(parent ParserRuleContext, invokingStateNumber int) *BaseParserRuleContext {
@ -76,17 +76,17 @@ func (prc *BaseParserRuleContext) CopyFrom(ctx *BaseParserRuleContext) {
prc.stop = ctx.stop
}
func (this *BaseParserRuleContext) GetText() string {
if this.GetChildCount() == 0 {
func (prc *BaseParserRuleContext) GetText() string {
if prc.GetChildCount() == 0 {
return ""
} else {
var s string
for _, child := range this.children {
s += child.(ParseTree).GetText()
}
return s
}
var s string
for _, child := range prc.children {
s += child.(ParseTree).GetText()
}
return s
}
// Double dispatch methods for listeners
@ -148,39 +148,40 @@ func (prc *BaseParserRuleContext) AddErrorNode(badToken Token) *ErrorNodeImpl {
func (prc *BaseParserRuleContext) GetChild(i int) Tree {
if prc.children != nil && len(prc.children) >= i {
return prc.children[i]
} else {
return nil
}
return nil
}
func (prc *BaseParserRuleContext) GetChildOfType(i int, childType reflect.Type) RuleContext {
if childType == nil {
return prc.GetChild(i).(RuleContext)
} else {
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
if reflect.TypeOf(child) == childType {
if i == 0 {
return child.(RuleContext)
} else {
i -= 1
}
}
}
return nil
}
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
if reflect.TypeOf(child) == childType {
if i == 0 {
return child.(RuleContext)
}
i--
}
}
return nil
}
func (this *BaseParserRuleContext) ToStringTree(ruleNames []string, recog Recognizer) string {
return TreesStringTree(this, ruleNames, recog)
func (prc *BaseParserRuleContext) ToStringTree(ruleNames []string, recog Recognizer) string {
return TreesStringTree(prc, ruleNames, recog)
}
func (prc *BaseParserRuleContext) GetRuleContext() RuleContext {
return prc
}
func (this *BaseParserRuleContext) Accept(visitor ParseTreeVisitor) interface{} {
return visitor.VisitChildren(this)
func (prc *BaseParserRuleContext) Accept(visitor ParseTreeVisitor) interface{} {
return visitor.VisitChildren(prc)
}
func (prc *BaseParserRuleContext) SetStart(t Token) {
@ -207,9 +208,9 @@ func (prc *BaseParserRuleContext) GetToken(ttype int, i int) TerminalNode {
if c2.GetSymbol().GetTokenType() == ttype {
if i == 0 {
return c2
} else {
i -= 1
}
i--
}
}
}
@ -219,43 +220,46 @@ func (prc *BaseParserRuleContext) GetToken(ttype int, i int) TerminalNode {
func (prc *BaseParserRuleContext) GetTokens(ttype int) []TerminalNode {
if prc.children == nil {
return make([]TerminalNode, 0)
} else {
var tokens = make([]TerminalNode, 0)
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
if tchild, ok := child.(TerminalNode); ok {
if tchild.GetSymbol().GetTokenType() == ttype {
tokens = append(tokens, tchild)
}
}
var tokens = make([]TerminalNode, 0)
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
if tchild, ok := child.(TerminalNode); ok {
if tchild.GetSymbol().GetTokenType() == ttype {
tokens = append(tokens, tchild)
}
}
return tokens
}
return tokens
}
func (prc *BaseParserRuleContext) GetPayload() interface{}{
func (prc *BaseParserRuleContext) GetPayload() interface{} {
return prc
}
func (prc *BaseParserRuleContext) getChild(ctxType reflect.Type, i int) RuleContext {
if ( prc.children==nil || i < 0 || i >= len(prc.children) ) {
if prc.children == nil || i < 0 || i >= len(prc.children) {
return nil
}
var j int = -1 // what element have we found with ctxType?
for _,o := range prc.children {
var j = -1 // what element have we found with ctxType?
for _, o := range prc.children {
childType := reflect.TypeOf(o)
if ( childType.Implements(ctxType) ) {
if childType.Implements(ctxType) {
j++
if ( j == i ) {
if j == i {
return o.(RuleContext)
}
}
}
return nil
}
// Go lacks generics, so it's not possible for us to return the child with the correct type, but we do
// check for convertibility
@ -270,7 +274,7 @@ func (prc *BaseParserRuleContext) GetTypedRuleContexts(ctxType reflect.Type) []R
var contexts = make([]RuleContext, 0)
for _,child := range prc.children {
for _, child := range prc.children {
childType := reflect.TypeOf(child)
if childType.ConvertibleTo(ctxType) {
@ -283,28 +287,28 @@ func (prc *BaseParserRuleContext) GetTypedRuleContexts(ctxType reflect.Type) []R
func (prc *BaseParserRuleContext) GetChildCount() int {
if prc.children == nil {
return 0
} else {
return len(prc.children)
}
return len(prc.children)
}
func (prc *BaseParserRuleContext) GetSourceInterval() *Interval {
if prc.start == nil || prc.stop == nil {
return TreeInvalidInterval
} else {
return NewInterval(prc.start.GetTokenIndex(), prc.stop.GetTokenIndex())
}
return NewInterval(prc.start.GetTokenIndex(), prc.stop.GetTokenIndex())
}
//need to manage circular dependencies, so export now
// Print out a whole tree, not just a node, in LISP format
// (root child1 .. childN). Print just a node if this is a leaf.
// (root child1 .. childN). Print just a node if b is a leaf.
//
func (this *BaseParserRuleContext) String(ruleNames []string, stop RuleContext) string {
func (prc *BaseParserRuleContext) String(ruleNames []string, stop RuleContext) string {
var p ParserRuleContext = this
var p ParserRuleContext = prc
var s = "["
for p != nil && p != stop {
if ruleNames == nil {
@ -335,11 +339,8 @@ func (this *BaseParserRuleContext) String(ruleNames []string, stop RuleContext)
return s
}
var RuleContextEmpty = NewBaseParserRuleContext(nil, -1)
type InterpreterRuleContext interface {
ParserRuleContext
}

View File

@ -32,12 +32,12 @@ func NewBasePredictionContext(cachedHashString string) *BasePredictionContext {
// {@code//+x =//}.
// /
const (
BasePredictionContextEMPTY_RETURN_STATE = 0x7FFFFFFF
BasePredictionContextEmptyReturnState = 0x7FFFFFFF
)
// Represents {@code $} in an array in full context mode, when {@code $}
// doesn't mean wildcard: {@code $ + x = [$,x]}. Here,
// {@code $} = {@link //EMPTY_RETURN_STATE}.
// {@code $} = {@link //EmptyReturnState}.
// /
var BasePredictionContextglobalNodeCount = 1
@ -68,12 +68,12 @@ var BasePredictionContextid = BasePredictionContextglobalNodeCount
// </pre>
//
func (this *BasePredictionContext) isEmpty() bool {
func (b *BasePredictionContext) isEmpty() bool {
return false
}
func (this *BasePredictionContext) Hash() string {
return this.cachedHashString
func (b *BasePredictionContext) Hash() string {
return b.cachedHashString
}
func calculateHashString(parent PredictionContext, returnState int) string {
@ -102,24 +102,24 @@ func NewPredictionContextCache() *PredictionContextCache {
// return that one instead and do not add a Newcontext to the cache.
// Protect shared cache from unsafe thread access.
//
func (this *PredictionContextCache) add(ctx PredictionContext) PredictionContext {
func (p *PredictionContextCache) add(ctx PredictionContext) PredictionContext {
if ctx == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY
}
var existing = this.cache[ctx]
var existing = p.cache[ctx]
if existing != nil {
return existing
}
this.cache[ctx] = ctx
p.cache[ctx] = ctx
return ctx
}
func (this *PredictionContextCache) Get(ctx PredictionContext) PredictionContext {
return this.cache[ctx]
func (p *PredictionContextCache) Get(ctx PredictionContext) PredictionContext {
return p.cache[ctx]
}
func (this *PredictionContextCache) length() int {
return len(this.cache)
func (p *PredictionContextCache) length() int {
return len(p.cache)
}
type SingletonPredictionContext interface {
@ -151,73 +151,72 @@ func NewBaseSingletonPredictionContext(parent PredictionContext, returnState int
}
func SingletonBasePredictionContextCreate(parent PredictionContext, returnState int) PredictionContext {
if returnState == BasePredictionContextEMPTY_RETURN_STATE && parent == nil {
if returnState == BasePredictionContextEmptyReturnState && parent == nil {
// someone can pass in the bits of an array ctx that mean $
return BasePredictionContextEMPTY
} else {
return NewBaseSingletonPredictionContext(parent, returnState)
}
return NewBaseSingletonPredictionContext(parent, returnState)
}
func (this *BaseSingletonPredictionContext) length() int {
func (b *BaseSingletonPredictionContext) length() int {
return 1
}
func (this *BaseSingletonPredictionContext) GetParent(index int) PredictionContext {
return this.parentCtx
func (b *BaseSingletonPredictionContext) GetParent(index int) PredictionContext {
return b.parentCtx
}
func (this *BaseSingletonPredictionContext) getReturnState(index int) int {
return this.returnState
func (b *BaseSingletonPredictionContext) getReturnState(index int) int {
return b.returnState
}
func (this *BaseSingletonPredictionContext) hasEmptyPath() bool {
return this.returnState == BasePredictionContextEMPTY_RETURN_STATE
func (b *BaseSingletonPredictionContext) hasEmptyPath() bool {
return b.returnState == BasePredictionContextEmptyReturnState
}
func (this *BaseSingletonPredictionContext) equals(other PredictionContext) bool {
if this == other {
func (b *BaseSingletonPredictionContext) equals(other PredictionContext) bool {
if b == other {
return true
} else if _, ok := other.(*BaseSingletonPredictionContext); !ok {
return false
} else if this.Hash() != other.Hash() {
} else if b.Hash() != other.Hash() {
return false // can't be same if hash is different
} else {
otherP := other.(*BaseSingletonPredictionContext)
if this.returnState != other.getReturnState(0) {
return false
} else if this.parentCtx == nil {
return otherP.parentCtx == nil
} else {
return this.parentCtx.equals(otherP.parentCtx)
}
}
otherP := other.(*BaseSingletonPredictionContext)
if b.returnState != other.getReturnState(0) {
return false
} else if b.parentCtx == nil {
return otherP.parentCtx == nil
}
return b.parentCtx.equals(otherP.parentCtx)
}
func (this *BaseSingletonPredictionContext) Hash() string {
return this.cachedHashString
func (b *BaseSingletonPredictionContext) Hash() string {
return b.cachedHashString
}
func (this *BaseSingletonPredictionContext) String() string {
func (b *BaseSingletonPredictionContext) String() string {
var up string
if this.parentCtx == nil {
if b.parentCtx == nil {
up = ""
} else {
up = this.parentCtx.String()
up = b.parentCtx.String()
}
if len(up) == 0 {
if this.returnState == BasePredictionContextEMPTY_RETURN_STATE {
if b.returnState == BasePredictionContextEmptyReturnState {
return "$"
} else {
return strconv.Itoa(this.returnState)
}
} else {
return strconv.Itoa(this.returnState) + " " + up
return strconv.Itoa(b.returnState)
}
return strconv.Itoa(b.returnState) + " " + up
}
var BasePredictionContextEMPTY = NewEmptyPredictionContext()
@ -230,28 +229,28 @@ func NewEmptyPredictionContext() *EmptyPredictionContext {
p := new(EmptyPredictionContext)
p.BaseSingletonPredictionContext = NewBaseSingletonPredictionContext(nil, BasePredictionContextEMPTY_RETURN_STATE)
p.BaseSingletonPredictionContext = NewBaseSingletonPredictionContext(nil, BasePredictionContextEmptyReturnState)
return p
}
func (this *EmptyPredictionContext) isEmpty() bool {
func (e *EmptyPredictionContext) isEmpty() bool {
return true
}
func (this *EmptyPredictionContext) GetParent(index int) PredictionContext {
func (e *EmptyPredictionContext) GetParent(index int) PredictionContext {
return nil
}
func (this *EmptyPredictionContext) getReturnState(index int) int {
return this.returnState
func (e *EmptyPredictionContext) getReturnState(index int) int {
return e.returnState
}
func (this *EmptyPredictionContext) equals(other PredictionContext) bool {
return this == other
func (e *EmptyPredictionContext) equals(other PredictionContext) bool {
return e == other
}
func (this *EmptyPredictionContext) String() string {
func (e *EmptyPredictionContext) String() string {
return "$"
}
@ -266,7 +265,7 @@ func NewArrayPredictionContext(parents []PredictionContext, returnStates []int)
// Parent can be nil only if full ctx mode and we make an array
// from {@link //EMPTY} and non-empty. We merge {@link //EMPTY} by using
// nil parent and
// returnState == {@link //EMPTY_RETURN_STATE}.
// returnState == {@link //EmptyReturnState}.
c := new(ArrayPredictionContext)
c.BasePredictionContext = NewBasePredictionContext("")
@ -281,65 +280,66 @@ func NewArrayPredictionContext(parents []PredictionContext, returnStates []int)
return c
}
func (c *ArrayPredictionContext) GetReturnStates() []int {
return c.returnStates
func (a *ArrayPredictionContext) GetReturnStates() []int {
return a.returnStates
}
func (this *ArrayPredictionContext) hasEmptyPath() bool {
return this.getReturnState(this.length()-1) == BasePredictionContextEMPTY_RETURN_STATE
func (a *ArrayPredictionContext) hasEmptyPath() bool {
return a.getReturnState(a.length()-1) == BasePredictionContextEmptyReturnState
}
func (this *ArrayPredictionContext) isEmpty() bool {
// since EMPTY_RETURN_STATE can only appear in the last position, we
func (a *ArrayPredictionContext) isEmpty() bool {
// since EmptyReturnState can only appear in the last position, we
// don't need to verify that size==1
return this.returnStates[0] == BasePredictionContextEMPTY_RETURN_STATE
return a.returnStates[0] == BasePredictionContextEmptyReturnState
}
func (this *ArrayPredictionContext) length() int {
return len(this.returnStates)
func (a *ArrayPredictionContext) length() int {
return len(a.returnStates)
}
func (this *ArrayPredictionContext) GetParent(index int) PredictionContext {
return this.parents[index]
func (a *ArrayPredictionContext) GetParent(index int) PredictionContext {
return a.parents[index]
}
func (this *ArrayPredictionContext) getReturnState(index int) int {
return this.returnStates[index]
func (a *ArrayPredictionContext) getReturnState(index int) int {
return a.returnStates[index]
}
func (this *ArrayPredictionContext) equals(other PredictionContext) bool {
func (a *ArrayPredictionContext) equals(other PredictionContext) bool {
if _, ok := other.(*ArrayPredictionContext); !ok {
return false
} else if this.cachedHashString != other.Hash() {
} else if a.cachedHashString != other.Hash() {
return false // can't be same if hash is different
} else {
otherP := other.(*ArrayPredictionContext)
return &this.returnStates == &otherP.returnStates && &this.parents == &otherP.parents
return &a.returnStates == &otherP.returnStates && &a.parents == &otherP.parents
}
}
func (this *ArrayPredictionContext) String() string {
if this.isEmpty() {
func (a *ArrayPredictionContext) String() string {
if a.isEmpty() {
return "[]"
} else {
var s = "["
for i := 0; i < len(this.returnStates); i++ {
if i > 0 {
s = s + ", "
}
if this.returnStates[i] == BasePredictionContextEMPTY_RETURN_STATE {
s = s + "$"
continue
}
s = s + strconv.Itoa(this.returnStates[i])
if this.parents[i] != nil {
s = s + " " + this.parents[i].String()
} else {
s = s + "nil"
}
}
return s + "]"
}
var s = "["
for i := 0; i < len(a.returnStates); i++ {
if i > 0 {
s = s + ", "
}
if a.returnStates[i] == BasePredictionContextEmptyReturnState {
s = s + "$"
continue
}
s = s + strconv.Itoa(a.returnStates[i])
if a.parents[i] != nil {
s = s + " " + a.parents[i].String()
} else {
s = s + "nil"
}
}
return s + "]"
}
// Convert a {@link RuleContext} tree to a {@link BasePredictionContext} graph.
@ -477,44 +477,44 @@ func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool,
mergeCache.set(a.Hash(), b.Hash(), spc)
}
return spc
} else { // a != b payloads differ
// see if we can collapse parents due to $+x parents if local ctx
var singleParent PredictionContext = nil
if a == b || (a.parentCtx != nil && a.parentCtx == b.parentCtx) { // ax +
// bx =
// [a,b]x
singleParent = a.parentCtx
}
if singleParent != nil { // parents are same
// sort payloads and use same parent
var payloads = []int{a.returnState, b.returnState}
if a.returnState > b.returnState {
payloads[0] = b.returnState
payloads[1] = a.returnState
}
var parents = []PredictionContext{singleParent, singleParent}
var apc = NewArrayPredictionContext(parents, payloads)
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), apc)
}
return apc
}
// parents differ and can't merge them. Just pack together
// into array can't merge.
// ax + by = [ax,by]
}
// a != b payloads differ
// see if we can collapse parents due to $+x parents if local ctx
var singleParent PredictionContext
if a == b || (a.parentCtx != nil && a.parentCtx == b.parentCtx) { // ax +
// bx =
// [a,b]x
singleParent = a.parentCtx
}
if singleParent != nil { // parents are same
// sort payloads and use same parent
var payloads = []int{a.returnState, b.returnState}
var parents = []PredictionContext{a.parentCtx, b.parentCtx}
if a.returnState > b.returnState { // sort by payload
if a.returnState > b.returnState {
payloads[0] = b.returnState
payloads[1] = a.returnState
parents = []PredictionContext{b.parentCtx, a.parentCtx}
}
var a_ = NewArrayPredictionContext(parents, payloads)
var parents = []PredictionContext{singleParent, singleParent}
var apc = NewArrayPredictionContext(parents, payloads)
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), a_)
mergeCache.set(a.Hash(), b.Hash(), apc)
}
return a_
return apc
}
// parents differ and can't merge them. Just pack together
// into array can't merge.
// ax + by = [ax,by]
var payloads = []int{a.returnState, b.returnState}
var parents = []PredictionContext{a.parentCtx, b.parentCtx}
if a.returnState > b.returnState { // sort by payload
payloads[0] = b.returnState
payloads[1] = a.returnState
parents = []PredictionContext{b.parentCtx, a.parentCtx}
}
var apc = NewArrayPredictionContext(parents, payloads)
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), apc)
}
return apc
}
//
@ -567,11 +567,11 @@ func mergeRoot(a, b SingletonPredictionContext, rootIsWildcard bool) PredictionC
if a == BasePredictionContextEMPTY && b == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY // $ + $ = $
} else if a == BasePredictionContextEMPTY { // $ + x = [$,x]
var payloads = []int{b.getReturnState(-1), BasePredictionContextEMPTY_RETURN_STATE}
var payloads = []int{b.getReturnState(-1), BasePredictionContextEmptyReturnState}
var parents = []PredictionContext{b.GetParent(-1), nil}
return NewArrayPredictionContext(parents, payloads)
} else if b == BasePredictionContextEMPTY { // x + $ = [$,x] ($ is always first if present)
var payloads = []int{a.getReturnState(-1), BasePredictionContextEMPTY_RETURN_STATE}
var payloads = []int{a.getReturnState(-1), BasePredictionContextEmptyReturnState}
var parents = []PredictionContext{a.GetParent(-1), nil}
return NewArrayPredictionContext(parents, payloads)
}
@ -619,59 +619,59 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
var mergedParents = make([]PredictionContext, 0)
// walk and merge to yield mergedParents, mergedReturnStates
for i < len(a.returnStates) && j < len(b.returnStates) {
var a_parent = a.parents[i]
var b_parent = b.parents[j]
var aParent = a.parents[i]
var bParent = b.parents[j]
if a.returnStates[i] == b.returnStates[j] {
// same payload (stack tops are equal), must yield merged singleton
var payload = a.returnStates[i]
// $+$ = $
var bothDollars = payload == BasePredictionContextEMPTY_RETURN_STATE && a_parent == nil && b_parent == nil
var ax_ax = (a_parent != nil && b_parent != nil && a_parent == b_parent) // ax+ax
var bothDollars = payload == BasePredictionContextEmptyReturnState && aParent == nil && bParent == nil
var axAX = (aParent != nil && bParent != nil && aParent == bParent) // ax+ax
// ->
// ax
if bothDollars || ax_ax {
mergedParents[k] = a_parent // choose left
if bothDollars || axAX {
mergedParents[k] = aParent // choose left
mergedReturnStates[k] = payload
} else { // ax+ay -> a'[x,y]
var mergedParent = merge(a_parent, b_parent, rootIsWildcard, mergeCache)
var mergedParent = merge(aParent, bParent, rootIsWildcard, mergeCache)
mergedParents[k] = mergedParent
mergedReturnStates[k] = payload
}
i += 1 // hop over left one as usual
j += 1 // but also Skip one in right side since we merge
i++ // hop over left one as usual
j++ // but also Skip one in right side since we merge
} else if a.returnStates[i] < b.returnStates[j] { // copy a[i] to M
mergedParents[k] = a_parent
mergedParents[k] = aParent
mergedReturnStates[k] = a.returnStates[i]
i += 1
i++
} else { // b > a, copy b[j] to M
mergedParents[k] = b_parent
mergedParents[k] = bParent
mergedReturnStates[k] = b.returnStates[j]
j += 1
j++
}
k += 1
k++
}
// copy over any payloads remaining in either array
if i < len(a.returnStates) {
for p := i; p < len(a.returnStates); p++ {
mergedParents[k] = a.parents[p]
mergedReturnStates[k] = a.returnStates[p]
k += 1
k++
}
} else {
for p := j; p < len(b.returnStates); p++ {
mergedParents[k] = b.parents[p]
mergedReturnStates[k] = b.returnStates[p]
k += 1
k++
}
}
// trim merged if we combined a few that had same stack tops
if k < len(mergedParents) { // write index < last position trim
if k == 1 { // for just one merged element, return singleton top
var a_ = SingletonBasePredictionContextCreate(mergedParents[0], mergedReturnStates[0])
var pc = SingletonBasePredictionContextCreate(mergedParents[0], mergedReturnStates[0])
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), a_)
mergeCache.set(a.Hash(), b.Hash(), pc)
}
return a_
return pc
}
mergedParents = mergedParents[0:k]
mergedReturnStates = mergedReturnStates[0:k]
@ -753,7 +753,7 @@ func getCachedBasePredictionContext(context PredictionContext, contextCache *Pre
visited[context] = context
return context
}
var updated PredictionContext = nil
var updated PredictionContext
if len(parents) == 0 {
updated = BasePredictionContextEMPTY
} else if len(parents) == 1 {

View File

@ -432,7 +432,7 @@ func PredictionModehasConflictingAltSet(altsets []*BitSet) bool {
// others, otherwise {@code false}
//
func PredictionModeallSubsetsEqual(altsets []*BitSet) bool {
var first *BitSet = nil
var first *BitSet
for i := 0; i < len(altsets); i++ {
var alts = altsets[i]
@ -457,9 +457,9 @@ func PredictionModegetUniqueAlt(altsets []*BitSet) int {
var all = PredictionModeGetAlts(altsets)
if all.length() == 1 {
return all.minValue()
} else {
return ATNInvalidAltNumber
}
return ATNInvalidAltNumber
}
// Gets the complete set of represented alternatives for a collection of
@ -501,7 +501,7 @@ func PredictionModegetConflictingAltSubsets(configs ATNConfigSet) []*BitSet {
var values = make([]*BitSet, 0)
for k, _ := range configToAlts {
for k := range configToAlts {
if strings.Index(k, "key_") != 0 {
continue
}

View File

@ -25,7 +25,7 @@ type Recognizer interface {
}
type BaseRecognizer struct {
_listeners []ErrorListener
listeners []ErrorListener
state int
RuleNames []string
@ -36,7 +36,7 @@ type BaseRecognizer struct {
func NewBaseRecognizer() *BaseRecognizer {
rec := new(BaseRecognizer)
rec._listeners = []ErrorListener{ConsoleErrorListenerINSTANCE}
rec.listeners = []ErrorListener{ConsoleErrorListenerINSTANCE}
rec.state = -1
return rec
}
@ -44,55 +44,55 @@ func NewBaseRecognizer() *BaseRecognizer {
var tokenTypeMapCache = make(map[string]int)
var ruleIndexMapCache = make(map[string]int)
func (this *BaseRecognizer) checkVersion(toolVersion string) {
func (b *BaseRecognizer) checkVersion(toolVersion string) {
var runtimeVersion = "4.5.2"
if runtimeVersion != toolVersion {
fmt.Println("ANTLR runtime and generated code versions disagree: " + runtimeVersion + "!=" + toolVersion)
}
}
func (this *BaseRecognizer) Action(context RuleContext, ruleIndex, actionIndex int) {
func (b *BaseRecognizer) Action(context RuleContext, ruleIndex, actionIndex int) {
panic("action not implemented on Recognizer!")
}
func (this *BaseRecognizer) AddErrorListener(listener ErrorListener) {
this._listeners = append(this._listeners, listener)
func (b *BaseRecognizer) AddErrorListener(listener ErrorListener) {
b.listeners = append(b.listeners, listener)
}
func (this *BaseRecognizer) RemoveErrorListeners() {
this._listeners = make([]ErrorListener, 0)
func (b *BaseRecognizer) RemoveErrorListeners() {
b.listeners = make([]ErrorListener, 0)
}
func (this *BaseRecognizer) GetRuleNames() []string {
return this.RuleNames
func (b *BaseRecognizer) GetRuleNames() []string {
return b.RuleNames
}
func (this *BaseRecognizer) GetTokenNames() []string {
return this.LiteralNames
func (b *BaseRecognizer) GetTokenNames() []string {
return b.LiteralNames
}
func (this *BaseRecognizer) GetSymbolicNames() []string {
return this.SymbolicNames
func (b *BaseRecognizer) GetSymbolicNames() []string {
return b.SymbolicNames
}
func (this *BaseRecognizer) GetLiteralNames() []string {
return this.LiteralNames
func (b *BaseRecognizer) GetLiteralNames() []string {
return b.LiteralNames
}
func (this *BaseRecognizer) GetState() int {
return this.state
func (b *BaseRecognizer) GetState() int {
return b.state
}
func (this *BaseRecognizer) SetState(v int) {
func (b *BaseRecognizer) SetState(v int) {
if PortDebug {
fmt.Println("SETTING STATE " + strconv.Itoa(v) + " from " + strconv.Itoa(this.state))
fmt.Println("SETTING STATE " + strconv.Itoa(v) + " from " + strconv.Itoa(b.state))
}
this.state = v
b.state = v
}
//func (this *Recognizer) GetTokenTypeMap() {
// var tokenNames = this.GetTokenNames()
//func (b *Recognizer) GetTokenTypeMap() {
// var tokenNames = b.GetTokenNames()
// if (tokenNames==nil) {
// panic("The current recognizer does not provide a list of token names.")
// }
@ -109,10 +109,10 @@ func (this *BaseRecognizer) SetState(v int) {
//
// <p>Used for XPath and tree pattern compilation.</p>
//
func (this *BaseRecognizer) GetRuleIndexMap() map[string]int {
func (b *BaseRecognizer) GetRuleIndexMap() map[string]int {
panic("Method not defined!")
// var ruleNames = this.GetRuleNames()
// var ruleNames = b.GetRuleNames()
// if (ruleNames==nil) {
// panic("The current recognizer does not provide a list of rule names.")
// }
@ -125,9 +125,9 @@ func (this *BaseRecognizer) GetRuleIndexMap() map[string]int {
// return result
}
func (this *BaseRecognizer) GetTokenType(tokenName string) int {
func (b *BaseRecognizer) GetTokenType(tokenName string) int {
panic("Method not defined!")
// var ttype = this.GetTokenTypeMap()[tokenName]
// var ttype = b.GetTokenTypeMap()[tokenName]
// if (ttype !=nil) {
// return ttype
// } else {
@ -135,7 +135,7 @@ func (this *BaseRecognizer) GetTokenType(tokenName string) int {
// }
}
//func (this *Recognizer) GetTokenTypeMap() map[string]int {
//func (b *Recognizer) GetTokenTypeMap() map[string]int {
// Vocabulary vocabulary = getVocabulary();
//
// Synchronized (tokenTypeMapCache) {
@ -164,7 +164,7 @@ func (this *BaseRecognizer) GetTokenType(tokenName string) int {
//}
// What is the error header, normally line/character position information?//
func (this *BaseRecognizer) GetErrorHeader(e RecognitionException) string {
func (b *BaseRecognizer) GetErrorHeader(e RecognitionException) string {
var line = e.GetOffendingToken().GetLine()
var column = e.GetOffendingToken().GetColumn()
return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
@ -183,7 +183,7 @@ func (this *BaseRecognizer) GetErrorHeader(e RecognitionException) string {
// feature when necessary. For example, see
// {@link DefaultErrorStrategy//GetTokenErrorDisplay}.
//
func (this *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
func (b *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
if t == nil {
return "<no token>"
}
@ -202,16 +202,16 @@ func (this *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
return "'" + s + "'"
}
func (this *BaseRecognizer) GetErrorListenerDispatch() ErrorListener {
return NewProxyErrorListener(this._listeners)
func (b *BaseRecognizer) GetErrorListenerDispatch() ErrorListener {
return NewProxyErrorListener(b.listeners)
}
// subclass needs to override these if there are sempreds or actions
// that the ATN interp needs to execute
func (this *BaseRecognizer) Sempred(localctx RuleContext, ruleIndex int, actionIndex int) bool {
func (b *BaseRecognizer) Sempred(localctx RuleContext, ruleIndex int, actionIndex int) bool {
return true
}
func (this *BaseRecognizer) Precpred(localctx RuleContext, precedence int) bool {
func (b *BaseRecognizer) Precpred(localctx RuleContext, precedence int) bool {
return true
}

View File

@ -5,9 +5,9 @@ package antlr
// naturally the invoking state is not valid. The parent link
// provides a chain upwards from the current rule invocation to the root
// of the invocation tree, forming a stack. We actually carry no
// information about the rule associated with this context (except
// information about the rule associated with b context (except
// when parsing). We keep only the state number of the invoking state from
// the ATN submachine that invoked this. Contrast this with the s
// the ATN submachine that invoked b. Contrast b with the s
// pointer inside ParserRuleContext that tracks the current state
// being "executed" for the current rule.
//
@ -34,23 +34,21 @@ type RuleContext interface {
}
type BaseRuleContext struct {
parentCtx RuleContext
invokingState int
RuleIndex int
}
func NewBaseRuleContext(parent RuleContext, invokingState int) *BaseRuleContext {
rn := new(BaseRuleContext)
// What context invoked this rule?
// What context invoked b rule?
rn.parentCtx = parent
// What state invoked the rule associated with this context?
// What state invoked the rule associated with b context?
// The "return address" is the followState of invokingState
// If parent is nil, this should be -1.
// If parent is nil, b should be -1.
if parent == nil {
rn.invokingState = -1
} else {
@ -60,40 +58,40 @@ func NewBaseRuleContext(parent RuleContext, invokingState int) *BaseRuleContext
return rn
}
func (this *BaseRuleContext) GetBaseRuleContext() *BaseRuleContext {
return this
func (b *BaseRuleContext) GetBaseRuleContext() *BaseRuleContext {
return b
}
func (this *BaseRuleContext) SetParent(v Tree) {
this.parentCtx = v.(RuleContext)
func (b *BaseRuleContext) SetParent(v Tree) {
b.parentCtx = v.(RuleContext)
}
func (this *BaseRuleContext) GetInvokingState() int {
return this.invokingState
func (b *BaseRuleContext) GetInvokingState() int {
return b.invokingState
}
func (this *BaseRuleContext) SetInvokingState(t int) {
this.invokingState = t
func (b *BaseRuleContext) SetInvokingState(t int) {
b.invokingState = t
}
func (this *BaseRuleContext) GetRuleIndex() int {
return this.RuleIndex
func (b *BaseRuleContext) GetRuleIndex() int {
return b.RuleIndex
}
// A context is empty if there is no invoking state meaning nobody call
// current context.
func (this *BaseRuleContext) IsEmpty() bool {
return this.invokingState == -1
func (b *BaseRuleContext) IsEmpty() bool {
return b.invokingState == -1
}
// Return the combined text of all child nodes. This method only considers
// tokens which have been added to the parse tree.
// <p>
// Since tokens on hidden channels (e.g. whitespace or comments) are not
// added to the parse trees, they will not appear in the output of this
// added to the parse trees, they will not appear in the output of b
// method.
//
func (this *BaseRuleContext) GetParent() Tree {
return this.parentCtx
func (b *BaseRuleContext) GetParent() Tree {
return b.parentCtx
}

View File

@ -31,9 +31,9 @@ func SemanticContextandContext(a, b SemanticContext) SemanticContext {
var result = NewAND(a, b)
if len(result.opnds) == 1 {
return result.opnds[0]
} else {
return result
}
return result
}
func SemanticContextorContext(a, b SemanticContext) SemanticContext {
@ -49,9 +49,9 @@ func SemanticContextorContext(a, b SemanticContext) SemanticContext {
var result = NewOR(a, b)
if len(result.opnds) == 1 {
return result.opnds[0]
} else {
return result
}
return result
}
type Predicate struct {
@ -74,39 +74,39 @@ func NewPredicate(ruleIndex, predIndex int, isCtxDependent bool) *Predicate {
var SemanticContextNone SemanticContext = NewPredicate(-1, -1, false)
func (this *Predicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
return this
func (p *Predicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
return p
}
func (this *Predicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
func (p *Predicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
var localctx RuleContext = nil
var localctx RuleContext
if this.isCtxDependent {
if p.isCtxDependent {
localctx = outerContext
}
return parser.Sempred(localctx, this.ruleIndex, this.predIndex)
return parser.Sempred(localctx, p.ruleIndex, p.predIndex)
}
func (this *Predicate) Hash() string {
return strconv.Itoa(this.ruleIndex) + "/" + strconv.Itoa(this.predIndex) + "/" + fmt.Sprint(this.isCtxDependent)
func (p *Predicate) Hash() string {
return strconv.Itoa(p.ruleIndex) + "/" + strconv.Itoa(p.predIndex) + "/" + fmt.Sprint(p.isCtxDependent)
}
func (this *Predicate) equals(other interface{}) bool {
if this == other {
func (p *Predicate) equals(other interface{}) bool {
if p == other {
return true
} else if _, ok := other.(*Predicate); !ok {
return false
} else {
return this.ruleIndex == other.(*Predicate).ruleIndex &&
this.predIndex == other.(*Predicate).predIndex &&
this.isCtxDependent == other.(*Predicate).isCtxDependent
return p.ruleIndex == other.(*Predicate).ruleIndex &&
p.predIndex == other.(*Predicate).predIndex &&
p.isCtxDependent == other.(*Predicate).isCtxDependent
}
}
func (this *Predicate) String() string {
return "{" + strconv.Itoa(this.ruleIndex) + ":" + strconv.Itoa(this.predIndex) + "}?"
func (p *Predicate) String() string {
return "{" + strconv.Itoa(p.ruleIndex) + ":" + strconv.Itoa(p.predIndex) + "}?"
}
type PrecedencePredicate struct {
@ -115,44 +115,44 @@ type PrecedencePredicate struct {
func NewPrecedencePredicate(precedence int) *PrecedencePredicate {
this := new(PrecedencePredicate)
this.precedence = precedence
p := new(PrecedencePredicate)
p.precedence = precedence
return this
return p
}
func (this *PrecedencePredicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
return parser.Precpred(outerContext, this.precedence)
func (p *PrecedencePredicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
return parser.Precpred(outerContext, p.precedence)
}
func (this *PrecedencePredicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
if parser.Precpred(outerContext, this.precedence) {
func (p *PrecedencePredicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
if parser.Precpred(outerContext, p.precedence) {
return SemanticContextNone
} else {
return nil
}
return nil
}
func (this *PrecedencePredicate) compareTo(other *PrecedencePredicate) int {
return this.precedence - other.precedence
func (p *PrecedencePredicate) compareTo(other *PrecedencePredicate) int {
return p.precedence - other.precedence
}
func (this *PrecedencePredicate) Hash() string {
func (p *PrecedencePredicate) Hash() string {
return "31"
}
func (this *PrecedencePredicate) equals(other interface{}) bool {
if this == other {
func (p *PrecedencePredicate) equals(other interface{}) bool {
if p == other {
return true
} else if _, ok := other.(*PrecedencePredicate); !ok {
return false
} else {
return this.precedence == other.(*PrecedencePredicate).precedence
return p.precedence == other.(*PrecedencePredicate).precedence
}
}
func (this *PrecedencePredicate) String() string {
return "{" + strconv.Itoa(this.precedence) + ">=prec}?"
func (p *PrecedencePredicate) String() string {
return "{" + strconv.Itoa(p.precedence) + ">=prec}?"
}
func PrecedencePredicatefilterPrecedencePredicates(set *Set) []*PrecedencePredicate {
@ -195,7 +195,7 @@ func NewAND(a, b SemanticContext) *AND {
var precedencePredicates = PrecedencePredicatefilterPrecedencePredicates(operands)
if len(precedencePredicates) > 0 {
// interested in the transition with the lowest precedence
var reduced *PrecedencePredicate = nil
var reduced *PrecedencePredicate
for _, p := range precedencePredicates {
if reduced == nil || p.precedence < reduced.precedence {
@ -212,20 +212,20 @@ func NewAND(a, b SemanticContext) *AND {
vs[i] = v.(SemanticContext)
}
this := new(AND)
this.opnds = opnds
and := new(AND)
and.opnds = opnds
return this
return and
}
func (this *AND) equals(other interface{}) bool {
if this == other {
func (a *AND) equals(other interface{}) bool {
if a == other {
return true
} else if _, ok := other.(*AND); !ok {
return false
} else {
for i, v := range other.(*AND).opnds {
if !this.opnds[i].equals(v) {
if !a.opnds[i].equals(v) {
return false
}
}
@ -233,32 +233,32 @@ func (this *AND) equals(other interface{}) bool {
}
}
func (this *AND) Hash() string {
return fmt.Sprint(this.opnds) + "/AND"
func (a *AND) Hash() string {
return fmt.Sprint(a.opnds) + "/AND"
}
//
// {@inheritDoc}
//
// <p>
// The evaluation of predicates by this context is short-circuiting, but
// The evaluation of predicates by a context is short-circuiting, but
// unordered.</p>
//
func (this *AND) evaluate(parser Recognizer, outerContext RuleContext) bool {
for i := 0; i < len(this.opnds); i++ {
if !this.opnds[i].evaluate(parser, outerContext) {
func (a *AND) evaluate(parser Recognizer, outerContext RuleContext) bool {
for i := 0; i < len(a.opnds); i++ {
if !a.opnds[i].evaluate(parser, outerContext) {
return false
}
}
return true
}
func (this *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
func (a *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
var differs = false
var operands = make([]SemanticContext, 0)
for i := 0; i < len(this.opnds); i++ {
var context = this.opnds[i]
for i := 0; i < len(a.opnds); i++ {
var context = a.opnds[i]
var evaluated = context.evalPrecedence(parser, outerContext)
differs = differs || (evaluated != context)
if evaluated == nil {
@ -270,7 +270,7 @@ func (this *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) Sem
}
}
if !differs {
return this
return a
}
if len(operands) == 0 {
@ -278,7 +278,7 @@ func (this *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) Sem
return SemanticContextNone
}
var result SemanticContext = nil
var result SemanticContext
for _, o := range operands {
if result == nil {
@ -291,18 +291,18 @@ func (this *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) Sem
return result
}
func (this *AND) String() string {
func (a *AND) String() string {
var s = ""
for _, o := range this.opnds {
for _, o := range a.opnds {
s += "&& " + o.String()
}
if len(s) > 3 {
return s[0:3]
} else {
return s
}
return s
}
//
@ -335,7 +335,7 @@ func NewOR(a, b SemanticContext) *OR {
var precedencePredicates = PrecedencePredicatefilterPrecedencePredicates(operands)
if len(precedencePredicates) > 0 {
// interested in the transition with the lowest precedence
var reduced *PrecedencePredicate = nil
var reduced *PrecedencePredicate
for _, p := range precedencePredicates {
if reduced == nil || p.precedence > reduced.precedence {
@ -353,20 +353,20 @@ func NewOR(a, b SemanticContext) *OR {
vs[i] = v.(SemanticContext)
}
this := new(OR)
this.opnds = opnds
o := new(OR)
o.opnds = opnds
return this
return o
}
func (this *OR) equals(other interface{}) bool {
if this == other {
func (o *OR) equals(other interface{}) bool {
if o == other {
return true
} else if _, ok := other.(*OR); !ok {
return false
} else {
for i, v := range other.(*OR).opnds {
if !this.opnds[i].equals(v) {
if !o.opnds[i].equals(v) {
return false
}
}
@ -374,28 +374,28 @@ func (this *OR) equals(other interface{}) bool {
}
}
func (this *OR) Hash() string {
return fmt.Sprint(this.opnds) + "/OR"
func (o *OR) Hash() string {
return fmt.Sprint(o.opnds) + "/OR"
}
// <p>
// The evaluation of predicates by this context is short-circuiting, but
// The evaluation of predicates by o context is short-circuiting, but
// unordered.</p>
//
func (this *OR) evaluate(parser Recognizer, outerContext RuleContext) bool {
for i := 0; i < len(this.opnds); i++ {
if this.opnds[i].evaluate(parser, outerContext) {
func (o *OR) evaluate(parser Recognizer, outerContext RuleContext) bool {
for i := 0; i < len(o.opnds); i++ {
if o.opnds[i].evaluate(parser, outerContext) {
return true
}
}
return false
}
func (this *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
func (o *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
var differs = false
var operands = make([]SemanticContext, 0)
for i := 0; i < len(this.opnds); i++ {
var context = this.opnds[i]
for i := 0; i < len(o.opnds); i++ {
var context = o.opnds[i]
var evaluated = context.evalPrecedence(parser, outerContext)
differs = differs || (evaluated != context)
if evaluated == SemanticContextNone {
@ -407,13 +407,13 @@ func (this *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) Sema
}
}
if !differs {
return this
return o
}
if len(operands) == 0 {
// all elements were false, so the OR context is false
return nil
}
var result SemanticContext = nil
var result SemanticContext
for _, o := range operands {
if result == nil {
@ -426,16 +426,16 @@ func (this *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) Sema
return result
}
func (this *OR) String() string {
func (o *OR) String() string {
var s = ""
for _, o := range this.opnds {
for _, o := range o.opnds {
s += "|| " + o.String()
}
if len(s) > 3 {
return s[0:3]
} else {
return s
}
return s
}

View File

@ -42,7 +42,7 @@ type BaseToken struct {
tokenIndex int // from 0..n-1 of the token object in the input stream
line int // line=1..n of the 1st character
column int // beginning of the line at which it occurs, 0..n-1
_text string // text of the token.
text string // text of the token.
readOnly bool
}
@ -69,48 +69,48 @@ const (
TokenHiddenChannel = 1
)
func (this *BaseToken) GetChannel() int {
return this.channel
func (b *BaseToken) GetChannel() int {
return b.channel
}
func (this *BaseToken) GetStart() int {
return this.start
func (b *BaseToken) GetStart() int {
return b.start
}
func (this *BaseToken) GetStop() int {
return this.stop
func (b *BaseToken) GetStop() int {
return b.stop
}
func (this *BaseToken) GetLine() int {
return this.line
func (b *BaseToken) GetLine() int {
return b.line
}
func (this *BaseToken) GetColumn() int {
return this.column
func (b *BaseToken) GetColumn() int {
return b.column
}
func (this *BaseToken) GetTokenType() int {
return this.tokenType
func (b *BaseToken) GetTokenType() int {
return b.tokenType
}
func (this *BaseToken) GetSource() *TokenSourceCharStreamPair {
return this.source
func (b *BaseToken) GetSource() *TokenSourceCharStreamPair {
return b.source
}
func (this *BaseToken) GetTokenIndex() int {
return this.tokenIndex
func (b *BaseToken) GetTokenIndex() int {
return b.tokenIndex
}
func (this *BaseToken) SetTokenIndex(v int) {
this.tokenIndex = v
func (b *BaseToken) SetTokenIndex(v int) {
b.tokenIndex = v
}
func (this *BaseToken) GetTokenSource() TokenSource {
return this.source.tokenSource
func (b *BaseToken) GetTokenSource() TokenSource {
return b.source.tokenSource
}
func (this *BaseToken) GetInputStream() CharStream {
return this.source.charStream
func (b *BaseToken) GetInputStream() CharStream {
return b.source.charStream
}
type CommonToken struct {
@ -155,37 +155,36 @@ func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start
//
// @param oldToken The token to copy.
//
func (ct *CommonToken) clone() *CommonToken {
var t = NewCommonToken(ct.source, ct.tokenType, ct.channel, ct.start, ct.stop)
t.tokenIndex = ct.GetTokenIndex()
t.line = ct.GetLine()
t.column = ct.GetColumn()
t._text = ct.GetText()
func (c *CommonToken) clone() *CommonToken {
var t = NewCommonToken(c.source, c.tokenType, c.channel, c.start, c.stop)
t.tokenIndex = c.GetTokenIndex()
t.line = c.GetLine()
t.column = c.GetColumn()
t.text = c.GetText()
return t
}
func (this *CommonToken) GetText() string {
if this._text != "" {
return this._text
func (c *CommonToken) GetText() string {
if c.text != "" {
return c.text
}
var input = this.GetInputStream()
var input = c.GetInputStream()
if input == nil {
return ""
}
var n = input.Size()
if this.start < n && this.stop < n {
return input.GetTextFromInterval(NewInterval(this.start, this.stop))
} else {
return "<EOF>"
if c.start < n && c.stop < n {
return input.GetTextFromInterval(NewInterval(c.start, c.stop))
}
return "<EOF>"
}
func (this *CommonToken) SetText(text string) {
this._text = text
func (c *CommonToken) SetText(text string) {
c.text = text
}
func (this *CommonToken) String() string {
var txt = this.GetText()
func (c *CommonToken) String() string {
var txt = c.GetText()
if txt != "" {
txt = strings.Replace(txt, "\n", "\\n", -1)
txt = strings.Replace(txt, "\r", "\\r", -1)
@ -195,13 +194,13 @@ func (this *CommonToken) String() string {
}
var ch string
if this.channel > 0 {
ch = ",channel=" + strconv.Itoa(this.channel)
if c.channel > 0 {
ch = ",channel=" + strconv.Itoa(c.channel)
} else {
ch = ""
}
return "[@" + strconv.Itoa(this.tokenIndex) + "," + strconv.Itoa(this.start) + ":" + strconv.Itoa(this.stop) + "='" +
txt + "',<" + strconv.Itoa(this.tokenType) + ">" +
ch + "," + strconv.Itoa(this.line) + ":" + strconv.Itoa(this.column) + "]"
return "[@" + strconv.Itoa(c.tokenIndex) + "," + strconv.Itoa(c.start) + ":" + strconv.Itoa(c.stop) + "='" +
txt + "',<" + strconv.Itoa(c.tokenType) + ">" +
ch + "," + strconv.Itoa(c.line) + ":" + strconv.Itoa(c.column) + "]"
}

View File

@ -26,14 +26,14 @@ type Transition interface {
type BaseTransition struct {
target ATNState
isEpsilon bool
label_ int
label *IntervalSet
label int
intervalSet *IntervalSet
serializationType int
}
func NewBaseTransition(target ATNState) *BaseTransition {
if target == nil || target == nil {
if target == nil {
panic("target cannot be nil.")
}
@ -42,7 +42,7 @@ func NewBaseTransition(target ATNState) *BaseTransition {
t.target = target
// Are we epsilon, action, sempred?
t.isEpsilon = false
t.label = nil
t.intervalSet = nil
return t
}
@ -60,7 +60,7 @@ func (t *BaseTransition) getIsEpsilon() bool {
}
func (t *BaseTransition) getLabel() *IntervalSet {
return t.label
return t.intervalSet
}
func (t *BaseTransition) getSerializationType() int {
@ -79,7 +79,7 @@ const (
TransitionATOM = 5
TransitionACTION = 6
TransitionSET = 7 // ~(A|B) or ~atom, wildcard, which convert to next 2
TransitionNOT_SET = 8
TransitionNOTSET = 8
TransitionWILDCARD = 9
TransitionPRECEDENCE = 10
)
@ -117,7 +117,7 @@ var TransitionserializationNames = []string{
// TransitionATOM,
// TransitionACTION,
// TransitionSET,
// TransitionNOT_SET,
// TransitionNOTSET,
// TransitionWILDCARD,
// TransitionPRECEDENCE
//}
@ -127,13 +127,13 @@ type AtomTransition struct {
*BaseTransition
}
func NewAtomTransition(target ATNState, label int) *AtomTransition {
func NewAtomTransition(target ATNState, intervalSet int) *AtomTransition {
t := new(AtomTransition)
t.BaseTransition = NewBaseTransition(target)
t.label_ = label // The token type or character value or, signifies special label.
t.label = t.makeLabel()
t.label = intervalSet // The token type or character value or, signifies special intervalSet.
t.intervalSet = t.makeLabel()
t.serializationType = TransitionATOM
return t
@ -141,16 +141,16 @@ func NewAtomTransition(target ATNState, label int) *AtomTransition {
func (t *AtomTransition) makeLabel() *IntervalSet {
var s = NewIntervalSet()
s.addOne(t.label_)
s.addOne(t.label)
return s
}
func (t *AtomTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
return t.label_ == symbol
return t.label == symbol
}
func (t *AtomTransition) String() string {
return strconv.Itoa(t.label_)
return strconv.Itoa(t.label)
}
type RuleTransition struct {
@ -217,7 +217,7 @@ func NewRangeTransition(target ATNState, start, stop int) *RangeTransition {
t.serializationType = TransitionRANGE
t.start = start
t.stop = stop
t.label = t.makeLabel()
t.intervalSet = t.makeLabel()
return t
}
@ -324,22 +324,22 @@ func NewSetTransition(target ATNState, set *IntervalSet) *SetTransition {
t.BaseTransition = NewBaseTransition(target)
t.serializationType = TransitionSET
if set != nil && set != nil {
t.label = set
if set != nil {
t.intervalSet = set
} else {
t.label = NewIntervalSet()
t.label.addOne(TokenInvalidType)
t.intervalSet = NewIntervalSet()
t.intervalSet.addOne(TokenInvalidType)
}
return t
}
func (t *SetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
return t.label.contains(symbol)
return t.intervalSet.contains(symbol)
}
func (t *SetTransition) String() string {
return t.label.String()
return t.intervalSet.String()
}
type NotSetTransition struct {
@ -352,17 +352,17 @@ func NewNotSetTransition(target ATNState, set *IntervalSet) *NotSetTransition {
t.SetTransition = NewSetTransition(target, set)
t.serializationType = TransitionNOT_SET
t.serializationType = TransitionNOTSET
return t
}
func (t *NotSetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
return symbol >= minVocabSymbol && symbol <= maxVocabSymbol && !t.label.contains(symbol)
return symbol >= minVocabSymbol && symbol <= maxVocabSymbol && !t.intervalSet.contains(symbol)
}
func (t *NotSetTransition) String() string {
return "~" + t.label.String()
return "~" + t.intervalSet.String()
}
type WildcardTransition struct {

View File

@ -54,12 +54,12 @@ type ParseTreeVisitor interface {
VisitErrorNode(node ErrorNode) interface{}
}
type BaseParseTreeVisitor struct {}
type BaseParseTreeVisitor struct{}
func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return nil }
func (v *BaseParseTreeVisitor) VisitChildren(node RuleNode) interface{} { return nil }
func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return nil }
func (v *BaseParseTreeVisitor) VisitChildren(node RuleNode) interface{} { return nil }
func (v *BaseParseTreeVisitor) VisitTerminal(node TerminalNode) interface{} { return nil }
func (v *BaseParseTreeVisitor) VisitErrorNode(node ErrorNode) interface{} { return nil }
func (v *BaseParseTreeVisitor) VisitErrorNode(node ErrorNode) interface{} { return nil }
// TODO
//func (this ParseTreeVisitor) Visit(ctx) {
@ -89,11 +89,12 @@ type ParseTreeListener interface {
ExitEveryRule(ctx ParserRuleContext)
}
type BaseParseTreeListener struct {}
func (l *BaseParseTreeListener) VisitTerminal(node TerminalNode){}
func (l *BaseParseTreeListener) VisitErrorNode(node ErrorNode){}
func (l *BaseParseTreeListener) EnterEveryRule(ctx ParserRuleContext){}
func (l *BaseParseTreeListener) ExitEveryRule(ctx ParserRuleContext){}
type BaseParseTreeListener struct{}
func (l *BaseParseTreeListener) VisitTerminal(node TerminalNode) {}
func (l *BaseParseTreeListener) VisitErrorNode(node ErrorNode) {}
func (l *BaseParseTreeListener) EnterEveryRule(ctx ParserRuleContext) {}
func (l *BaseParseTreeListener) ExitEveryRule(ctx ParserRuleContext) {}
type TerminalNodeImpl struct {
parentCtx RuleContext
@ -110,64 +111,64 @@ func NewTerminalNodeImpl(symbol Token) *TerminalNodeImpl {
return tn
}
func (this *TerminalNodeImpl) GetChild(i int) Tree {
func (t *TerminalNodeImpl) GetChild(i int) Tree {
return nil
}
func (this *TerminalNodeImpl) GetChildren() []Tree {
func (t *TerminalNodeImpl) GetChildren() []Tree {
return nil
}
func (this *TerminalNodeImpl) SetChildren(t []Tree) {
func (t *TerminalNodeImpl) SetChildren(tree []Tree) {
panic("Cannot set children on terminal node")
}
func (this *TerminalNodeImpl) GetSymbol() Token {
return this.symbol
func (t *TerminalNodeImpl) GetSymbol() Token {
return t.symbol
}
func (this *TerminalNodeImpl) GetParent() Tree {
return this.parentCtx
func (t *TerminalNodeImpl) GetParent() Tree {
return t.parentCtx
}
func (this *TerminalNodeImpl) SetParent(t Tree) {
this.parentCtx = t.(RuleContext)
func (t *TerminalNodeImpl) SetParent(tree Tree) {
t.parentCtx = tree.(RuleContext)
}
func (this *TerminalNodeImpl) GetPayload() interface{} {
return this.symbol
func (t *TerminalNodeImpl) GetPayload() interface{} {
return t.symbol
}
func (this *TerminalNodeImpl) GetSourceInterval() *Interval {
if this.symbol == nil {
func (t *TerminalNodeImpl) GetSourceInterval() *Interval {
if t.symbol == nil {
return TreeInvalidInterval
}
var tokenIndex = this.symbol.GetTokenIndex()
var tokenIndex = t.symbol.GetTokenIndex()
return NewInterval(tokenIndex, tokenIndex)
}
func (this *TerminalNodeImpl) GetChildCount() int {
func (t *TerminalNodeImpl) GetChildCount() int {
return 0
}
func (this *TerminalNodeImpl) Accept(v ParseTreeVisitor) interface{} {
return v.VisitTerminal(this)
func (t *TerminalNodeImpl) Accept(v ParseTreeVisitor) interface{} {
return v.VisitTerminal(t)
}
func (this *TerminalNodeImpl) GetText() string {
return this.symbol.GetText()
func (t *TerminalNodeImpl) GetText() string {
return t.symbol.GetText()
}
func (this *TerminalNodeImpl) String() string {
if this.symbol.GetTokenType() == TokenEOF {
func (t *TerminalNodeImpl) String() string {
if t.symbol.GetTokenType() == TokenEOF {
return "<EOF>"
} else {
return this.symbol.GetText()
}
return t.symbol.GetText()
}
func (this *TerminalNodeImpl) ToStringTree(s []string, r Recognizer) string {
return this.String()
func (t *TerminalNodeImpl) ToStringTree(s []string, r Recognizer) string {
return t.String()
}
// Represents a token that was consumed during reSynchronization
@ -186,12 +187,12 @@ func NewErrorNodeImpl(token Token) *ErrorNodeImpl {
return en
}
func (this *ErrorNodeImpl) IsErrorNode() bool {
func (e *ErrorNodeImpl) IsErrorNode() bool {
return true
}
func (this *ErrorNodeImpl) Accept(v ParseTreeVisitor) interface{} {
return v.VisitErrorNode(this)
func (e *ErrorNodeImpl) Accept(v ParseTreeVisitor) interface{} {
return v.VisitErrorNode(e)
}
type ParseTreeWalker struct {
@ -201,19 +202,19 @@ func NewParseTreeWalker() *ParseTreeWalker {
return new(ParseTreeWalker)
}
func (this *ParseTreeWalker) Walk(listener ParseTreeListener, t Tree) {
func (p *ParseTreeWalker) Walk(listener ParseTreeListener, t Tree) {
if errorNode, ok := t.(ErrorNode); ok {
listener.VisitErrorNode(errorNode)
} else if term, ok := t.(TerminalNode); ok {
listener.VisitTerminal(term)
} else {
this.EnterRule(listener, t.(RuleNode))
p.EnterRule(listener, t.(RuleNode))
for i := 0; i < t.GetChildCount(); i++ {
var child = t.GetChild(i)
this.Walk(listener, child)
p.Walk(listener, child)
}
this.ExitRule(listener, t.(RuleNode))
p.ExitRule(listener, t.(RuleNode))
}
}
@ -223,13 +224,13 @@ func (this *ParseTreeWalker) Walk(listener ParseTreeListener, t Tree) {
// {@link RuleContext}-specific event. First we trigger the generic and then
// the rule specific. We to them in reverse order upon finishing the node.
//
func (this *ParseTreeWalker) EnterRule(listener ParseTreeListener, r RuleNode) {
func (p *ParseTreeWalker) EnterRule(listener ParseTreeListener, r RuleNode) {
var ctx = r.GetRuleContext().(ParserRuleContext)
listener.EnterEveryRule(ctx)
ctx.EnterRule(listener)
}
func (this *ParseTreeWalker) ExitRule(listener ParseTreeListener, r RuleNode) {
func (p *ParseTreeWalker) ExitRule(listener ParseTreeListener, r RuleNode) {
var ctx = r.GetRuleContext().(ParserRuleContext)
ctx.ExitRule(listener)
listener.ExitEveryRule(ctx)

View File

@ -93,11 +93,11 @@ func TreesfindAllRuleNodes(t ParseTree, ruleIndex int) []ParseTree {
func TreesfindAllNodes(t ParseTree, index int, findTokens bool) []ParseTree {
var nodes = make([]ParseTree, 0)
Trees_findAllNodes(t, index, findTokens, nodes)
TreesFindAllNodes(t, index, findTokens, nodes)
return nodes
}
func Trees_findAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTree) {
func TreesFindAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTree) {
// check this node (the root) first
t2, ok := t.(TerminalNode)
@ -114,7 +114,7 @@ func Trees_findAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTr
}
// check children
for i := 0; i < t.GetChildCount(); i++ {
Trees_findAllNodes(t.GetChild(i).(ParseTree), index, findTokens, nodes)
TreesFindAllNodes(t.GetChild(i).(ParseTree), index, findTokens, nodes)
}
}

View File

@ -114,44 +114,43 @@ func hashCode(s string) string {
return fmt.Sprint(h.Sum32())
}
func (this *Set) length() int {
return len(this.data)
func (s *Set) length() int {
return len(s.data)
}
func (this *Set) add(value interface{}) interface{} {
func (s *Set) add(value interface{}) interface{} {
var hash = this.hashFunction(value)
var hash = s.hashFunction(value)
var key = "hash_" + hashCode(hash)
values := this.data[key]
values := s.data[key]
if this.data[key] != nil {
if s.data[key] != nil {
for i := 0; i < len(values); i++ {
if this.equalsFunction(value, values[i]) {
if s.equalsFunction(value, values[i]) {
return values[i]
}
}
this.data[key] = append(this.data[key], value)
s.data[key] = append(s.data[key], value)
return value
}
this.data[key] = []interface{}{value}
s.data[key] = []interface{}{value}
return value
}
func (this *Set) contains(value interface{}) bool {
func (s *Set) contains(value interface{}) bool {
hash := this.hashFunction(value)
hash := s.hashFunction(value)
key := "hash_" + hashCode(hash)
values := this.data[key]
values := s.data[key]
if this.data[key] != nil {
if s.data[key] != nil {
for i := 0; i < len(values); i++ {
if this.equalsFunction(value, values[i]) {
if s.equalsFunction(value, values[i]) {
return true
}
}
@ -159,28 +158,28 @@ func (this *Set) contains(value interface{}) bool {
return false
}
func (this *Set) values() []interface{} {
func (s *Set) values() []interface{} {
var l = make([]interface{}, 0)
for key, _ := range this.data {
for key := range s.data {
if strings.Index(key, "hash_") == 0 {
l = append(l, this.data[key]...)
l = append(l, s.data[key]...)
}
}
return l
}
func (this *Set) String() string {
func (s *Set) String() string {
s := ""
r := ""
for _, av := range this.data {
for _, av := range s.data {
for _, v := range av {
s += fmt.Sprint(v)
r += fmt.Sprint(v)
}
}
return s
return r
}
type BitSet struct {
@ -193,42 +192,42 @@ func NewBitSet() *BitSet {
return b
}
func (this *BitSet) add(value int) {
this.data[value] = true
func (b *BitSet) add(value int) {
b.data[value] = true
}
func (this *BitSet) clear(index int) {
delete(this.data, index)
func (b *BitSet) clear(index int) {
delete(b.data, index)
}
func (this *BitSet) or(set *BitSet) {
for k, _ := range set.data {
this.add(k)
func (b *BitSet) or(set *BitSet) {
for k := range set.data {
b.add(k)
}
}
func (this *BitSet) remove(value int) {
delete(this.data, value)
func (b *BitSet) remove(value int) {
delete(b.data, value)
}
func (this *BitSet) contains(value int) bool {
return this.data[value] == true
func (b *BitSet) contains(value int) bool {
return b.data[value] == true
}
func (this *BitSet) values() []int {
ks := make([]int, len(this.data))
func (b *BitSet) values() []int {
ks := make([]int, len(b.data))
i := 0
for k, _ := range this.data {
for k := range b.data {
ks[i] = k
i++
}
return ks
}
func (this *BitSet) minValue() int {
func (b *BitSet) minValue() int {
min := 2147483647
for k, _ := range this.data {
for k := range b.data {
if k < min {
min = k
}
@ -237,17 +236,17 @@ func (this *BitSet) minValue() int {
return min
}
func (this *BitSet) equals(other interface{}) bool {
func (b *BitSet) equals(other interface{}) bool {
otherBitSet, ok := other.(*BitSet)
if !ok {
return false
}
if len(this.data) != len(otherBitSet.data) {
if len(b.data) != len(otherBitSet.data) {
return false
}
for k, v := range this.data {
for k, v := range b.data {
if otherBitSet.data[k] != v {
return false
}
@ -256,18 +255,18 @@ func (this *BitSet) equals(other interface{}) bool {
return true
}
func (this *BitSet) length() int {
return len(this.data)
func (b *BitSet) length() int {
return len(b.data)
}
func (this *BitSet) String() string {
vals := this.values()
func (b *BitSet) String() string {
vals := b.values()
valsS := make([]string, len(vals))
for i,val := range vals {
for i, val := range vals {
valsS[i] = strconv.Itoa(val)
}
return "{" + strings.Join(valsS, ", ") + "}";
return "{" + strings.Join(valsS, ", ") + "}"
}
type AltDict struct {
@ -280,20 +279,20 @@ func NewAltDict() *AltDict {
return d
}
func (this *AltDict) Get(key string) interface{} {
func (a *AltDict) Get(key string) interface{} {
key = "k-" + key
return this.data[key]
return a.data[key]
}
func (this *AltDict) put(key string, value interface{}) {
func (a *AltDict) put(key string, value interface{}) {
key = "k-" + key
this.data[key] = value
a.data[key] = value
}
func (this *AltDict) values() []interface{} {
vs := make([]interface{}, len(this.data))
func (a *AltDict) values() []interface{} {
vs := make([]interface{}, len(a.data))
i := 0
for _, v := range this.data {
for _, v := range a.data {
vs[i] = v
i++
}
@ -310,25 +309,25 @@ func NewDoubleDict() *DoubleDict {
return dd
}
func (this *DoubleDict) Get(a string, b string) interface{} {
var d = this.data[a]
func (d *DoubleDict) Get(a string, b string) interface{} {
var data = d.data[a]
if d == nil {
if data == nil {
return nil
}
return d[b]
return data[b]
}
func (this *DoubleDict) set(a, b string, o interface{}) {
var d = this.data[a]
func (d *DoubleDict) set(a, b string, o interface{}) {
var data = d.data[a]
if d == nil {
d = make(map[string]interface{})
this.data[a] = d
if data == nil {
data = make(map[string]interface{})
d.data[a] = data
}
d[b] = o
data[b] = o
}
func EscapeWhitespace(s string, escapeSpaces bool) string {
@ -353,6 +352,5 @@ func TitleCase(str string) string {
// return re.ReplaceAllStringFunc(str, func(s string) {
// return strings.ToUpper(s[0:1]) + s[1:2]
// })
return ""
}

View File

@ -7,10 +7,11 @@ ParserFile(file, parser, namedActions) ::= <<
package parser // <file.grammarName>
import (
"github.com/antlr/antlr4/runtime/Go/src/antlr"
"reflect"
"fmt"
"strconv"
"github.com/antlr/antlr4/runtime/Go/antlr"
)
// Stopgap to suppress unused import error. We aren't certain
@ -30,7 +31,7 @@ ListenerFile(file, header) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
package parser // <file.grammarName>
import "github.com/antlr/antlr4/runtime/Go/src/antlr"
import "github.com/antlr/antlr4/runtime/Go/antlr"
// A complete listener for a parse tree produced by <file.parserName>
@ -49,7 +50,7 @@ BaseListenerFile(file, header) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
package parser // <file.grammarName>
import "github.com/antlr/antlr4/runtime/Go/src/antlr"
import "github.com/antlr/antlr4/runtime/Go/antlr"
// A complete base listener for a parse tree produced by <file.parserName>
@ -77,7 +78,7 @@ VisitorFile(file, header) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
package parser // <file.grammarName>
import "github.com/antlr/antlr4/runtime/Go/src/antlr"
import "github.com/antlr/antlr4/runtime/Go/antlr"
<header>
@ -97,7 +98,7 @@ BaseVisitorFile(file, header) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
package parser // <file.grammarName>
import "github.com/antlr/antlr4/runtime/Go/src/antlr"
import "github.com/antlr/antlr4/runtime/Go/antlr"
type Base<file.grammarName>Visitor struct {
*antlr.BaseParseTreeVisitor
@ -894,8 +895,9 @@ LexerFile(lexerFile, lexer, namedActions) ::= <<
package parser
import (
"github.com/antlr/antlr4/runtime/Go/src/antlr"
"fmt"
"github.com/antlr/antlr4/runtime/Go/antlr"
)
// suppress unused import error, many tests