Lint: Rename generic "this" var

This commit is contained in:
Will Faught 2016-05-20 18:13:49 -07:00
parent 5361aedc0b
commit 3b742ed8cb
28 changed files with 1565 additions and 1565 deletions

View File

@ -60,8 +60,8 @@ func NewATN(grammarType int, maxTokenType int) *ATN {
// If {@code ctx} is nil, the set of tokens will not include what can follow
// the rule surrounding {@code s}. In other words, the set will be
// restricted to tokens reachable staying within {@code s}'s rule.
func (this *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
var anal = NewLL1Analyzer(this)
func (a *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
var anal = NewLL1Analyzer(a)
var res = anal.LOOK(s, nil, ctx)
return res
}
@ -69,7 +69,7 @@ func (this *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
// Compute the set of valid tokens that can occur starting in {@code s} and
// staying in same rule. {@link Token//EPSILON} is in set if we reach end of
// rule.
func (this *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
func (a *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
if s.GetNextTokenWithinRule() != nil {
if PortDebug {
fmt.Println("DEBUG A")
@ -78,44 +78,44 @@ func (this *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
}
if PortDebug {
fmt.Println("DEBUG 2")
fmt.Println(this.NextTokensInContext(s, nil))
fmt.Println(a.NextTokensInContext(s, nil))
}
s.SetNextTokenWithinRule(this.NextTokensInContext(s, nil))
s.SetNextTokenWithinRule(a.NextTokensInContext(s, nil))
s.GetNextTokenWithinRule().readOnly = true
return s.GetNextTokenWithinRule()
}
func (this *ATN) NextTokens(s ATNState, ctx RuleContext) *IntervalSet {
func (a *ATN) NextTokens(s ATNState, ctx RuleContext) *IntervalSet {
if ctx == nil {
return this.NextTokensNoContext(s)
return a.NextTokensNoContext(s)
} else {
return this.NextTokensInContext(s, ctx)
return a.NextTokensInContext(s, ctx)
}
}
func (this *ATN) addState(state ATNState) {
func (a *ATN) addState(state ATNState) {
if state != nil {
state.SetATN(this)
state.SetStateNumber(len(this.states))
state.SetATN(a)
state.SetStateNumber(len(a.states))
}
this.states = append(this.states, state)
a.states = append(a.states, state)
}
func (this *ATN) removeState(state ATNState) {
this.states[state.GetStateNumber()] = nil // just free mem, don't shift states in list
func (a *ATN) removeState(state ATNState) {
a.states[state.GetStateNumber()] = nil // just free mem, don't shift states in list
}
func (this *ATN) defineDecisionState(s DecisionState) int {
this.DecisionToState = append(this.DecisionToState, s)
s.setDecision(len(this.DecisionToState) - 1)
func (a *ATN) defineDecisionState(s DecisionState) int {
a.DecisionToState = append(a.DecisionToState, s)
s.setDecision(len(a.DecisionToState) - 1)
return s.getDecision()
}
func (this *ATN) getDecisionState(decision int) DecisionState {
if len(this.DecisionToState) == 0 {
func (a *ATN) getDecisionState(decision int) DecisionState {
if len(a.DecisionToState) == 0 {
return nil
} else {
return this.DecisionToState[decision]
return a.DecisionToState[decision]
}
}
@ -137,12 +137,12 @@ func (this *ATN) getDecisionState(decision int) DecisionState {
// @panics IllegalArgumentException if the ATN does not contain a state with
// number {@code stateNumber}
func (this *ATN) getExpectedTokens(stateNumber int, ctx RuleContext) *IntervalSet {
if stateNumber < 0 || stateNumber >= len(this.states) {
func (a *ATN) getExpectedTokens(stateNumber int, ctx RuleContext) *IntervalSet {
if stateNumber < 0 || stateNumber >= len(a.states) {
panic("Invalid state number.")
}
var s = this.states[stateNumber]
var following = this.NextTokens(s, nil)
var s = a.states[stateNumber]
var following = a.NextTokens(s, nil)
if !following.contains(TokenEpsilon) {
return following
}
@ -150,9 +150,9 @@ func (this *ATN) getExpectedTokens(stateNumber int, ctx RuleContext) *IntervalSe
expected.addSet(following)
expected.removeOne(TokenEpsilon)
for ctx != nil && ctx.GetInvokingState() >= 0 && following.contains(TokenEpsilon) {
var invokingState = this.states[ctx.GetInvokingState()]
var invokingState = a.states[ctx.GetInvokingState()]
var rt = invokingState.GetTransitions()[0]
following = this.NextTokens(rt.(*RuleTransition).followState, nil)
following = a.NextTokens(rt.(*RuleTransition).followState, nil)
expected.addSet(following)
expected.removeOne(TokenEpsilon)
ctx = ctx.GetParent().(RuleContext)

View File

@ -110,48 +110,48 @@ func NewBaseATNConfig(c ATNConfig, state ATNState, context PredictionContext, se
return a
}
func (this *BaseATNConfig) getPrecedenceFilterSuppressed() bool {
return this.precedenceFilterSuppressed
func (b *BaseATNConfig) getPrecedenceFilterSuppressed() bool {
return b.precedenceFilterSuppressed
}
func (this *BaseATNConfig) setPrecedenceFilterSuppressed(v bool) {
this.precedenceFilterSuppressed = v
func (b *BaseATNConfig) setPrecedenceFilterSuppressed(v bool) {
b.precedenceFilterSuppressed = v
}
func (this *BaseATNConfig) GetState() ATNState {
return this.state
func (b *BaseATNConfig) GetState() ATNState {
return b.state
}
func (this *BaseATNConfig) GetAlt() int {
return this.alt
func (b *BaseATNConfig) GetAlt() int {
return b.alt
}
func (this *BaseATNConfig) SetContext(v PredictionContext) {
this.context = v
func (b *BaseATNConfig) SetContext(v PredictionContext) {
b.context = v
}
func (this *BaseATNConfig) GetContext() PredictionContext {
return this.context
func (b *BaseATNConfig) GetContext() PredictionContext {
return b.context
}
func (this *BaseATNConfig) GetSemanticContext() SemanticContext {
return this.semanticContext
func (b *BaseATNConfig) GetSemanticContext() SemanticContext {
return b.semanticContext
}
func (this *BaseATNConfig) GetReachesIntoOuterContext() int {
return this.reachesIntoOuterContext
func (b *BaseATNConfig) GetReachesIntoOuterContext() int {
return b.reachesIntoOuterContext
}
func (this *BaseATNConfig) SetReachesIntoOuterContext(v int) {
this.reachesIntoOuterContext = v
func (b *BaseATNConfig) SetReachesIntoOuterContext(v int) {
b.reachesIntoOuterContext = v
}
// An ATN configuration is equal to another if both have
// the same state, they predict the same alternative, and
// syntactic/semantic contexts are the same.
///
func (this *BaseATNConfig) equals(o interface{}) bool {
func (b *BaseATNConfig) equals(o interface{}) bool {
if this == o {
if b == o {
return true
}
@ -161,54 +161,54 @@ func (this *BaseATNConfig) equals(o interface{}) bool {
return false
}
var b bool
if this.context == nil {
b = other.context == nil
var equal bool
if b.context == nil {
equal = other.context == nil
} else {
b = this.context.equals(other.context)
equal = b.context.equals(other.context)
}
return this.state.GetStateNumber() == other.state.GetStateNumber() &&
this.alt == other.alt &&
this.semanticContext.equals(other.semanticContext) &&
this.precedenceFilterSuppressed == other.precedenceFilterSuppressed &&
b
return b.state.GetStateNumber() == other.state.GetStateNumber() &&
b.alt == other.alt &&
b.semanticContext.equals(other.semanticContext) &&
b.precedenceFilterSuppressed == other.precedenceFilterSuppressed &&
equal
}
func (this *BaseATNConfig) shortHash() string {
return strconv.Itoa(this.state.GetStateNumber()) + "/" + strconv.Itoa(this.alt) + "/" + this.semanticContext.String()
func (b *BaseATNConfig) shortHash() string {
return strconv.Itoa(b.state.GetStateNumber()) + "/" + strconv.Itoa(b.alt) + "/" + b.semanticContext.String()
}
func (this *BaseATNConfig) Hash() string {
func (b *BaseATNConfig) Hash() string {
var c string
if this.context == nil {
if b.context == nil {
c = ""
} else {
c = this.context.Hash()
c = b.context.Hash()
}
return strconv.Itoa(this.state.GetStateNumber()) + "/" + strconv.Itoa(this.alt) + "/" + c + "/" + this.semanticContext.String()
return strconv.Itoa(b.state.GetStateNumber()) + "/" + strconv.Itoa(b.alt) + "/" + c + "/" + b.semanticContext.String()
}
func (this *BaseATNConfig) String() string {
func (b *BaseATNConfig) String() string {
var a string
if this.context != nil {
a = ",[" + fmt.Sprint(this.context) + "]"
var s1 string
if b.context != nil {
s1 = ",[" + fmt.Sprint(b.context) + "]"
}
var b string
if this.semanticContext != SemanticContextNone {
b = "," + fmt.Sprint(this.semanticContext)
var s2 string
if b.semanticContext != SemanticContextNone {
s2 = "," + fmt.Sprint(b.semanticContext)
}
var c string
if this.reachesIntoOuterContext > 0 {
c = ",up=" + fmt.Sprint(this.reachesIntoOuterContext)
var s3 string
if b.reachesIntoOuterContext > 0 {
s3 = ",up=" + fmt.Sprint(b.reachesIntoOuterContext)
}
return "(" + fmt.Sprint(this.state) + "," + strconv.Itoa(this.alt) + a + b + c + ")"
return "(" + fmt.Sprint(b.state) + "," + strconv.Itoa(b.alt) + s1 + s2 + s3 + ")"
}
type LexerATNConfig struct {
@ -220,95 +220,95 @@ type LexerATNConfig struct {
func NewLexerATNConfig6(state ATNState, alt int, context PredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
l.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
this.passedThroughNonGreedyDecision = false
this.lexerActionExecutor = nil
return this
l.passedThroughNonGreedyDecision = false
l.lexerActionExecutor = nil
return l
}
func NewLexerATNConfig5(state ATNState, alt int, context PredictionContext, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
this.lexerActionExecutor = lexerActionExecutor
this.passedThroughNonGreedyDecision = false
return this
l.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
l.lexerActionExecutor = lexerActionExecutor
l.passedThroughNonGreedyDecision = false
return l
}
func NewLexerATNConfig4(c *LexerATNConfig, state ATNState) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
this.lexerActionExecutor = c.lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this
l.BaseATNConfig = NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
l.lexerActionExecutor = c.lexerActionExecutor
l.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return l
}
func NewLexerATNConfig3(c *LexerATNConfig, state ATNState, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
this.lexerActionExecutor = lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this
l.BaseATNConfig = NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
l.lexerActionExecutor = lexerActionExecutor
l.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return l
}
func NewLexerATNConfig2(c *LexerATNConfig, state ATNState, context PredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig(c, state, context, c.GetSemanticContext())
this.lexerActionExecutor = c.lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this
l.BaseATNConfig = NewBaseATNConfig(c, state, context, c.GetSemanticContext())
l.lexerActionExecutor = c.lexerActionExecutor
l.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return l
}
func NewLexerATNConfig1(state ATNState, alt int, context PredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
l := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
l.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
this.lexerActionExecutor = nil
this.passedThroughNonGreedyDecision = false
l.lexerActionExecutor = nil
l.passedThroughNonGreedyDecision = false
return this
return l
}
func (this *LexerATNConfig) Hash() string {
func (l *LexerATNConfig) Hash() string {
var f string
if this.passedThroughNonGreedyDecision {
if l.passedThroughNonGreedyDecision {
f = "1"
} else {
f = "0"
}
return strconv.Itoa(this.state.GetStateNumber()) + strconv.Itoa(this.alt) + fmt.Sprint(this.context) +
fmt.Sprint(this.semanticContext) + f + fmt.Sprint(this.lexerActionExecutor)
return strconv.Itoa(l.state.GetStateNumber()) + strconv.Itoa(l.alt) + fmt.Sprint(l.context) +
fmt.Sprint(l.semanticContext) + f + fmt.Sprint(l.lexerActionExecutor)
}
func (this *LexerATNConfig) equals(other interface{}) bool {
func (l *LexerATNConfig) equals(other interface{}) bool {
othert, ok := other.(*LexerATNConfig)
if this == other {
if l == other {
return true
} else if !ok {
return false
} else if this.passedThroughNonGreedyDecision != othert.passedThroughNonGreedyDecision {
} else if l.passedThroughNonGreedyDecision != othert.passedThroughNonGreedyDecision {
return false
}
var b bool
if this.lexerActionExecutor != nil {
b = !this.lexerActionExecutor.equals(othert.lexerActionExecutor)
if l.lexerActionExecutor != nil {
b = !l.lexerActionExecutor.equals(othert.lexerActionExecutor)
} else {
b = othert.lexerActionExecutor != nil
}
@ -316,7 +316,7 @@ func (this *LexerATNConfig) equals(other interface{}) bool {
if b {
return false
} else {
return this.BaseATNConfig.equals(othert.BaseATNConfig)
return l.BaseATNConfig.equals(othert.BaseATNConfig)
}
}

View File

@ -113,25 +113,25 @@ func NewBaseATNConfigSet(fullCtx bool) *BaseATNConfigSet {
// <p>This method updates {@link //dipsIntoOuterContext} and
// {@link //hasSemanticContext} when necessary.</p>
// /
func (this *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
func (b *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
if this.readOnly {
if b.readOnly {
panic("This set is readonly")
}
if config.GetSemanticContext() != SemanticContextNone {
this.hasSemanticContext = true
b.hasSemanticContext = true
}
if config.GetReachesIntoOuterContext() > 0 {
this.dipsIntoOuterContext = true
b.dipsIntoOuterContext = true
}
var existing = this.configLookup.add(config).(ATNConfig)
var existing = b.configLookup.add(config).(ATNConfig)
if existing == config {
this.cachedHashString = "-1"
this.configs = append(this.configs, config) // track order here
b.cachedHashString = "-1"
b.configs = append(b.configs, config) // track order here
return true
}
// a previous (s,i,pi,_), merge with it and save result
var rootIsWildcard = !this.fullCtx
var rootIsWildcard = !b.fullCtx
var merged = merge(existing.GetContext(), config.GetContext(), rootIsWildcard, mergeCache)
// no need to check for existing.context, config.context in cache
// since only way to create Newgraphs is "call rule" and here. We
@ -146,26 +146,26 @@ func (this *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool
return true
}
func (this *BaseATNConfigSet) GetStates() *Set {
func (b *BaseATNConfigSet) GetStates() *Set {
var states = NewSet(nil, nil)
for i := 0; i < len(this.configs); i++ {
states.add(this.configs[i].GetState())
for i := 0; i < len(b.configs); i++ {
states.add(b.configs[i].GetState())
}
return states
}
func (this *BaseATNConfigSet) HasSemanticContext() bool {
return this.hasSemanticContext
func (b *BaseATNConfigSet) HasSemanticContext() bool {
return b.hasSemanticContext
}
func (this *BaseATNConfigSet) SetHasSemanticContext(v bool) {
this.hasSemanticContext = v
func (b *BaseATNConfigSet) SetHasSemanticContext(v bool) {
b.hasSemanticContext = v
}
func (this *BaseATNConfigSet) GetPredicates() []SemanticContext {
func (b *BaseATNConfigSet) GetPredicates() []SemanticContext {
var preds = make([]SemanticContext, 0)
for i := 0; i < len(this.configs); i++ {
c := this.configs[i].GetSemanticContext()
for i := 0; i < len(b.configs); i++ {
c := b.configs[i].GetSemanticContext()
if c != SemanticContextNone {
preds = append(preds, c)
}
@ -173,32 +173,32 @@ func (this *BaseATNConfigSet) GetPredicates() []SemanticContext {
return preds
}
func (this *BaseATNConfigSet) GetItems() []ATNConfig {
return this.configs
func (b *BaseATNConfigSet) GetItems() []ATNConfig {
return b.configs
}
func (this *BaseATNConfigSet) OptimizeConfigs(interpreter *BaseATNSimulator) {
if this.readOnly {
func (b *BaseATNConfigSet) OptimizeConfigs(interpreter *BaseATNSimulator) {
if b.readOnly {
panic("This set is readonly")
}
if this.configLookup.length() == 0 {
if b.configLookup.length() == 0 {
return
}
for i := 0; i < len(this.configs); i++ {
var config = this.configs[i]
for i := 0; i < len(b.configs); i++ {
var config = b.configs[i]
config.SetContext(interpreter.getCachedContext(config.GetContext()))
}
}
func (this *BaseATNConfigSet) AddAll(coll []ATNConfig) bool {
func (b *BaseATNConfigSet) AddAll(coll []ATNConfig) bool {
for i := 0; i < len(coll); i++ {
this.Add(coll[i], nil)
b.Add(coll[i], nil)
}
return false
}
func (this *BaseATNConfigSet) Equals(other interface{}) bool {
if this == other {
func (b *BaseATNConfigSet) Equals(other interface{}) bool {
if b == other {
return true
} else if _, ok := other.(*BaseATNConfigSet); !ok {
return false
@ -206,129 +206,129 @@ func (this *BaseATNConfigSet) Equals(other interface{}) bool {
other2 := other.(*BaseATNConfigSet)
return this.configs != nil &&
// this.configs.equals(other2.configs) && // TODO is this necessary?
this.fullCtx == other2.fullCtx &&
this.uniqueAlt == other2.uniqueAlt &&
this.conflictingAlts == other2.conflictingAlts &&
this.hasSemanticContext == other2.hasSemanticContext &&
this.dipsIntoOuterContext == other2.dipsIntoOuterContext
return b.configs != nil &&
// b.configs.equals(other2.configs) && // TODO is this necessary?
b.fullCtx == other2.fullCtx &&
b.uniqueAlt == other2.uniqueAlt &&
b.conflictingAlts == other2.conflictingAlts &&
b.hasSemanticContext == other2.hasSemanticContext &&
b.dipsIntoOuterContext == other2.dipsIntoOuterContext
}
func (this *BaseATNConfigSet) Hash() string {
if this.readOnly {
if this.cachedHashString == "-1" {
this.cachedHashString = this.hashConfigs()
func (b *BaseATNConfigSet) Hash() string {
if b.readOnly {
if b.cachedHashString == "-1" {
b.cachedHashString = b.hashConfigs()
}
return this.cachedHashString
return b.cachedHashString
} else {
return this.hashConfigs()
return b.hashConfigs()
}
}
func (this *BaseATNConfigSet) hashConfigs() string {
func (b *BaseATNConfigSet) hashConfigs() string {
var s = ""
for _, c := range this.configs {
for _, c := range b.configs {
s += fmt.Sprint(c)
}
return s
}
func (this *BaseATNConfigSet) Length() int {
return len(this.configs)
func (b *BaseATNConfigSet) Length() int {
return len(b.configs)
}
func (this *BaseATNConfigSet) IsEmpty() bool {
return len(this.configs) == 0
func (b *BaseATNConfigSet) IsEmpty() bool {
return len(b.configs) == 0
}
func (this *BaseATNConfigSet) Contains(item ATNConfig) bool {
if this.configLookup == nil {
func (b *BaseATNConfigSet) Contains(item ATNConfig) bool {
if b.configLookup == nil {
panic("This method is not implemented for readonly sets.")
}
return this.configLookup.contains(item)
return b.configLookup.contains(item)
}
func (this *BaseATNConfigSet) ContainsFast(item ATNConfig) bool {
if this.configLookup == nil {
func (b *BaseATNConfigSet) ContainsFast(item ATNConfig) bool {
if b.configLookup == nil {
panic("This method is not implemented for readonly sets.")
}
return this.configLookup.contains(item) // TODO containsFast is not implemented for Set
return b.configLookup.contains(item) // TODO containsFast is not implemented for Set
}
func (this *BaseATNConfigSet) Clear() {
if this.readOnly {
func (b *BaseATNConfigSet) Clear() {
if b.readOnly {
panic("This set is readonly")
}
this.configs = make([]ATNConfig, 0)
this.cachedHashString = "-1"
this.configLookup = NewSet(hashATNConfig, equalATNConfigs)
b.configs = make([]ATNConfig, 0)
b.cachedHashString = "-1"
b.configLookup = NewSet(hashATNConfig, equalATNConfigs)
}
func (this *BaseATNConfigSet) FullContext() bool {
return this.fullCtx
func (b *BaseATNConfigSet) FullContext() bool {
return b.fullCtx
}
func (this *BaseATNConfigSet) GetDipsIntoOuterContext() bool {
return this.dipsIntoOuterContext
func (b *BaseATNConfigSet) GetDipsIntoOuterContext() bool {
return b.dipsIntoOuterContext
}
func (this *BaseATNConfigSet) SetDipsIntoOuterContext(v bool) {
this.dipsIntoOuterContext = v
func (b *BaseATNConfigSet) SetDipsIntoOuterContext(v bool) {
b.dipsIntoOuterContext = v
}
func (this *BaseATNConfigSet) GetUniqueAlt() int {
return this.uniqueAlt
func (b *BaseATNConfigSet) GetUniqueAlt() int {
return b.uniqueAlt
}
func (this *BaseATNConfigSet) SetUniqueAlt(v int) {
this.uniqueAlt = v
func (b *BaseATNConfigSet) SetUniqueAlt(v int) {
b.uniqueAlt = v
}
func (this *BaseATNConfigSet) GetConflictingAlts() *BitSet {
return this.conflictingAlts
func (b *BaseATNConfigSet) GetConflictingAlts() *BitSet {
return b.conflictingAlts
}
func (this *BaseATNConfigSet) SetConflictingAlts(v *BitSet) {
this.conflictingAlts = v
func (b *BaseATNConfigSet) SetConflictingAlts(v *BitSet) {
b.conflictingAlts = v
}
func (this *BaseATNConfigSet) ReadOnly() bool {
return this.readOnly
func (b *BaseATNConfigSet) ReadOnly() bool {
return b.readOnly
}
func (this *BaseATNConfigSet) SetReadOnly(readOnly bool) {
this.readOnly = readOnly
func (b *BaseATNConfigSet) SetReadOnly(readOnly bool) {
b.readOnly = readOnly
if readOnly {
this.configLookup = nil // can't mod, no need for lookup cache
b.configLookup = nil // can't mod, no need for lookup cache
}
}
func (this *BaseATNConfigSet) String() string {
func (b *BaseATNConfigSet) String() string {
s := "["
for i, c := range this.configs {
for i, c := range b.configs {
s += c.String()
if i != len(this.configs)-1 {
if i != len(b.configs)-1 {
s += ", "
}
}
s += "]"
if this.hasSemanticContext {
s += ",hasSemanticContext=" + fmt.Sprint(this.hasSemanticContext)
if b.hasSemanticContext {
s += ",hasSemanticContext=" + fmt.Sprint(b.hasSemanticContext)
}
if this.uniqueAlt != ATNInvalidAltNumber {
s += ",uniqueAlt=" + fmt.Sprint(this.uniqueAlt)
if b.uniqueAlt != ATNInvalidAltNumber {
s += ",uniqueAlt=" + fmt.Sprint(b.uniqueAlt)
}
if this.conflictingAlts != nil {
s += ",conflictingAlts=" + this.conflictingAlts.String()
if b.conflictingAlts != nil {
s += ",conflictingAlts=" + b.conflictingAlts.String()
}
if this.dipsIntoOuterContext {
if b.dipsIntoOuterContext {
s += ",dipsIntoOuterContext"
}
@ -341,12 +341,12 @@ type OrderedATNConfigSet struct {
func NewOrderedATNConfigSet() *OrderedATNConfigSet {
this := new(OrderedATNConfigSet)
o := new(OrderedATNConfigSet)
this.BaseATNConfigSet = NewBaseATNConfigSet(false)
this.configLookup = NewSet(nil, nil)
o.BaseATNConfigSet = NewBaseATNConfigSet(false)
o.configLookup = NewSet(nil, nil)
return this
return o
}
func hashATNConfig(c interface{}) string {

View File

@ -13,7 +13,7 @@ import (
var BaseSerializedUUID = "AADB8D7E-AEEF-4415-AD2B-8204D6CF042E"
// This list contains all of the currently supported UUIDs, ordered by when
// the feature first appeared in this branch.
// the feature first appeared in this branch.
var SupportedUUIDs = []string{BaseSerializedUUID}
var SerializedVersion = 3
@ -44,11 +44,11 @@ func NewATNDeserializer(options *ATNDeserializationOptions) *ATNDeserializer {
options = ATNDeserializationOptionsdefaultOptions
}
this := new(ATNDeserializer)
a := new(ATNDeserializer)
this.deserializationOptions = options
a.deserializationOptions = options
return this
return a
}
func stringInSlice(a string, list []string) int {
@ -72,7 +72,7 @@ func stringInSlice(a string, list []string) int {
// serialized ATN at or after the feature identified by {@code feature} was
// introduced otherwise, {@code false}.
func (this *ATNDeserializer) isFeatureSupported(feature, actualUuid string) bool {
func (a *ATNDeserializer) isFeatureSupported(feature, actualUuid string) bool {
var idx1 = stringInSlice(feature, SupportedUUIDs)
if idx1 < 0 {
return false
@ -81,31 +81,31 @@ func (this *ATNDeserializer) isFeatureSupported(feature, actualUuid string) bool
return idx2 >= idx1
}
func (this *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
this.reset(utf16.Decode(data))
this.checkVersion()
this.checkUUID()
var atn = this.readATN()
this.readStates(atn)
this.readRules(atn)
this.readModes(atn)
var sets = this.readSets(atn)
this.readEdges(atn, sets)
this.readDecisions(atn)
this.readLexerActions(atn)
this.markPrecedenceDecisions(atn)
this.verifyATN(atn)
if this.deserializationOptions.generateRuleBypassTransitions && atn.grammarType == ATNTypeParser {
this.generateRuleBypassTransitions(atn)
a.reset(utf16.Decode(data))
a.checkVersion()
a.checkUUID()
var atn = a.readATN()
a.readStates(atn)
a.readRules(atn)
a.readModes(atn)
var sets = a.readSets(atn)
a.readEdges(atn, sets)
a.readDecisions(atn)
a.readLexerActions(atn)
a.markPrecedenceDecisions(atn)
a.verifyATN(atn)
if a.deserializationOptions.generateRuleBypassTransitions && atn.grammarType == ATNTypeParser {
a.generateRuleBypassTransitions(atn)
// re-verify after modification
this.verifyATN(atn)
a.verifyATN(atn)
}
return atn
}
func (this *ATNDeserializer) reset(data []rune) {
func (a *ATNDeserializer) reset(data []rune) {
temp := make([]rune, len(data))
@ -118,55 +118,55 @@ func (this *ATNDeserializer) reset(data []rune) {
}
}
this.data = temp
this.pos = 0
a.data = temp
a.pos = 0
}
func (this *ATNDeserializer) checkVersion() {
var version = this.readInt()
func (a *ATNDeserializer) checkVersion() {
var version = a.readInt()
if version != SerializedVersion {
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(SerializedVersion) + ").")
}
}
func (this *ATNDeserializer) checkUUID() {
var uuid = this.readUUID()
func (a *ATNDeserializer) checkUUID() {
var uuid = a.readUUID()
if stringInSlice(uuid, SupportedUUIDs) < 0 {
panic("Could not deserialize ATN with UUID: " + uuid + " (expected " + SerializedUUID + " or a legacy UUID).")
}
this.uuid = uuid
a.uuid = uuid
}
func (this *ATNDeserializer) readATN() *ATN {
var grammarType = this.readInt()
var maxTokenType = this.readInt()
func (a *ATNDeserializer) readATN() *ATN {
var grammarType = a.readInt()
var maxTokenType = a.readInt()
return NewATN(grammarType, maxTokenType)
}
func (this *ATNDeserializer) readStates(atn *ATN) {
func (a *ATNDeserializer) readStates(atn *ATN) {
var loopBackStateNumbers = make([]LoopEndStateIntPair, 0)
var endStateNumbers = make([]BlockStartStateIntPair, 0)
var nstates = this.readInt()
var nstates = a.readInt()
for i := 0; i < nstates; i++ {
var stype = this.readInt()
var stype = a.readInt()
// ignore bad type of states
if stype == ATNStateInvalidType {
atn.addState(nil)
continue
}
var ruleIndex = this.readInt()
var ruleIndex = a.readInt()
if ruleIndex == 0xFFFF {
ruleIndex = -1
}
var s = this.stateFactory(stype, ruleIndex)
var s = a.stateFactory(stype, ruleIndex)
if stype == ATNStateLoopEnd {
var loopBackStateNumber = this.readInt()
var loopBackStateNumber = a.readInt()
loopBackStateNumbers = append(loopBackStateNumbers, LoopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
} else if s2, ok := s.(BlockStartState); ok {
var endStateNumber = this.readInt()
var endStateNumber = a.readInt()
endStateNumbers = append(endStateNumbers, BlockStartStateIntPair{s2, endStateNumber})
}
atn.addState(s)
@ -183,32 +183,32 @@ func (this *ATNDeserializer) readStates(atn *ATN) {
pair.item0.setEndState(atn.states[pair.item1].(*BlockEndState))
}
var numNonGreedyStates = this.readInt()
var numNonGreedyStates = a.readInt()
for j := 0; j < numNonGreedyStates; j++ {
stateNumber := this.readInt()
stateNumber := a.readInt()
atn.states[stateNumber].(DecisionState).setNonGreedy(true)
}
var numPrecedenceStates = this.readInt()
var numPrecedenceStates = a.readInt()
for j := 0; j < numPrecedenceStates; j++ {
stateNumber := this.readInt()
stateNumber := a.readInt()
atn.states[stateNumber].(*RuleStartState).isPrecedenceRule = true
}
}
func (this *ATNDeserializer) readRules(atn *ATN) {
func (a *ATNDeserializer) readRules(atn *ATN) {
var nrules = this.readInt()
var nrules = a.readInt()
if atn.grammarType == ATNTypeLexer {
atn.ruleToTokenType = make([]int, nrules) // initIntArray(nrules, 0)
}
atn.ruleToStartState = make([]*RuleStartState, nrules) // initIntArray(nrules, 0)
for i := 0; i < nrules; i++ {
var s = this.readInt()
var s = a.readInt()
var startState = atn.states[s].(*RuleStartState)
atn.ruleToStartState[i] = startState
if atn.grammarType == ATNTypeLexer {
var tokenType = this.readInt()
var tokenType = a.readInt()
if tokenType == 0xFFFF {
tokenType = TokenEOF
}
@ -225,45 +225,45 @@ func (this *ATNDeserializer) readRules(atn *ATN) {
}
}
func (this *ATNDeserializer) readModes(atn *ATN) {
var nmodes = this.readInt()
func (a *ATNDeserializer) readModes(atn *ATN) {
var nmodes = a.readInt()
for i := 0; i < nmodes; i++ {
var s = this.readInt()
var s = a.readInt()
atn.modeToStartState = append(atn.modeToStartState, atn.states[s].(*TokensStartState))
}
}
func (this *ATNDeserializer) readSets(atn *ATN) []*IntervalSet {
func (a *ATNDeserializer) readSets(atn *ATN) []*IntervalSet {
var sets = make([]*IntervalSet, 0)
var m = this.readInt()
var m = a.readInt()
for i := 0; i < m; i++ {
var iset = NewIntervalSet()
sets = append(sets, iset)
var n = this.readInt()
var containsEof = this.readInt()
var n = a.readInt()
var containsEof = a.readInt()
if containsEof != 0 {
iset.addOne(-1)
}
for j := 0; j < n; j++ {
var i1 = this.readInt()
var i2 = this.readInt()
var i1 = a.readInt()
var i2 = a.readInt()
iset.addRange(i1, i2)
}
}
return sets
}
func (this *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
var nedges = this.readInt()
var nedges = a.readInt()
for i := 0; i < nedges; i++ {
var src = this.readInt()
var trg = this.readInt()
var ttype = this.readInt()
var arg1 = this.readInt()
var arg2 = this.readInt()
var arg3 = this.readInt()
trans := this.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
var src = a.readInt()
var trg = a.readInt()
var ttype = a.readInt()
var arg1 = a.readInt()
var arg2 = a.readInt()
var arg3 = a.readInt()
trans := a.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
var srcState = atn.states[src]
srcState.AddTransition(trans, -1)
}
@ -319,47 +319,47 @@ func (this *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
}
}
func (this *ATNDeserializer) readDecisions(atn *ATN) {
var ndecisions = this.readInt()
func (a *ATNDeserializer) readDecisions(atn *ATN) {
var ndecisions = a.readInt()
for i := 0; i < ndecisions; i++ {
var s = this.readInt()
var s = a.readInt()
var decState = atn.states[s].(DecisionState)
atn.DecisionToState = append(atn.DecisionToState, decState)
decState.setDecision(i)
}
}
func (this *ATNDeserializer) readLexerActions(atn *ATN) {
func (a *ATNDeserializer) readLexerActions(atn *ATN) {
if atn.grammarType == ATNTypeLexer {
var count = this.readInt()
var count = a.readInt()
atn.lexerActions = make([]LexerAction, count) // initIntArray(count, nil)
for i := 0; i < count; i++ {
var actionType = this.readInt()
var data1 = this.readInt()
var actionType = a.readInt()
var data1 = a.readInt()
if data1 == 0xFFFF {
data1 = -1
}
var data2 = this.readInt()
var data2 = a.readInt()
if data2 == 0xFFFF {
data2 = -1
}
var lexerAction = this.lexerActionFactory(actionType, data1, data2)
var lexerAction = a.lexerActionFactory(actionType, data1, data2)
atn.lexerActions[i] = lexerAction
}
}
}
func (this *ATNDeserializer) generateRuleBypassTransitions(atn *ATN) {
func (a *ATNDeserializer) generateRuleBypassTransitions(atn *ATN) {
var count = len(atn.ruleToStartState)
for i := 0; i < count; i++ {
atn.ruleToTokenType[i] = atn.maxTokenType + i + 1
}
for i := 0; i < count; i++ {
this.generateRuleBypassTransition(atn, i)
a.generateRuleBypassTransition(atn, i)
}
}
func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
var bypassStart = NewBasicBlockStartState()
bypassStart.ruleIndex = idx
@ -383,7 +383,7 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
endState = nil
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
if this.stateIsEndStateFor(state, idx) != nil {
if a.stateIsEndStateFor(state, idx) != nil {
endState = state
excludeTransition = state.(*StarLoopEntryState).loopBackState.GetTransitions()[0]
break
@ -429,7 +429,7 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
bypassStart.AddTransition(NewEpsilonTransition(MatchState, -1), -1)
}
func (this *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNState {
func (a *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNState {
if state.GetRuleIndex() != idx {
return nil
}
@ -457,12 +457,12 @@ func (this *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNStat
//
// @param atn The ATN.
//
func (this *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
func (a *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
for _, state := range atn.states {
if _, ok := state.(*StarLoopEntryState); !ok {
continue
}
// We analyze the ATN to determine if this ATN decision state is the
		// We analyze the ATN to determine if this ATN decision state is the
// decision for the closure block that determines whether a
// precedence rule should continue or complete.
//
@ -482,8 +482,8 @@ func (this *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
}
}
func (this *ATNDeserializer) verifyATN(atn *ATN) {
if !this.deserializationOptions.verifyATN {
func (a *ATNDeserializer) verifyATN(atn *ATN) {
if !a.deserializationOptions.verifyATN {
return
}
// verify assumptions
@ -493,52 +493,52 @@ func (this *ATNDeserializer) verifyATN(atn *ATN) {
if state == nil {
continue
}
this.checkCondition(state.GetEpsilonOnlyTransitions() || len(state.GetTransitions()) <= 1, "")
a.checkCondition(state.GetEpsilonOnlyTransitions() || len(state.GetTransitions()) <= 1, "")
switch s2 := state.(type) {
case *PlusBlockStartState:
this.checkCondition(s2.loopBackState != nil, "")
a.checkCondition(s2.loopBackState != nil, "")
case *StarLoopEntryState:
this.checkCondition(s2.loopBackState != nil, "")
this.checkCondition(len(s2.GetTransitions()) == 2, "")
a.checkCondition(s2.loopBackState != nil, "")
a.checkCondition(len(s2.GetTransitions()) == 2, "")
switch s2 := state.(type) {
case *StarBlockStartState:
_, ok2 := s2.GetTransitions()[1].getTarget().(*LoopEndState)
this.checkCondition(ok2, "")
this.checkCondition(!s2.nonGreedy, "")
a.checkCondition(ok2, "")
a.checkCondition(!s2.nonGreedy, "")
case *LoopEndState:
s3, ok2 := s2.GetTransitions()[1].getTarget().(*StarBlockStartState)
this.checkCondition(ok2, "")
this.checkCondition(s3.nonGreedy, "")
a.checkCondition(ok2, "")
a.checkCondition(s3.nonGreedy, "")
default:
panic("IllegalState")
}
case *StarLoopbackState:
this.checkCondition(len(state.GetTransitions()) == 1, "")
a.checkCondition(len(state.GetTransitions()) == 1, "")
_, ok2 := state.GetTransitions()[0].getTarget().(*StarLoopEntryState)
this.checkCondition(ok2, "")
a.checkCondition(ok2, "")
case *LoopEndState:
this.checkCondition(s2.loopBackState != nil, "")
a.checkCondition(s2.loopBackState != nil, "")
case *RuleStartState:
this.checkCondition(s2.stopState != nil, "")
a.checkCondition(s2.stopState != nil, "")
case *BaseBlockStartState:
this.checkCondition(s2.endState != nil, "")
a.checkCondition(s2.endState != nil, "")
case *BlockEndState:
this.checkCondition(s2.startState != nil, "")
a.checkCondition(s2.startState != nil, "")
case DecisionState:
this.checkCondition(len(s2.GetTransitions()) <= 1 || s2.getDecision() >= 0, "")
a.checkCondition(len(s2.GetTransitions()) <= 1 || s2.getDecision() >= 0, "")
default:
_, ok := s2.(*RuleStopState)
this.checkCondition(len(s2.GetTransitions()) <= 1 || ok, "")
a.checkCondition(len(s2.GetTransitions()) <= 1 || ok, "")
}
}
}
func (this *ATNDeserializer) checkCondition(condition bool, message string) {
func (a *ATNDeserializer) checkCondition(condition bool, message string) {
if !condition {
if message == "" {
message = "IllegalState"
@ -547,16 +547,16 @@ func (this *ATNDeserializer) checkCondition(condition bool, message string) {
}
}
func (this *ATNDeserializer) readInt() int {
v := this.data[this.pos]
this.pos += 1
func (a *ATNDeserializer) readInt() int {
v := a.data[a.pos]
a.pos += 1
return int(v)
}
//func (this *ATNDeserializer) readLong() int64 {
//func (a *ATNDeserializer) readLong() int64 {
// panic("Not implemented")
// var low = this.readInt32()
// var high = this.readInt32()
// var low = a.readInt32()
// var high = a.readInt32()
// return (low & 0x00000000FFFFFFFF) | (high << int32)
//}
@ -570,10 +570,10 @@ func createByteToHex() []string {
var byteToHex = createByteToHex()
func (this *ATNDeserializer) readUUID() string {
func (a *ATNDeserializer) readUUID() string {
var bb = make([]int, 16)
for i := 7; i >= 0; i-- {
var integer = this.readInt()
var integer = a.readInt()
bb[(2*i)+1] = integer & 0xFF
bb[2*i] = (integer >> 8) & 0xFF
}
@ -587,7 +587,7 @@ func (this *ATNDeserializer) readUUID() string {
byteToHex[bb[14]] + byteToHex[bb[15]]
}
func (this *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition {
func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition {
var target = atn.states[trg]
@ -625,7 +625,7 @@ func (this *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, ar
panic("The specified transition type is not valid.")
}
func (this *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {
func (a *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {
var s ATNState
switch typeIndex {
@ -664,7 +664,7 @@ func (this *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {
return s
}
func (this *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) LexerAction {
func (a *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) LexerAction {
switch typeIndex {
case LexerActionTypeChannel:
return NewLexerChannelAction(data1)

View File

@ -7,20 +7,20 @@ type BaseATNSimulator struct {
func NewBaseATNSimulator(atn *ATN, sharedContextCache *PredictionContextCache) *BaseATNSimulator {
this := new(BaseATNSimulator)
b := new(BaseATNSimulator)
this.atn = atn
this.sharedContextCache = sharedContextCache
b.atn = atn
b.sharedContextCache = sharedContextCache
return this
return b
}
var ATNSimulatorError = NewDFAState(0x7FFFFFFF, NewBaseATNConfigSet(false))
func (this *BaseATNSimulator) getCachedContext(context PredictionContext) PredictionContext {
if this.sharedContextCache == nil {
func (b *BaseATNSimulator) getCachedContext(context PredictionContext) PredictionContext {
if b.sharedContextCache == nil {
return context
}
var visited = make(map[PredictionContext]PredictionContext)
return getCachedBasePredictionContext(context, this.sharedContextCache, visited)
return getCachedBasePredictionContext(context, b.sharedContextCache, visited)
}

View File

@ -125,33 +125,33 @@ func (as *BaseATNState) SetNextTokenWithinRule(v *IntervalSet) {
as.NextTokenWithinRule = v
}
func (this *BaseATNState) String() string {
return strconv.Itoa(this.stateNumber)
func (as *BaseATNState) String() string {
return strconv.Itoa(as.stateNumber)
}
func (this *BaseATNState) equals(other interface{}) bool {
func (as *BaseATNState) equals(other interface{}) bool {
if ot, ok := other.(ATNState); ok {
return this.stateNumber == ot.GetStateNumber()
return as.stateNumber == ot.GetStateNumber()
} else {
return false
}
}
func (this *BaseATNState) isNonGreedyExitState() bool {
func (as *BaseATNState) isNonGreedyExitState() bool {
return false
}
func (this *BaseATNState) AddTransition(trans Transition, index int) {
if len(this.transitions) == 0 {
this.epsilonOnlyTransitions = trans.getIsEpsilon()
} else if this.epsilonOnlyTransitions != trans.getIsEpsilon() {
this.epsilonOnlyTransitions = false
func (as *BaseATNState) AddTransition(trans Transition, index int) {
if len(as.transitions) == 0 {
as.epsilonOnlyTransitions = trans.getIsEpsilon()
} else if as.epsilonOnlyTransitions != trans.getIsEpsilon() {
as.epsilonOnlyTransitions = false
}
if index == -1 {
this.transitions = append(this.transitions, trans)
as.transitions = append(as.transitions, trans)
} else {
this.transitions = append(this.transitions[:index], append([]Transition{trans}, this.transitions[index:]...)...)
// this.transitions.splice(index, 1, trans)
as.transitions = append(as.transitions[:index], append([]Transition{trans}, as.transitions[index:]...)...)
// as.transitions.splice(index, 1, trans)
}
}
@ -160,11 +160,11 @@ type BasicState struct {
}
func NewBasicState() *BasicState {
this := new(BasicState)
this.BaseATNState = NewBaseATNState()
b := new(BasicState)
b.BaseATNState = NewBaseATNState()
this.stateType = ATNStateBasic
return this
b.stateType = ATNStateBasic
return b
}
type DecisionState interface {
@ -186,14 +186,14 @@ type BaseDecisionState struct {
func NewBaseDecisionState() *BaseDecisionState {
this := new(BaseDecisionState)
b := new(BaseDecisionState)
this.BaseATNState = NewBaseATNState()
b.BaseATNState = NewBaseATNState()
this.decision = -1
this.nonGreedy = false
b.decision = -1
b.nonGreedy = false
return this
return b
}
func (s *BaseDecisionState) getDecision() int {
@ -228,12 +228,12 @@ type BaseBlockStartState struct {
func NewBlockStartState() *BaseBlockStartState {
this := new(BaseBlockStartState)
b := new(BaseBlockStartState)
this.BaseDecisionState = NewBaseDecisionState()
this.endState = nil
b.BaseDecisionState = NewBaseDecisionState()
b.endState = nil
return this
return b
}
func (s *BaseBlockStartState) getEndState() *BlockEndState {
@ -250,12 +250,12 @@ type BasicBlockStartState struct {
func NewBasicBlockStartState() *BasicBlockStartState {
this := new(BasicBlockStartState)
b := new(BasicBlockStartState)
this.BaseBlockStartState = NewBlockStartState()
b.BaseBlockStartState = NewBlockStartState()
this.stateType = ATNStateBlockStart
return this
b.stateType = ATNStateBlockStart
return b
}
// Terminal node of a simple {@code (a|b|c)} block.
@ -267,13 +267,13 @@ type BlockEndState struct {
func NewBlockEndState() *BlockEndState {
this := new(BlockEndState)
b := new(BlockEndState)
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateBlockEnd
this.startState = nil
b.BaseATNState = NewBaseATNState()
b.stateType = ATNStateBlockEnd
b.startState = nil
return this
return b
}
// The last node in the ATN for a rule, unless that rule is the start symbol.
@ -286,11 +286,11 @@ type RuleStopState struct {
}
func NewRuleStopState() *RuleStopState {
this := new(RuleStopState)
r := new(RuleStopState)
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateRuleStop
return this
r.BaseATNState = NewBaseATNState()
r.stateType = ATNStateRuleStop
return r
}
type RuleStartState struct {
@ -302,14 +302,14 @@ type RuleStartState struct {
func NewRuleStartState() *RuleStartState {
this := new(RuleStartState)
r := new(RuleStartState)
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateRuleStart
this.stopState = nil
this.isPrecedenceRule = false
r.BaseATNState = NewBaseATNState()
r.stateType = ATNStateRuleStart
r.stopState = nil
r.isPrecedenceRule = false
return this
return r
}
// Decision state for {@code A+} and {@code (A|B)+}. It has two transitions:
@ -321,12 +321,12 @@ type PlusLoopbackState struct {
func NewPlusLoopbackState() *PlusLoopbackState {
this := new(PlusLoopbackState)
p := new(PlusLoopbackState)
this.BaseDecisionState = NewBaseDecisionState()
p.BaseDecisionState = NewBaseDecisionState()
this.stateType = ATNStatePlusLoopBack
return this
p.stateType = ATNStatePlusLoopBack
return p
}
// Start of {@code (A|B|...)+} loop. Technically a decision state, but
@ -342,14 +342,14 @@ type PlusBlockStartState struct {
func NewPlusBlockStartState() *PlusBlockStartState {
this := new(PlusBlockStartState)
p := new(PlusBlockStartState)
this.BaseBlockStartState = NewBlockStartState()
p.BaseBlockStartState = NewBlockStartState()
this.stateType = ATNStatePlusBlockStart
this.loopBackState = nil
p.stateType = ATNStatePlusBlockStart
p.loopBackState = nil
return this
return p
}
// The block that begins a closure loop.
@ -359,13 +359,13 @@ type StarBlockStartState struct {
func NewStarBlockStartState() *StarBlockStartState {
this := new(StarBlockStartState)
s := new(StarBlockStartState)
this.BaseBlockStartState = NewBlockStartState()
s.BaseBlockStartState = NewBlockStartState()
this.stateType = ATNStateStarBlockStart
s.stateType = ATNStateStarBlockStart
return this
return s
}
type StarLoopbackState struct {
@ -374,12 +374,12 @@ type StarLoopbackState struct {
func NewStarLoopbackState() *StarLoopbackState {
this := new(StarLoopbackState)
s := new(StarLoopbackState)
this.BaseATNState = NewBaseATNState()
s.BaseATNState = NewBaseATNState()
this.stateType = ATNStateStarLoopBack
return this
s.stateType = ATNStateStarLoopBack
return s
}
type StarLoopEntryState struct {
@ -391,17 +391,17 @@ type StarLoopEntryState struct {
func NewStarLoopEntryState() *StarLoopEntryState {
this := new(StarLoopEntryState)
s := new(StarLoopEntryState)
this.BaseDecisionState = NewBaseDecisionState()
s.BaseDecisionState = NewBaseDecisionState()
this.stateType = ATNStateStarLoopEntry
this.loopBackState = nil
s.stateType = ATNStateStarLoopEntry
s.loopBackState = nil
// Indicates whether this state can benefit from a precedence DFA during SLL decision making.
this.precedenceRuleDecision = false
	// Indicates whether this state can benefit from a precedence DFA during SLL decision making.
s.precedenceRuleDecision = false
return this
return s
}
// Mark the end of a * or + loop.
@ -413,14 +413,14 @@ type LoopEndState struct {
func NewLoopEndState() *LoopEndState {
this := new(LoopEndState)
l := new(LoopEndState)
this.BaseATNState = NewBaseATNState()
l.BaseATNState = NewBaseATNState()
this.stateType = ATNStateLoopEnd
this.loopBackState = nil
l.stateType = ATNStateLoopEnd
l.loopBackState = nil
return this
return l
}
// The Tokens rule start state linking to each lexer rule start state */
@ -430,10 +430,10 @@ type TokensStartState struct {
func NewTokensStartState() *TokensStartState {
this := new(TokensStartState)
t := new(TokensStartState)
this.BaseDecisionState = NewBaseDecisionState()
t.BaseDecisionState = NewBaseDecisionState()
this.stateType = ATNStateTokenStart
return this
t.stateType = ATNStateTokenStart
return t
}

View File

@ -47,7 +47,7 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
//
var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token {
func (c *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token {
if PortDebug {
fmt.Println("Token factory creating: " + text)
@ -58,7 +58,7 @@ func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype
t.column = column
if text != "" {
t.SetText(text)
} else if this.copyText && source.charStream != nil {
} else if c.copyText && source.charStream != nil {
t.SetText(source.charStream.GetTextFromInterval(NewInterval(start, stop)))
}
@ -66,7 +66,7 @@ func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype
}
func (this *CommonTokenFactory) createThin(ttype int, text string) Token {
func (c *CommonTokenFactory) createThin(ttype int, text string) Token {
if PortDebug {
fmt.Println("Token factory creating: " + text)

View File

@ -12,21 +12,21 @@ type DFA struct {
func NewDFA(atnStartState DecisionState, decision int) *DFA {
this := new(DFA)
d := new(DFA)
// From which ATN state did we create this DFA?
this.atnStartState = atnStartState
this.decision = decision
	// From which ATN state did we create this DFA?
d.atnStartState = atnStartState
d.decision = decision
// A set of all DFA states. Use {@link Map} so we can get old state back
// ({@link Set} only allows you to see if it's there).
this._states = make(map[string]*DFAState)
this.s0 = nil
// {@code true} if this DFA is for a precedence decision otherwise,
d._states = make(map[string]*DFAState)
d.s0 = nil
	// {@code true} if this DFA is for a precedence decision otherwise,
// {@code false}. This is the backing field for {@link //isPrecedenceDfa},
// {@link //setPrecedenceDfa}.
this.precedenceDfa = false
d.precedenceDfa = false
return this
return d
}
// Get the start state for a specific precedence value.
@ -35,18 +35,18 @@ func NewDFA(atnStartState DecisionState, decision int) *DFA {
// @return The start state corresponding to the specified precedence, or
// {@code nil} if no start state exists for the specified precedence.
//
// @panics IllegalStateException if this is not a precedence DFA.
// @panics IllegalStateException if this is not a precedence DFA.
// @see //isPrecedenceDfa()
func (this *DFA) getPrecedenceStartState(precedence int) *DFAState {
if !(this.precedenceDfa) {
func (d *DFA) getPrecedenceStartState(precedence int) *DFAState {
if !(d.precedenceDfa) {
panic("Only precedence DFAs may contain a precedence start state.")
}
// s0.edges is never nil for a precedence DFA
if precedence < 0 || precedence >= len(this.s0.edges) {
if precedence < 0 || precedence >= len(d.s0.edges) {
return nil
}
return this.s0.edges[precedence]
return d.s0.edges[precedence]
}
// Set the start state for a specific precedence value.
@ -55,11 +55,11 @@ func (this *DFA) getPrecedenceStartState(precedence int) *DFAState {
// @param startState The start state corresponding to the specified
// precedence.
//
// @panics IllegalStateException if this is not a precedence DFA.
// @panics IllegalStateException if this is not a precedence DFA.
// @see //isPrecedenceDfa()
//
func (this *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
if !(this.precedenceDfa) {
func (d *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
if !(d.precedenceDfa) {
panic("Only precedence DFAs may contain a precedence start state.")
}
if precedence < 0 {
@ -71,16 +71,16 @@ func (this *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
// s0.edges is never nil for a precedence DFA
// s0.edges is never null for a precedence DFA
if precedence >= len(this.s0.edges) {
if precedence >= len(d.s0.edges) {
// enlarge the slice
this.s0.edges = append(this.s0.edges, make([]*DFAState, precedence+1-len(this.s0.edges))...)
d.s0.edges = append(d.s0.edges, make([]*DFAState, precedence+1-len(d.s0.edges))...)
}
this.s0.edges[precedence] = startState
d.s0.edges[precedence] = startState
}
//
// Sets whether this is a precedence DFA. If the specified value differs
// Sets whether this is a precedence DFA. If the specified value differs
// from the current DFA configuration, the following actions are taken
// otherwise no changes are made to the current DFA.
//
@ -93,27 +93,27 @@ func (this *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
// <li>The {@link //precedenceDfa} field is updated</li>
// </ul>
//
// @param precedenceDfa {@code true} if this is a precedence DFA otherwise,
// @param precedenceDfa {@code true} if this is a precedence DFA otherwise,
// {@code false}
func (this *DFA) setPrecedenceDfa(precedenceDfa bool) {
if this.precedenceDfa != precedenceDfa {
this._states = make(map[string]*DFAState)
func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
if d.precedenceDfa != precedenceDfa {
d._states = make(map[string]*DFAState)
if precedenceDfa {
var precedenceState = NewDFAState(-1, NewBaseATNConfigSet(false))
precedenceState.edges = make([]*DFAState, 0)
precedenceState.isAcceptState = false
precedenceState.requiresFullContext = false
this.s0 = precedenceState
d.s0 = precedenceState
} else {
this.s0 = nil
d.s0 = nil
}
this.precedenceDfa = precedenceDfa
d.precedenceDfa = precedenceDfa
}
}
func (this *DFA) GetStates() map[string]*DFAState {
return this._states
func (d *DFA) GetStates() map[string]*DFAState {
return d._states
}
type DFAStateList []*DFAState
@ -122,13 +122,13 @@ func (a DFAStateList) Len() int { return len(a) }
func (a DFAStateList) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a DFAStateList) Less(i, j int) bool { return a[i].stateNumber < a[j].stateNumber }
// Return a list of all states in this DFA, ordered by state number.
func (this *DFA) sortedStates() []*DFAState {
// Return a list of all states in this DFA, ordered by state number.
func (d *DFA) sortedStates() []*DFAState {
// extract the values
vs := make([]*DFAState, len(this._states))
vs := make([]*DFAState, len(d._states))
i := 0
for _, v := range this._states {
for _, v := range d._states {
vs[i] = v
i++
}
@ -137,18 +137,18 @@ func (this *DFA) sortedStates() []*DFAState {
return vs
}
func (this *DFA) String(literalNames []string, symbolicNames []string) string {
if this.s0 == nil {
func (d *DFA) String(literalNames []string, symbolicNames []string) string {
if d.s0 == nil {
return ""
}
var serializer = NewDFASerializer(this, literalNames, symbolicNames)
var serializer = NewDFASerializer(d, literalNames, symbolicNames)
return serializer.String()
}
func (this *DFA) ToLexerString() string {
if this.s0 == nil {
func (d *DFA) ToLexerString() string {
if d.s0 == nil {
return ""
}
var serializer = NewLexerDFASerializer(this)
var serializer = NewLexerDFASerializer(d)
return serializer.String()
}

View File

@ -22,34 +22,34 @@ func NewDFASerializer(dfa *DFA, literalNames, symbolicNames []string) *DFASerial
symbolicNames = make([]string, 0)
}
this := new(DFASerializer)
d := new(DFASerializer)
this.dfa = dfa
this.literalNames = literalNames
this.symbolicNames = symbolicNames
d.dfa = dfa
d.literalNames = literalNames
d.symbolicNames = symbolicNames
return this
return d
}
func (this *DFASerializer) String() string {
func (d *DFASerializer) String() string {
if this.dfa.s0 == nil {
if d.dfa.s0 == nil {
return ""
}
var buf = ""
var states = this.dfa.sortedStates()
var states = d.dfa.sortedStates()
for _, s := range states {
if s.edges != nil {
var n = len(s.edges)
for j := 0; j < n; j++ {
var t = s.edges[j]
if t != nil && t.stateNumber != 0x7FFFFFFF {
buf += this.GetStateString(s)
buf += d.GetStateString(s)
buf += "-"
buf += this.getEdgeLabel(j)
buf += d.getEdgeLabel(j)
buf += "->"
buf += this.GetStateString(t)
buf += d.GetStateString(t)
buf += "\n"
}
}
@ -62,19 +62,19 @@ func (this *DFASerializer) String() string {
return buf
}
func (this *DFASerializer) getEdgeLabel(i int) string {
func (d *DFASerializer) getEdgeLabel(i int) string {
if i == 0 {
return "EOF"
} else if this.literalNames != nil && i-1 < len(this.literalNames) {
return this.literalNames[i-1]
} else if this.symbolicNames != nil && i-1 < len(this.symbolicNames) {
return this.symbolicNames[i-1]
} else if d.literalNames != nil && i-1 < len(d.literalNames) {
return d.literalNames[i-1]
} else if d.symbolicNames != nil && i-1 < len(d.symbolicNames) {
return d.symbolicNames[i-1]
} else {
return strconv.Itoa(i - 1)
}
}
func (this *DFASerializer) GetStateString(s *DFAState) string {
func (d *DFASerializer) GetStateString(s *DFAState) string {
var a, b string
@ -104,25 +104,25 @@ type LexerDFASerializer struct {
func NewLexerDFASerializer(dfa *DFA) *LexerDFASerializer {
this := new(LexerDFASerializer)
l := new(LexerDFASerializer)
this.DFASerializer = NewDFASerializer(dfa, nil, nil)
l.DFASerializer = NewDFASerializer(dfa, nil, nil)
return this
return l
}
func (this *LexerDFASerializer) getEdgeLabel(i int) string {
func (l *LexerDFASerializer) getEdgeLabel(i int) string {
return "'" + string(i) + "'"
}
func (this *LexerDFASerializer) String() string {
func (l *LexerDFASerializer) String() string {
if this.dfa.s0 == nil {
if l.dfa.s0 == nil {
return ""
}
var buf = ""
var states = this.dfa.sortedStates()
var states = l.dfa.sortedStates()
for i := 0; i < len(states); i++ {
var s = states[i]
if s.edges != nil {
@ -130,11 +130,11 @@ func (this *LexerDFASerializer) String() string {
for j := 0; j < n; j++ {
var t = s.edges[j]
if t != nil && t.stateNumber != 0x7FFFFFFF {
buf += this.GetStateString(s)
buf += l.GetStateString(s)
buf += "-"
buf += this.getEdgeLabel(j)
buf += l.getEdgeLabel(j)
buf += "->"
buf += this.GetStateString(t)
buf += l.GetStateString(t)
buf += "\n"
}
}

View File

@ -13,16 +13,16 @@ type PredPrediction struct {
}
func NewPredPrediction(pred SemanticContext, alt int) *PredPrediction {
this := new(PredPrediction)
p := new(PredPrediction)
this.alt = alt
this.pred = pred
p.alt = alt
p.pred = pred
return this
return p
}
func (this *PredPrediction) String() string {
return "(" + fmt.Sprint(this.pred) + ", " + fmt.Sprint(this.alt) + ")"
func (p *PredPrediction) String() string {
return "(" + fmt.Sprint(p.pred) + ", " + fmt.Sprint(p.alt) + ")"
}
// A DFA state represents a set of possible ATN configurations.
@ -67,30 +67,30 @@ func NewDFAState(stateNumber int, configs ATNConfigSet) *DFAState {
configs = NewBaseATNConfigSet(false)
}
this := new(DFAState)
d := new(DFAState)
this.stateNumber = stateNumber
this.configs = configs
d.stateNumber = stateNumber
d.configs = configs
// {@code edges[symbol]} points to target of symbol. Shift up by 1 so (-1)
// {@link Token//EOF} maps to {@code edges[0]}.
this.edges = nil
this.isAcceptState = false
d.edges = nil
d.isAcceptState = false
// if accept state, what ttype do we Match or alt do we predict?
// This is set to {@link ATN//INVALID_ALT_NUMBER} when {@link
// //predicates}{@code !=nil} or
// {@link //requiresFullContext}.
this.prediction = 0
this.lexerActionExecutor = nil
// Indicates that this state was created during SLL prediction that
d.prediction = 0
d.lexerActionExecutor = nil
	// Indicates that this state was created during SLL prediction that
// discovered a conflict between the configurations in the state. Future
// {@link ParserATNSimulator//execATN} invocations immediately jumped doing
// full context prediction if this field is true.
this.requiresFullContext = false
// During SLL parsing, this is a list of predicates associated with the
	// full context prediction if this field is true.
d.requiresFullContext = false
	// During SLL parsing, this is a list of predicates associated with the
// ATN configurations of the DFA state. When we have predicates,
// {@link //requiresFullContext} is {@code false} since full context
// prediction evaluates predicates
// on-the-fly. If this is not nil, then {@link //prediction} is
	// on-the-fly. If this is not nil, then {@link //prediction} is
// {@link ATN//INVALID_ALT_NUMBER}.
//
// <p>We only use these for non-{@link //requiresFullContext} but
@ -100,16 +100,16 @@ func NewDFAState(stateNumber int, configs ATNConfigSet) *DFAState {
//
// <p>This list is computed by {@link
// ParserATNSimulator//predicateDFAState}.</p>
this.predicates = nil
return this
d.predicates = nil
return d
}
// Get the set of all alts mentioned by all ATN configurations in this
// Get the set of all alts mentioned by all ATN configurations in this
// DFA state.
func (this *DFAState) GetAltSet() *Set {
func (d *DFAState) GetAltSet() *Set {
var alts = NewSet(nil, nil)
if this.configs != nil {
for _, c := range this.configs.GetItems() {
if d.configs != nil {
for _, c := range d.configs.GetItems() {
alts.add(c.GetAlt())
}
}
@ -120,8 +120,8 @@ func (this *DFAState) GetAltSet() *Set {
}
}
func (this *DFAState) setPrediction(v int) {
this.prediction = v
func (d *DFAState) setPrediction(v int) {
d.prediction = v
}
// Two {@link DFAState} instances are equal if their ATN configuration sets
@ -133,34 +133,34 @@ func (this *DFAState) setPrediction(v int) {
//
// <p>Cannot test the DFA state numbers here because in
// {@link ParserATNSimulator//addDFAState} we need to know if any other state
// exists that has this exact set of ATN configurations. The
// exists that has this exact set of ATN configurations. The
// {@link //stateNumber} is irrelevant.</p>
func (this *DFAState) equals(other interface{}) bool {
func (d *DFAState) equals(other interface{}) bool {
if this == other {
if d == other {
return true
} else if _, ok := other.(*DFAState); !ok {
return false
}
return this.configs.Equals(other.(*DFAState).configs)
return d.configs.Equals(other.(*DFAState).configs)
}
func (this *DFAState) String() string {
return strconv.Itoa(this.stateNumber) + ":" + this.Hash()
func (d *DFAState) String() string {
return strconv.Itoa(d.stateNumber) + ":" + d.Hash()
}
func (this *DFAState) Hash() string {
func (d *DFAState) Hash() string {
var s string
if this.isAcceptState {
if this.predicates != nil {
s = "=>" + fmt.Sprint(this.predicates)
if d.isAcceptState {
if d.predicates != nil {
s = "=>" + fmt.Sprint(d.predicates)
} else {
s = "=>" + fmt.Sprint(this.prediction)
s = "=>" + fmt.Sprint(d.prediction)
}
}
return fmt.Sprint(this.configs) + s
return fmt.Sprint(d.configs) + s
}

View File

@ -20,7 +20,7 @@ import (
// full-context prediction resolved an SLL conflict to a unique alternative,
// <em>and</em> the minimum alternative of the SLL conflict was found to not be
// a truly viable alternative. Two-stage parsing cannot be used for inputs where
// this situation occurs.</li>
// this situation occurs.</li>
// </ul>
type DiagnosticErrorListener struct {
@ -38,37 +38,37 @@ func NewDiagnosticErrorListener(exactOnly bool) *DiagnosticErrorListener {
return n
}
func (this *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
if this.exactOnly && !exact {
func (d *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
if d.exactOnly && !exact {
return
}
var msg = "reportAmbiguity d=" +
this.getDecisionDescription(recognizer, dfa) +
d.getDecisionDescription(recognizer, dfa) +
": ambigAlts=" +
this.getConflictingAlts(ambigAlts, configs).String() +
d.getConflictingAlts(ambigAlts, configs).String() +
", input='" +
recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
recognizer.NotifyErrorListeners(msg, nil, nil)
}
func (this *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
func (d *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
var msg = "reportAttemptingFullContext d=" +
this.getDecisionDescription(recognizer, dfa) +
d.getDecisionDescription(recognizer, dfa) +
", input='" +
recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
recognizer.NotifyErrorListeners(msg, nil, nil)
}
func (this *DiagnosticErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
func (d *DiagnosticErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
var msg = "reportContextSensitivity d=" +
this.getDecisionDescription(recognizer, dfa) +
d.getDecisionDescription(recognizer, dfa) +
", input='" +
recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
recognizer.NotifyErrorListeners(msg, nil, nil)
}
func (this *DiagnosticErrorListener) getDecisionDescription(recognizer Parser, dfa *DFA) string {
func (d *DiagnosticErrorListener) getDecisionDescription(recognizer Parser, dfa *DFA) string {
var decision = dfa.decision
var ruleIndex = dfa.atnStartState.GetRuleIndex()
@ -94,7 +94,7 @@ func (this *DiagnosticErrorListener) getDecisionDescription(recognizer Parser, d
// @return Returns {@code ReportedAlts} if it is not {@code nil}, otherwise
// returns the set of alternatives represented in {@code configs}.
//
func (this *DiagnosticErrorListener) getConflictingAlts(ReportedAlts *BitSet, set ATNConfigSet) *BitSet {
func (d *DiagnosticErrorListener) getConflictingAlts(ReportedAlts *BitSet, set ATNConfigSet) *BitSet {
if ReportedAlts != nil {
return ReportedAlts
}

View File

@ -24,25 +24,25 @@ func NewDefaultErrorListener() *DefaultErrorListener {
return new(DefaultErrorListener)
}
func (this *DefaultErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
func (d *DefaultErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
if PortDebug {
fmt.Println("SyntaxError!")
}
}
func (this *DefaultErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
func (d *DefaultErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
if PortDebug {
fmt.Println("ReportAmbiguity!")
}
}
func (this *DefaultErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
func (d *DefaultErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
if PortDebug {
fmt.Println("ReportAttemptingFullContext!")
}
}
func (this *DefaultErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
func (d *DefaultErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
if PortDebug {
fmt.Println("ReportContextSensitivity!")
}
@ -73,7 +73,7 @@ var ConsoleErrorListenerINSTANCE = NewConsoleErrorListener()
// line <em>line</em>:<em>charPositionInLine</em> <em>msg</em>
// </pre>
//
func (this *ConsoleErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
func (c *ConsoleErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
fmt.Fprintln(os.Stderr, "line "+strconv.Itoa(line)+":"+strconv.Itoa(column)+" "+msg)
}
@ -91,26 +91,26 @@ func NewProxyErrorListener(delegates []ErrorListener) *ProxyErrorListener {
return l
}
func (this *ProxyErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
for _, d := range this.delegates {
func (p *ProxyErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
for _, d := range p.delegates {
d.SyntaxError(recognizer, offendingSymbol, line, column, msg, e)
}
}
func (this *ProxyErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
for _, d := range this.delegates {
func (p *ProxyErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
for _, d := range p.delegates {
d.ReportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
}
}
func (this *ProxyErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
for _, d := range this.delegates {
func (p *ProxyErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
for _, d := range p.delegates {
d.ReportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs)
}
}
func (this *ProxyErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
for _, d := range this.delegates {
func (p *ProxyErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
for _, d := range p.delegates {
d.ReportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs)
}
}

View File

@ -51,8 +51,8 @@ func NewDefaultErrorStrategy() *DefaultErrorStrategy {
// <p>The default implementation simply calls {@link //endErrorCondition} to
// ensure that the handler is not in error recovery mode.</p>
func (this *DefaultErrorStrategy) reset(recognizer Parser) {
this.endErrorCondition(recognizer)
func (d *DefaultErrorStrategy) reset(recognizer Parser) {
d.endErrorCondition(recognizer)
}
//
@ -61,12 +61,12 @@ func (this *DefaultErrorStrategy) reset(recognizer Parser) {
//
// @param recognizer the parser instance
//
func (this *DefaultErrorStrategy) beginErrorCondition(recognizer Parser) {
this.errorRecoveryMode = true
func (d *DefaultErrorStrategy) beginErrorCondition(recognizer Parser) {
d.errorRecoveryMode = true
}
func (this *DefaultErrorStrategy) inErrorRecoveryMode(recognizer Parser) bool {
return this.errorRecoveryMode
func (d *DefaultErrorStrategy) inErrorRecoveryMode(recognizer Parser) bool {
return d.errorRecoveryMode
}
//
@ -75,10 +75,10 @@ func (this *DefaultErrorStrategy) inErrorRecoveryMode(recognizer Parser) bool {
//
// @param recognizer
//
func (this *DefaultErrorStrategy) endErrorCondition(recognizer Parser) {
this.errorRecoveryMode = false
this.lastErrorStates = nil
this.lastErrorIndex = -1
func (d *DefaultErrorStrategy) endErrorCondition(recognizer Parser) {
d.errorRecoveryMode = false
d.lastErrorStates = nil
d.lastErrorIndex = -1
}
//
@ -86,8 +86,8 @@ func (this *DefaultErrorStrategy) endErrorCondition(recognizer Parser) {
//
// <p>The default implementation simply calls {@link //endErrorCondition}.</p>
//
func (this *DefaultErrorStrategy) ReportMatch(recognizer Parser) {
this.endErrorCondition(recognizer)
func (d *DefaultErrorStrategy) ReportMatch(recognizer Parser) {
d.endErrorCondition(recognizer)
}
//
@ -109,13 +109,13 @@ func (this *DefaultErrorStrategy) ReportMatch(recognizer Parser) {
// the exception</li>
// </ul>
//
func (this *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionException) {
func (d *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionException) {
// if we've already Reported an error and have not Matched a token
// yet successfully, don't Report any errors.
if this.inErrorRecoveryMode(recognizer) {
if d.inErrorRecoveryMode(recognizer) {
return // don't Report spurious errors
}
this.beginErrorCondition(recognizer)
d.beginErrorCondition(recognizer)
switch t := e.(type) {
default:
@ -123,11 +123,11 @@ func (this *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionEx
// fmt.Println(e.stack)
recognizer.NotifyErrorListeners(e.GetMessage(), e.GetOffendingToken(), e)
case *NoViableAltException:
this.ReportNoViableAlternative(recognizer, t)
d.ReportNoViableAlternative(recognizer, t)
case *InputMisMatchException:
this.ReportInputMisMatch(recognizer, t)
d.ReportInputMisMatch(recognizer, t)
case *FailedPredicateException:
this.ReportFailedPredicate(recognizer, t)
d.ReportFailedPredicate(recognizer, t)
}
}
@ -137,28 +137,28 @@ func (this *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionEx
// until we find one in the reSynchronization set--loosely the set of tokens
// that can follow the current rule.</p>
//
func (this *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
func (d *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
if this.lastErrorIndex == recognizer.GetInputStream().Index() &&
this.lastErrorStates != nil && this.lastErrorStates.contains(recognizer.GetState()) {
if d.lastErrorIndex == recognizer.GetInputStream().Index() &&
d.lastErrorStates != nil && d.lastErrorStates.contains(recognizer.GetState()) {
// uh oh, another error at same token index and previously-Visited
// state in ATN must be a case where LT(1) is in the recovery
// token set so nothing got consumed. Consume a single token
// at least to prevent an infinite loop this is a failsafe.
// at least to prevent an infinite loop d is a failsafe.
recognizer.Consume()
}
this.lastErrorIndex = recognizer.GetInputStream().Index()
if this.lastErrorStates == nil {
this.lastErrorStates = NewIntervalSet()
d.lastErrorIndex = recognizer.GetInputStream().Index()
if d.lastErrorStates == nil {
d.lastErrorStates = NewIntervalSet()
}
this.lastErrorStates.addOne(recognizer.GetState())
var followSet = this.getErrorRecoverySet(recognizer)
this.consumeUntil(recognizer, followSet)
d.lastErrorStates.addOne(recognizer.GetState())
var followSet = d.getErrorRecoverySet(recognizer)
d.consumeUntil(recognizer, followSet)
}
// The default implementation of {@link ANTLRErrorStrategy//Sync} makes sure
// that the current lookahead symbol is consistent with what were expecting
// at this point in the ATN. You can call this anytime but ANTLR only
// at d point in the ATN. You can call d anytime but ANTLR only
// generates code to check before subrules/loops and each iteration.
//
// <p>Implements Jim Idle's magic Sync mechanism in closures and optional
@ -198,12 +198,12 @@ func (this *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionExcept
//
// <p>This functionality cost a little bit of effort because the parser has to
// compare token set at the start of the loop and at each iteration. If for
// some reason speed is suffering for you, you can turn off this
// functionality by simply overriding this method as a blank { }.</p>
// some reason speed is suffering for you, you can turn off d
// functionality by simply overriding d method as a blank { }.</p>
//
func (this *DefaultErrorStrategy) Sync(recognizer Parser) {
func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
// If already recovering, don't try to Sync
if this.inErrorRecoveryMode(recognizer) {
if d.inErrorRecoveryMode(recognizer) {
return
}
@ -247,7 +247,7 @@ func (this *DefaultErrorStrategy) Sync(recognizer Parser) {
fallthrough
case ATNStateStarLoopEntry:
// Report error and recover if possible
if this.singleTokenDeletion(recognizer) != nil {
if d.singleTokenDeletion(recognizer) != nil {
return
} else {
panic(NewInputMisMatchException(recognizer))
@ -255,11 +255,11 @@ func (this *DefaultErrorStrategy) Sync(recognizer Parser) {
case ATNStatePlusLoopBack:
fallthrough
case ATNStateStarLoopBack:
this.ReportUnwantedToken(recognizer)
d.ReportUnwantedToken(recognizer)
var expecting = NewIntervalSet()
expecting.addSet(recognizer.GetExpectedTokens())
var whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer))
this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
var whatFollowsLoopIterationOrRule = expecting.addSet(d.getErrorRecoverySet(recognizer))
d.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
default:
// do nothing if we can't identify the exact kind of ATN state
}
@ -273,7 +273,7 @@ func (this *DefaultErrorStrategy) Sync(recognizer Parser) {
// @param recognizer the parser instance
// @param e the recognition exception
//
func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *NoViableAltException) {
func (d *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *NoViableAltException) {
var tokens = recognizer.GetTokenStream()
var input string
if tokens != nil {
@ -285,7 +285,7 @@ func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e
} else {
input = "<unknown input>"
}
var msg = "no viable alternative at input " + this.escapeWSAndQuote(input)
var msg = "no viable alternative at input " + d.escapeWSAndQuote(input)
recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
}
@ -298,8 +298,8 @@ func (this *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e
// @param recognizer the parser instance
// @param e the recognition exception
//
func (this *DefaultErrorStrategy) ReportInputMisMatch(recognizer Parser, e *InputMisMatchException) {
var msg = "misMatched input " + this.GetTokenErrorDisplay(e.offendingToken) +
func (d *DefaultErrorStrategy) ReportInputMisMatch(recognizer Parser, e *InputMisMatchException) {
var msg = "misMatched input " + d.GetTokenErrorDisplay(e.offendingToken) +
" expecting " + e.getExpectedTokens().StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
}
@ -313,16 +313,16 @@ func (this *DefaultErrorStrategy) ReportInputMisMatch(recognizer Parser, e *Inpu
// @param recognizer the parser instance
// @param e the recognition exception
//
func (this *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *FailedPredicateException) {
func (d *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *FailedPredicateException) {
var ruleName = recognizer.GetRuleNames()[recognizer.GetParserRuleContext().GetRuleIndex()]
var msg = "rule " + ruleName + " " + e.message
recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
}
// This method is called to Report a syntax error which requires the removal
// of a token from the input stream. At the time this method is called, the
// of a token from the input stream. At the time d method is called, the
// erroneous symbol is current {@code LT(1)} symbol and has not yet been
// removed from the input stream. When this method returns,
// removed from the input stream. When d method returns,
// {@code recognizer} is in error recovery mode.
//
// <p>This method is called when {@link //singleTokenDeletion} identifies
@ -336,22 +336,22 @@ func (this *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *Fa
//
// @param recognizer the parser instance
//
func (this *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
if this.inErrorRecoveryMode(recognizer) {
func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
if d.inErrorRecoveryMode(recognizer) {
return
}
this.beginErrorCondition(recognizer)
d.beginErrorCondition(recognizer)
var t = recognizer.GetCurrentToken()
var tokenName = this.GetTokenErrorDisplay(t)
var expecting = this.getExpectedTokens(recognizer)
var tokenName = d.GetTokenErrorDisplay(t)
var expecting = d.getExpectedTokens(recognizer)
var msg = "extraneous input " + tokenName + " expecting " +
expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
recognizer.NotifyErrorListeners(msg, t, nil)
}
// This method is called to Report a syntax error which requires the
// insertion of a missing token into the input stream. At the time this
// method is called, the missing token has not yet been inserted. When this
// insertion of a missing token into the input stream. At the time d
// method is called, the missing token has not yet been inserted. When d
// method returns, {@code recognizer} is in error recovery mode.
//
// <p>This method is called when {@link //singleTokenInsertion} identifies
@ -365,21 +365,21 @@ func (this *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
//
// @param recognizer the parser instance
//
func (this *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
if this.inErrorRecoveryMode(recognizer) {
func (d *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
if d.inErrorRecoveryMode(recognizer) {
return
}
this.beginErrorCondition(recognizer)
d.beginErrorCondition(recognizer)
var t = recognizer.GetCurrentToken()
var expecting = this.getExpectedTokens(recognizer)
var expecting = d.getExpectedTokens(recognizer)
var msg = "missing " + expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false) +
" at " + this.GetTokenErrorDisplay(t)
" at " + d.GetTokenErrorDisplay(t)
recognizer.NotifyErrorListeners(msg, t, nil)
}
// <p>The default implementation attempts to recover from the misMatched input
// by using single token insertion and deletion as described below. If the
// recovery attempt fails, this method panics an
// recovery attempt fails, d method panics an
// {@link InputMisMatchException}.</p>
//
// <p><strong>EXTRA TOKEN</strong> (single token deletion)</p>
@ -413,7 +413,7 @@ func (this *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
// stat &rarr expr &rarr atom
// </pre>
//
// and it will be trying to Match the {@code ')'} at this point in the
// and it will be trying to Match the {@code ')'} at d point in the
// derivation:
//
// <pre>
@ -426,9 +426,9 @@ func (this *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
// is in the set of tokens that can follow the {@code ')'} token reference
// in rule {@code atom}. It can assume that you forgot the {@code ')'}.
//
func (this *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
func (d *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
// SINGLE TOKEN DELETION
var MatchedSymbol = this.singleTokenDeletion(recognizer)
var MatchedSymbol = d.singleTokenDeletion(recognizer)
if MatchedSymbol != nil {
// we have deleted the extra token.
// now, move past ttype token as if all were ok
@ -436,8 +436,8 @@ func (this *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
return MatchedSymbol
}
// SINGLE TOKEN INSERTION
if this.singleTokenInsertion(recognizer) {
return this.getMissingSymbol(recognizer)
if d.singleTokenInsertion(recognizer) {
return d.getMissingSymbol(recognizer)
}
// even that didn't work must panic the exception
panic(NewInputMisMatchException(recognizer))
@ -446,21 +446,21 @@ func (this *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
//
// This method implements the single-token insertion inline error recovery
// strategy. It is called by {@link //recoverInline} if the single-token
// deletion strategy fails to recover from the misMatched input. If this
// deletion strategy fails to recover from the misMatched input. If d
// method returns {@code true}, {@code recognizer} will be in error recovery
// mode.
//
// <p>This method determines whether or not single-token insertion is viable by
// checking if the {@code LA(1)} input symbol could be successfully Matched
// if it were instead the {@code LA(2)} symbol. If this method returns
// if it were instead the {@code LA(2)} symbol. If d method returns
// {@code true}, the caller is responsible for creating and inserting a
// token with the correct type to produce this behavior.</p>
// token with the correct type to produce d behavior.</p>
//
// @param recognizer the parser instance
// @return {@code true} if single-token insertion is a viable recovery
// strategy for the current misMatched input, otherwise {@code false}
//
func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
func (d *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
var currentSymbolType = recognizer.GetTokenStream().LA(1)
// if current token is consistent with what could come after current
// ATN state, then we know we're missing a token error recovery
@ -470,7 +470,7 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
var next = currentState.GetTransitions()[0].getTarget()
var expectingAtLL2 = atn.NextTokens(next, recognizer.GetParserRuleContext())
if expectingAtLL2.contains(currentSymbolType) {
this.ReportMissingToken(recognizer)
d.ReportMissingToken(recognizer)
return true
} else {
return false
@ -479,12 +479,12 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
// This method implements the single-token deletion inline error recovery
// strategy. It is called by {@link //recoverInline} to attempt to recover
// from misMatched input. If this method returns nil, the parser and error
// handler state will not have changed. If this method returns non-nil,
// from misMatched input. If d method returns nil, the parser and error
// handler state will not have changed. If d method returns non-nil,
// {@code recognizer} will <em>not</em> be in error recovery mode since the
// returned token was a successful Match.
//
// <p>If the single-token deletion is successful, this method calls
// <p>If the single-token deletion is successful, d method calls
// {@link //ReportUnwantedToken} to Report the error, followed by
// {@link Parser//consume} to actually "delete" the extraneous token. Then,
// before returning {@link //ReportMatch} is called to signal a successful
@ -495,11 +495,11 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
// deletion successfully recovers from the misMatched input, otherwise
// {@code nil}
//
func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
func (d *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
var NextTokenType = recognizer.GetTokenStream().LA(2)
var expecting = this.getExpectedTokens(recognizer)
var expecting = d.getExpectedTokens(recognizer)
if expecting.contains(NextTokenType) {
this.ReportUnwantedToken(recognizer)
d.ReportUnwantedToken(recognizer)
// print("recoverFromMisMatchedToken deleting " \
// + str(recognizer.GetTokenStream().LT(1)) \
// + " since " + str(recognizer.GetTokenStream().LT(2)) \
@ -507,7 +507,7 @@ func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
recognizer.Consume() // simply delete extra token
// we want to return the token we're actually Matching
var MatchedSymbol = recognizer.GetCurrentToken()
this.ReportMatch(recognizer) // we know current token is correct
d.ReportMatch(recognizer) // we know current token is correct
return MatchedSymbol
} else {
return nil
@ -522,7 +522,7 @@ func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
// that there has been an identifier Matched previously and that
// $x points at that token. If that token is missing, but
// the next token in the stream is what we want we assume that
// this token is missing and we keep going. Because we
// d token is missing and we keep going. Because we
// have to return some token to replace the missing token,
// we have to conjure one up. This method gives the user control
// over the tokens returned for missing tokens. Mostly,
@ -531,11 +531,11 @@ func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
// action in the parser or tree parser works. It simply creates
// a CommonToken of the appropriate type. The text will be the token.
// If you change what tokens must be created by the lexer,
// override this method to create the appropriate tokens.
// override d method to create the appropriate tokens.
//
func (this *DefaultErrorStrategy) getMissingSymbol(recognizer Parser) Token {
func (d *DefaultErrorStrategy) getMissingSymbol(recognizer Parser) Token {
var currentSymbol = recognizer.GetCurrentToken()
var expecting = this.getExpectedTokens(recognizer)
var expecting = d.getExpectedTokens(recognizer)
var expectedTokenType = expecting.first()
var tokenText string
@ -563,7 +563,7 @@ func (this *DefaultErrorStrategy) getMissingSymbol(recognizer Parser) Token {
return tf.Create(current.GetSource(), expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.GetLine(), current.GetColumn())
}
func (this *DefaultErrorStrategy) getExpectedTokens(recognizer Parser) *IntervalSet {
func (d *DefaultErrorStrategy) getExpectedTokens(recognizer Parser) *IntervalSet {
return recognizer.GetExpectedTokens()
}
@ -575,7 +575,7 @@ func (this *DefaultErrorStrategy) getExpectedTokens(recognizer Parser) *Interval
// your token objects because you don't have to go modify your lexer
// so that it creates a NewJava type.
//
func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string {
func (d *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string {
if t == nil {
return "<no token>"
}
@ -587,10 +587,10 @@ func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string {
s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
}
}
return this.escapeWSAndQuote(s)
return d.escapeWSAndQuote(s)
}
func (this *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
func (d *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
s = strings.Replace(s, "\t", "\\t", -1)
s = strings.Replace(s, "\n", "\\n", -1)
s = strings.Replace(s, "\r", "\\r", -1)
@ -599,7 +599,7 @@ func (this *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
// Compute the error recovery set for the current rule. During
// rule invocation, the parser pushes the set of tokens that can
// follow that rule reference on the stack this amounts to
// follow that rule reference on the stack d amounts to
// computing FIRST of what follows the rule reference in the
// enclosing rule. See LinearApproximator.FIRST().
// This local follow set only includes tokens
@ -656,10 +656,10 @@ func (this *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
// reSync to one of those tokens. Note that FOLLOW(c)='^' and if
// we reSync'd to that token, we'd consume until EOF. We need to
// Sync to context-sensitive FOLLOWs for a, b, and c: {']','^'}.
// In this case, for input "[]", LA(1) is ']' and in the set, so we would
// In d case, for input "[]", LA(1) is ']' and in the set, so we would
// not consume anything. After printing an error, rule c would
// return normally. Rule b would not find the required '^' though.
// At this point, it gets a misMatched token error and panics an
// At d point, it gets a misMatched token error and panics an
// exception (since LA(1) is not in the viable following token
// set). The rule exception handler tries to recover, but finds
// the same recovery set and doesn't consume anything. Rule b
@ -689,7 +689,7 @@ func (this *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
// Like Grosch I implement context-sensitive FOLLOW sets that are combined
// at run-time upon error to avoid overhead during parsing.
//
func (this *DefaultErrorStrategy) getErrorRecoverySet(recognizer Parser) *IntervalSet {
func (d *DefaultErrorStrategy) getErrorRecoverySet(recognizer Parser) *IntervalSet {
var atn = recognizer.GetInterpreter().atn
var ctx = recognizer.GetParserRuleContext()
var recoverSet = NewIntervalSet()
@ -706,7 +706,7 @@ func (this *DefaultErrorStrategy) getErrorRecoverySet(recognizer Parser) *Interv
}
// Consume tokens until one Matches the given token set.//
func (this *DefaultErrorStrategy) consumeUntil(recognizer Parser, set *IntervalSet) {
func (d *DefaultErrorStrategy) consumeUntil(recognizer Parser, set *IntervalSet) {
var ttype = recognizer.GetTokenStream().LA(1)
for ttype != TokenEOF && !set.contains(ttype) {
recognizer.Consume()
@ -748,11 +748,11 @@ type BailErrorStrategy struct {
func NewBailErrorStrategy() *BailErrorStrategy {
this := new(BailErrorStrategy)
b := new(BailErrorStrategy)
this.DefaultErrorStrategy = NewDefaultErrorStrategy()
b.DefaultErrorStrategy = NewDefaultErrorStrategy()
return this
return b
}
// Instead of recovering from exception {@code e}, re-panic it wrapped
@ -760,7 +760,7 @@ func NewBailErrorStrategy() *BailErrorStrategy {
// rule func catches. Use {@link Exception//getCause()} to get the
// original {@link RecognitionException}.
//
func (this *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
func (b *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
var context = recognizer.GetParserRuleContext()
for context != nil {
context.SetException(e)
@ -772,11 +772,11 @@ func (this *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException
// Make sure we don't attempt to recover inline if the parser
// successfully recovers, it won't panic an exception.
//
func (this *BailErrorStrategy) RecoverInline(recognizer Parser) {
this.Recover(recognizer, NewInputMisMatchException(recognizer))
func (b *BailErrorStrategy) RecoverInline(recognizer Parser) {
b.Recover(recognizer, NewInputMisMatchException(recognizer))
}
// Make sure we don't attempt to recover from problems in subrules.//
func (this *BailErrorStrategy) Sync(recognizer Parser) {
func (b *BailErrorStrategy) Sync(recognizer Parser) {
// pass
}

View File

@ -56,40 +56,40 @@ func NewBaseRecognitionException(message string, recognizer Recognizer, input In
return t
}
func (this *BaseRecognitionException) GetMessage() string {
return this.message
func (b *BaseRecognitionException) GetMessage() string {
return b.message
}
func (this *BaseRecognitionException) GetOffendingToken() Token {
return this.offendingToken
func (b *BaseRecognitionException) GetOffendingToken() Token {
return b.offendingToken
}
func (this *BaseRecognitionException) GetInputStream() IntStream {
return this.input
func (b *BaseRecognitionException) GetInputStream() IntStream {
return b.input
}
// <p>If the state number is not known, this method returns -1.</p>
// <p>If the state number is not known, b method returns -1.</p>
//
// Gets the set of input symbols which could potentially follow the
// previously Matched symbol at the time this exception was panicn.
// previously Matched symbol at the time b exception was panicn.
//
// <p>If the set of expected tokens is not known and could not be computed,
// this method returns {@code nil}.</p>
// b method returns {@code nil}.</p>
//
// @return The set of token types that could potentially follow the current
// state in the ATN, or {@code nil} if the information is not available.
// /
func (this *BaseRecognitionException) getExpectedTokens() *IntervalSet {
if this.recognizer != nil {
return this.recognizer.GetATN().getExpectedTokens(this.offendingState, this.ctx)
func (b *BaseRecognitionException) getExpectedTokens() *IntervalSet {
if b.recognizer != nil {
return b.recognizer.GetATN().getExpectedTokens(b.offendingState, b.ctx)
} else {
return nil
}
}
func (this *BaseRecognitionException) String() string {
return this.message
func (b *BaseRecognitionException) String() string {
return b.message
}
type LexerNoViableAltException struct {
@ -101,20 +101,20 @@ type LexerNoViableAltException struct {
func NewLexerNoViableAltException(lexer Lexer, input CharStream, startIndex int, deadEndConfigs ATNConfigSet) *LexerNoViableAltException {
this := new(LexerNoViableAltException)
l := new(LexerNoViableAltException)
this.BaseRecognitionException = NewBaseRecognitionException("", lexer, input, nil)
l.BaseRecognitionException = NewBaseRecognitionException("", lexer, input, nil)
this.startIndex = startIndex
this.deadEndConfigs = deadEndConfigs
l.startIndex = startIndex
l.deadEndConfigs = deadEndConfigs
return this
return l
}
func (this *LexerNoViableAltException) String() string {
func (l *LexerNoViableAltException) String() string {
var symbol = ""
if this.startIndex >= 0 && this.startIndex < this.input.Size() {
symbol = this.input.(CharStream).GetTextFromInterval(NewInterval(this.startIndex, this.startIndex))
if l.startIndex >= 0 && l.startIndex < l.input.Size() {
symbol = l.input.(CharStream).GetTextFromInterval(NewInterval(l.startIndex, l.startIndex))
}
return "LexerNoViableAltException" + symbol
}
@ -151,20 +151,20 @@ func NewNoViableAltException(recognizer Parser, input TokenStream, startToken To
input = recognizer.GetInputStream().(TokenStream)
}
this := new(NoViableAltException)
this.BaseRecognitionException = NewBaseRecognitionException("", recognizer, input, ctx)
n := new(NoViableAltException)
n.BaseRecognitionException = NewBaseRecognitionException("", recognizer, input, ctx)
// Which configurations did we try at input.Index() that couldn't Match
// input.LT(1)?//
this.deadEndConfigs = deadEndConfigs
n.deadEndConfigs = deadEndConfigs
// The token object at the start index the input stream might
// not be buffering tokens so get a reference to it. (At the
// time the error occurred, of course the stream needs to keep a
// buffer all of the tokens but later we might not have access to those.)
this.startToken = startToken
this.offendingToken = offendingToken
n.startToken = startToken
n.offendingToken = offendingToken
return this
return n
}
type InputMisMatchException struct {
@ -176,12 +176,12 @@ type InputMisMatchException struct {
//
func NewInputMisMatchException(recognizer Parser) *InputMisMatchException {
this := new(InputMisMatchException)
this.BaseRecognitionException = NewBaseRecognitionException("", recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
i := new(InputMisMatchException)
i.BaseRecognitionException = NewBaseRecognitionException("", recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
this.offendingToken = recognizer.GetCurrentToken()
i.offendingToken = recognizer.GetCurrentToken()
return this
return i
}
@ -200,26 +200,26 @@ type FailedPredicateException struct {
func NewFailedPredicateException(recognizer Parser, predicate string, message string) *FailedPredicateException {
this := new(FailedPredicateException)
f := new(FailedPredicateException)
this.BaseRecognitionException = NewBaseRecognitionException(this.formatMessage(predicate, message), recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
f.BaseRecognitionException = NewBaseRecognitionException(f.formatMessage(predicate, message), recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
var s = recognizer.GetInterpreter().atn.states[recognizer.GetState()]
var trans = s.GetTransitions()[0]
if trans2, ok := trans.(*PredicateTransition); ok {
this.ruleIndex = trans2.ruleIndex
this.predicateIndex = trans2.predIndex
f.ruleIndex = trans2.ruleIndex
f.predicateIndex = trans2.predIndex
} else {
this.ruleIndex = 0
this.predicateIndex = 0
f.ruleIndex = 0
f.predicateIndex = 0
}
this.predicate = predicate
this.offendingToken = recognizer.GetCurrentToken()
f.predicate = predicate
f.offendingToken = recognizer.GetCurrentToken()
return this
return f
}
func (this *FailedPredicateException) formatMessage(predicate, message string) string {
func (f *FailedPredicateException) formatMessage(predicate, message string) string {
if message != "" {
return message
} else {

View File

@ -109,110 +109,110 @@ const (
LexerMaxCharValue = '\uFFFE'
)
func (l *BaseLexer) reset() {
func (b *BaseLexer) reset() {
// wack Lexer state variables
if l._input != nil {
l._input.Seek(0) // rewind the input
if b._input != nil {
b._input.Seek(0) // rewind the input
}
l._token = nil
l._type = TokenInvalidType
l._channel = TokenDefaultChannel
l.TokenStartCharIndex = -1
l.TokenStartColumn = -1
l.TokenStartLine = -1
l._text = ""
b._token = nil
b._type = TokenInvalidType
b._channel = TokenDefaultChannel
b.TokenStartCharIndex = -1
b.TokenStartColumn = -1
b.TokenStartLine = -1
b._text = ""
l._hitEOF = false
l._mode = LexerDefaultMode
l._modeStack = make([]int, 0)
b._hitEOF = false
b._mode = LexerDefaultMode
b._modeStack = make([]int, 0)
l.Interpreter.reset()
b.Interpreter.reset()
}
func (l *BaseLexer) GetInterpreter() *LexerATNSimulator {
return l.Interpreter
func (b *BaseLexer) GetInterpreter() *LexerATNSimulator {
return b.Interpreter
}
func (l *BaseLexer) GetInputStream() CharStream {
return l._input
func (b *BaseLexer) GetInputStream() CharStream {
return b._input
}
func (l *BaseLexer) GetSourceName() string {
return l.GrammarFileName
func (b *BaseLexer) GetSourceName() string {
return b.GrammarFileName
}
func (l *BaseLexer) setChannel(v int) {
l._channel = v
func (b *BaseLexer) setChannel(v int) {
b._channel = v
}
func (l *BaseLexer) GetTokenFactory() TokenFactory {
return l._factory
func (b *BaseLexer) GetTokenFactory() TokenFactory {
return b._factory
}
func (l *BaseLexer) setTokenFactory(f TokenFactory) {
l._factory = f
func (b *BaseLexer) setTokenFactory(f TokenFactory) {
b._factory = f
}
func (l *BaseLexer) safeMatch() (ret int) {
func (b *BaseLexer) safeMatch() (ret int) {
// previously in catch block
defer func() {
if e := recover(); e != nil {
if re, ok := e.(RecognitionException); ok {
l.notifyListeners(re) // Report error
l.Recover(re)
b.notifyListeners(re) // Report error
b.Recover(re)
ret = LexerSkip // default
}
}
}()
return l.Interpreter.Match(l._input, l._mode)
return b.Interpreter.Match(b._input, b._mode)
}
// Return a token from l source i.e., Match a token on the char stream.
func (l *BaseLexer) NextToken() Token {
if l._input == nil {
func (b *BaseLexer) NextToken() Token {
if b._input == nil {
panic("NextToken requires a non-nil input stream.")
}
var tokenStartMarker = l._input.Mark()
var tokenStartMarker = b._input.Mark()
// previously in finally block
defer func() {
// make sure we release marker after Match or
// unbuffered char stream will keep buffering
l._input.Release(tokenStartMarker)
b._input.Release(tokenStartMarker)
}()
for true {
if l._hitEOF {
l.emitEOF()
return l._token
if b._hitEOF {
b.emitEOF()
return b._token
}
l._token = nil
l._channel = TokenDefaultChannel
l.TokenStartCharIndex = l._input.Index()
l.TokenStartColumn = l.Interpreter.column
l.TokenStartLine = l.Interpreter.line
l._text = ""
b._token = nil
b._channel = TokenDefaultChannel
b.TokenStartCharIndex = b._input.Index()
b.TokenStartColumn = b.Interpreter.column
b.TokenStartLine = b.Interpreter.line
b._text = ""
var continueOuter = false
for true {
l._type = TokenInvalidType
b._type = TokenInvalidType
var ttype = LexerSkip
ttype = l.safeMatch()
ttype = b.safeMatch()
if l._input.LA(1) == TokenEOF {
l._hitEOF = true
if b._input.LA(1) == TokenEOF {
b._hitEOF = true
}
if l._type == TokenInvalidType {
l._type = ttype
if b._type == TokenInvalidType {
b._type = ttype
}
if l._type == LexerSkip {
if b._type == LexerSkip {
continueOuter = true
break
}
if l._type != LexerMore {
if b._type != LexerMore {
break
}
if PortDebug {
@ -226,10 +226,10 @@ func (l *BaseLexer) NextToken() Token {
if continueOuter {
continue
}
if l._token == nil {
l.emit()
if b._token == nil {
b.emit()
}
return l._token
return b._token
}
return nil
@ -241,48 +241,48 @@ func (l *BaseLexer) NextToken() Token {
// if token==nil at end of any token rule, it creates one for you
// and emits it.
// /
func (l *BaseLexer) Skip() {
l._type = LexerSkip
func (b *BaseLexer) Skip() {
b._type = LexerSkip
}
func (l *BaseLexer) More() {
l._type = LexerMore
func (b *BaseLexer) More() {
b._type = LexerMore
}
func (l *BaseLexer) mode(m int) {
l._mode = m
func (b *BaseLexer) mode(m int) {
b._mode = m
}
func (l *BaseLexer) pushMode(m int) {
func (b *BaseLexer) pushMode(m int) {
if LexerATNSimulatorDebug {
fmt.Println("pushMode " + strconv.Itoa(m))
}
l._modeStack.Push(l._mode)
l.mode(m)
b._modeStack.Push(b._mode)
b.mode(m)
}
func (l *BaseLexer) popMode() int {
if len(l._modeStack) == 0 {
func (b *BaseLexer) popMode() int {
if len(b._modeStack) == 0 {
panic("Empty Stack")
}
if LexerATNSimulatorDebug {
fmt.Println("popMode back to " + fmt.Sprint(l._modeStack[0:len(l._modeStack)-1]))
fmt.Println("popMode back to " + fmt.Sprint(b._modeStack[0:len(b._modeStack)-1]))
}
i, _ := l._modeStack.Pop()
l.mode(i)
return l._mode
i, _ := b._modeStack.Pop()
b.mode(i)
return b._mode
}
func (l *BaseLexer) inputStream() CharStream {
return l._input
func (b *BaseLexer) inputStream() CharStream {
return b._input
}
func (l *BaseLexer) setInputStream(input CharStream) {
l._input = nil
l._tokenFactorySourcePair = &TokenSourceCharStreamPair{l, l._input}
l.reset()
l._input = input
l._tokenFactorySourcePair = &TokenSourceCharStreamPair{l, l._input}
func (b *BaseLexer) setInputStream(input CharStream) {
b._input = nil
b._tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b._input}
b.reset()
b._input = input
b._tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b._input}
}
// By default does not support multiple emits per NextToken invocation
@ -290,8 +290,8 @@ func (l *BaseLexer) setInputStream(input CharStream) {
// and GetToken (to push tokens into a list and pull from that list
// rather than a single variable as l implementation does).
// /
func (l *BaseLexer) emitToken(token Token) {
l._token = token
func (b *BaseLexer) emitToken(token Token) {
b._token = token
}
// The standard method called to automatically emit a token at the
@ -300,94 +300,94 @@ func (l *BaseLexer) emitToken(token Token) {
// use that to set the token's text. Override l method to emit
// custom Token objects or provide a Newfactory.
// /
func (l *BaseLexer) emit() Token {
func (b *BaseLexer) emit() Token {
if PortDebug {
fmt.Println("emit")
}
var t = l._factory.Create(l._tokenFactorySourcePair, l._type, l._text, l._channel, l.TokenStartCharIndex, l.getCharIndex()-1, l.TokenStartLine, l.TokenStartColumn)
l.emitToken(t)
var t = b._factory.Create(b._tokenFactorySourcePair, b._type, b._text, b._channel, b.TokenStartCharIndex, b.getCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
b.emitToken(t)
return t
}
func (l *BaseLexer) emitEOF() Token {
cpos := l.GetCharPositionInLine()
lpos := l.GetLine()
func (b *BaseLexer) emitEOF() Token {
cpos := b.GetCharPositionInLine()
lpos := b.GetLine()
if PortDebug {
fmt.Println("emitEOF")
}
var eof = l._factory.Create(l._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, l._input.Index(), l._input.Index()-1, lpos, cpos)
l.emitToken(eof)
var eof = b._factory.Create(b._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b._input.Index(), b._input.Index()-1, lpos, cpos)
b.emitToken(eof)
return eof
}
func (l *BaseLexer) GetCharPositionInLine() int {
return l.Interpreter.column
func (b *BaseLexer) GetCharPositionInLine() int {
return b.Interpreter.column
}
func (l *BaseLexer) GetLine() int {
return l.Interpreter.line
func (b *BaseLexer) GetLine() int {
return b.Interpreter.line
}
func (l *BaseLexer) getType() int {
return l._type
func (b *BaseLexer) getType() int {
return b._type
}
func (l *BaseLexer) setType(t int) {
l._type = t
func (b *BaseLexer) setType(t int) {
b._type = t
}
// What is the index of the current character of lookahead?///
func (l *BaseLexer) getCharIndex() int {
return l._input.Index()
func (b *BaseLexer) getCharIndex() int {
return b._input.Index()
}
// Return the text Matched so far for the current token or any text override.
//Set the complete text of l token it wipes any previous changes to the text.
func (l *BaseLexer) GetText() string {
if l._text != "" {
return l._text
func (b *BaseLexer) GetText() string {
if b._text != "" {
return b._text
} else {
return l.Interpreter.GetText(l._input)
return b.Interpreter.GetText(b._input)
}
}
func (l *BaseLexer) SetText(text string) {
l._text = text
func (b *BaseLexer) SetText(text string) {
b._text = text
}
func (this *BaseLexer) GetATN() *ATN {
return this.Interpreter.atn
func (b *BaseLexer) GetATN() *ATN {
return b.Interpreter.atn
}
// Return a list of all Token objects in input char stream.
// Forces load of all tokens. Does not include EOF token.
// /
func (l *BaseLexer) getAllTokens() []Token {
func (b *BaseLexer) getAllTokens() []Token {
if PortDebug {
fmt.Println("getAllTokens")
}
var tokens = make([]Token, 0)
var t = l.NextToken()
var t = b.NextToken()
for t.GetTokenType() != TokenEOF {
tokens = append(tokens, t)
if PortDebug {
fmt.Println("getAllTokens")
}
t = l.NextToken()
t = b.NextToken()
}
return tokens
}
func (l *BaseLexer) notifyListeners(e RecognitionException) {
var start = l.TokenStartCharIndex
var stop = l._input.Index()
var text = l._input.GetTextFromInterval(NewInterval(start, stop))
func (b *BaseLexer) notifyListeners(e RecognitionException) {
var start = b.TokenStartCharIndex
var stop = b._input.Index()
var text = b._input.GetTextFromInterval(NewInterval(start, stop))
var msg = "token recognition error at: '" + text + "'"
var listener = l.GetErrorListenerDispatch()
listener.SyntaxError(l, nil, l.TokenStartLine, l.TokenStartColumn, msg, e)
var listener = b.GetErrorListenerDispatch()
listener.SyntaxError(b, nil, b.TokenStartLine, b.TokenStartColumn, msg, e)
}
func (l *BaseLexer) getErrorDisplayForChar(c rune) string {
func (b *BaseLexer) getErrorDisplayForChar(c rune) string {
if c == TokenEOF {
return "<EOF>"
} else if c == '\n' {
@ -401,8 +401,8 @@ func (l *BaseLexer) getErrorDisplayForChar(c rune) string {
}
}
func (l *BaseLexer) getCharErrorDisplay(c rune) string {
return "'" + l.getErrorDisplayForChar(c) + "'"
func (b *BaseLexer) getCharErrorDisplay(c rune) string {
return "'" + b.getErrorDisplayForChar(c) + "'"
}
// Lexers can normally Match any char in it's vocabulary after Matching
@ -410,14 +410,14 @@ func (l *BaseLexer) getCharErrorDisplay(c rune) string {
// it all works out. You can instead use the rule invocation stack
// to do sophisticated error recovery if you are in a fragment rule.
// /
func (l *BaseLexer) Recover(re RecognitionException) {
if l._input.LA(1) != TokenEOF {
func (b *BaseLexer) Recover(re RecognitionException) {
if b._input.LA(1) != TokenEOF {
if _, ok := re.(*LexerNoViableAltException); ok {
// Skip a char and try again
l.Interpreter.consume(l._input)
b.Interpreter.consume(b._input)
} else {
// TODO: Do we lose character or line position information?
l._input.Consume()
b._input.Consume()
}
}
}

View File

@ -35,30 +35,30 @@ func NewBaseLexerAction(action int) *BaseLexerAction {
return la
}
func (this *BaseLexerAction) execute(lexer Lexer) {
func (b *BaseLexerAction) execute(lexer Lexer) {
panic("Not implemented")
}
func (this *BaseLexerAction) getActionType() int {
return this.actionType
func (b *BaseLexerAction) getActionType() int {
return b.actionType
}
func (this *BaseLexerAction) getIsPositionDependent() bool {
return this.isPositionDependent
func (b *BaseLexerAction) getIsPositionDependent() bool {
return b.isPositionDependent
}
func (this *BaseLexerAction) Hash() string {
return strconv.Itoa(this.actionType)
func (b *BaseLexerAction) Hash() string {
return strconv.Itoa(b.actionType)
}
func (this *BaseLexerAction) equals(other LexerAction) bool {
return this == other
func (b *BaseLexerAction) equals(other LexerAction) bool {
return b == other
}
//
// Implements the {@code Skip} lexer action by calling {@link Lexer//Skip}.
//
// <p>The {@code Skip} command does not have any parameters, so this action is
// <p>The {@code Skip} command does not have any parameters, so l action is
// implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
type LexerSkipAction struct {
*BaseLexerAction
@ -70,14 +70,14 @@ func NewLexerSkipAction() *LexerSkipAction {
return la
}
// Provides a singleton instance of this parameterless lexer action.
// Provides a singleton instance of l parameterless lexer action.
var LexerSkipActionINSTANCE = NewLexerSkipAction()
func (this *LexerSkipAction) execute(lexer Lexer) {
func (l *LexerSkipAction) execute(lexer Lexer) {
lexer.Skip()
}
func (this *LexerSkipAction) String() string {
func (l *LexerSkipAction) String() string {
return "skip"
}
@ -90,32 +90,32 @@ type LexerTypeAction struct {
}
func NewLexerTypeAction(_type int) *LexerTypeAction {
this := new(LexerTypeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeType)
this._type = _type
return this
l := new(LexerTypeAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeType)
l._type = _type
return l
}
func (this *LexerTypeAction) execute(lexer Lexer) {
lexer.setType(this._type)
func (l *LexerTypeAction) execute(lexer Lexer) {
lexer.setType(l._type)
}
func (this *LexerTypeAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this._type)
func (l *LexerTypeAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l._type)
}
func (this *LexerTypeAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerTypeAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerTypeAction); !ok {
return false
} else {
return this._type == other.(*LexerTypeAction)._type
return l._type == other.(*LexerTypeAction)._type
}
}
func (this *LexerTypeAction) String() string {
return "actionType(" + strconv.Itoa(this._type) + ")"
func (l *LexerTypeAction) String() string {
return "actionType(" + strconv.Itoa(l._type) + ")"
}
// Implements the {@code pushMode} lexer action by calling
@ -128,40 +128,40 @@ type LexerPushModeAction struct {
func NewLexerPushModeAction(mode int) *LexerPushModeAction {
this := new(LexerPushModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypePushMode)
l := new(LexerPushModeAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypePushMode)
this.mode = mode
return this
l.mode = mode
return l
}
// <p>This action is implemented by calling {@link Lexer//pushMode} with the
// value provided by {@link //getMode}.</p>
func (this *LexerPushModeAction) execute(lexer Lexer) {
lexer.pushMode(this.mode)
func (l *LexerPushModeAction) execute(lexer Lexer) {
lexer.pushMode(l.mode)
}
func (this *LexerPushModeAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.mode)
func (l *LexerPushModeAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.mode)
}
func (this *LexerPushModeAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerPushModeAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerPushModeAction); !ok {
return false
} else {
return this.mode == other.(*LexerPushModeAction).mode
return l.mode == other.(*LexerPushModeAction).mode
}
}
func (this *LexerPushModeAction) String() string {
return "pushMode(" + strconv.Itoa(this.mode) + ")"
func (l *LexerPushModeAction) String() string {
return "pushMode(" + strconv.Itoa(l.mode) + ")"
}
// Implements the {@code popMode} lexer action by calling {@link Lexer//popMode}.
//
// <p>The {@code popMode} command does not have any parameters, so this action is
// <p>The {@code popMode} command does not have any parameters, so l action is
// implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
type LexerPopModeAction struct {
*BaseLexerAction
@ -169,27 +169,27 @@ type LexerPopModeAction struct {
func NewLexerPopModeAction() *LexerPopModeAction {
this := new(LexerPopModeAction)
l := new(LexerPopModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypePopMode)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypePopMode)
return this
return l
}
var LexerPopModeActionINSTANCE = NewLexerPopModeAction()
// <p>This action is implemented by calling {@link Lexer//popMode}.</p>
func (this *LexerPopModeAction) execute(lexer Lexer) {
func (l *LexerPopModeAction) execute(lexer Lexer) {
lexer.popMode()
}
func (this *LexerPopModeAction) String() string {
func (l *LexerPopModeAction) String() string {
return "popMode"
}
// Implements the {@code more} lexer action by calling {@link Lexer//more}.
//
// <p>The {@code more} command does not have any parameters, so this action is
// <p>The {@code more} command does not have any parameters, so l action is
// implemented as a singleton instance exposed by {@link //INSTANCE}.</p>
type LexerMoreAction struct {
@ -197,20 +197,20 @@ type LexerMoreAction struct {
}
func NewLexerMoreAction() *LexerModeAction {
this := new(LexerModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMore)
l := new(LexerModeAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMore)
return this
return l
}
var LexerMoreActionINSTANCE = NewLexerMoreAction()
// <p>This action is implemented by calling {@link Lexer//popMode}.</p>
func (this *LexerMoreAction) execute(lexer Lexer) {
func (l *LexerMoreAction) execute(lexer Lexer) {
lexer.More()
}
func (this *LexerMoreAction) String() string {
func (l *LexerMoreAction) String() string {
return "more"
}
@ -223,34 +223,34 @@ type LexerModeAction struct {
}
func NewLexerModeAction(mode int) *LexerModeAction {
this := new(LexerModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMode)
this.mode = mode
return this
l := new(LexerModeAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMode)
l.mode = mode
return l
}
// <p>This action is implemented by calling {@link Lexer//mode} with the
// value provided by {@link //getMode}.</p>
func (this *LexerModeAction) execute(lexer Lexer) {
lexer.mode(this.mode)
func (l *LexerModeAction) execute(lexer Lexer) {
lexer.mode(l.mode)
}
func (this *LexerModeAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.mode)
func (l *LexerModeAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.mode)
}
func (this *LexerModeAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerModeAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerModeAction); !ok {
return false
} else {
return this.mode == other.(*LexerModeAction).mode
return l.mode == other.(*LexerModeAction).mode
}
}
func (this *LexerModeAction) String() string {
return "mode(" + strconv.Itoa(this.mode) + ")"
func (l *LexerModeAction) String() string {
return "mode(" + strconv.Itoa(l.mode) + ")"
}
// Executes a custom lexer action by calling {@link Recognizer//action} with the
@ -276,31 +276,31 @@ type LexerCustomAction struct {
}
func NewLexerCustomAction(ruleIndex, actionIndex int) *LexerCustomAction {
this := new(LexerCustomAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeCustom)
this.ruleIndex = ruleIndex
this.actionIndex = actionIndex
this.isPositionDependent = true
return this
l := new(LexerCustomAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeCustom)
l.ruleIndex = ruleIndex
l.actionIndex = actionIndex
l.isPositionDependent = true
return l
}
// <p>Custom actions are implemented by calling {@link Lexer//action} with the
// appropriate rule and action indexes.</p>
func (this *LexerCustomAction) execute(lexer Lexer) {
lexer.Action(nil, this.ruleIndex, this.actionIndex)
func (l *LexerCustomAction) execute(lexer Lexer) {
lexer.Action(nil, l.ruleIndex, l.actionIndex)
}
func (this *LexerCustomAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.ruleIndex) + strconv.Itoa(this.actionIndex)
func (l *LexerCustomAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.ruleIndex) + strconv.Itoa(l.actionIndex)
}
func (this *LexerCustomAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerCustomAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerCustomAction); !ok {
return false
} else {
return this.ruleIndex == other.(*LexerCustomAction).ruleIndex && this.actionIndex == other.(*LexerCustomAction).actionIndex
return l.ruleIndex == other.(*LexerCustomAction).ruleIndex && l.actionIndex == other.(*LexerCustomAction).actionIndex
}
}
@ -315,34 +315,34 @@ type LexerChannelAction struct {
}
func NewLexerChannelAction(channel int) *LexerChannelAction {
this := new(LexerChannelAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeChannel)
this.channel = channel
return this
l := new(LexerChannelAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeChannel)
l.channel = channel
return l
}
// <p>This action is implemented by calling {@link Lexer//setChannel} with the
// value provided by {@link //getChannel}.</p>
func (this *LexerChannelAction) execute(lexer Lexer) {
lexer.setChannel(this.channel)
func (l *LexerChannelAction) execute(lexer Lexer) {
lexer.setChannel(l.channel)
}
func (this *LexerChannelAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.channel)
func (l *LexerChannelAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.channel)
}
func (this *LexerChannelAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerChannelAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerChannelAction); !ok {
return false
} else {
return this.channel == other.(*LexerChannelAction).channel
return l.channel == other.(*LexerChannelAction).channel
}
}
func (this *LexerChannelAction) String() string {
return "channel(" + strconv.Itoa(this.channel) + ")"
func (l *LexerChannelAction) String() string {
return "channel(" + strconv.Itoa(l.channel) + ")"
}
// This implementation of {@link LexerAction} is used for tracking input offsets
@ -375,33 +375,33 @@ type LexerIndexedCustomAction struct {
func NewLexerIndexedCustomAction(offset int, lexerAction LexerAction) *LexerIndexedCustomAction {
this := new(LexerIndexedCustomAction)
this.BaseLexerAction = NewBaseLexerAction(lexerAction.getActionType())
l := new(LexerIndexedCustomAction)
l.BaseLexerAction = NewBaseLexerAction(lexerAction.getActionType())
this.offset = offset
this.lexerAction = lexerAction
this.isPositionDependent = true
l.offset = offset
l.lexerAction = lexerAction
l.isPositionDependent = true
return this
return l
}
// <p>This method calls {@link //execute} on the result of {@link //getAction}
// using the provided {@code lexer}.</p>
func (this *LexerIndexedCustomAction) execute(lexer Lexer) {
func (l *LexerIndexedCustomAction) execute(lexer Lexer) {
// assume the input stream position was properly set by the calling code
this.lexerAction.execute(lexer)
l.lexerAction.execute(lexer)
}
func (this *LexerIndexedCustomAction) Hash() string {
return strconv.Itoa(this.actionType) + strconv.Itoa(this.offset) + this.lexerAction.Hash()
func (l *LexerIndexedCustomAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l.offset) + l.lexerAction.Hash()
}
func (this *LexerIndexedCustomAction) equals(other LexerAction) bool {
if this == other {
func (l *LexerIndexedCustomAction) equals(other LexerAction) bool {
if l == other {
return true
} else if _, ok := other.(*LexerIndexedCustomAction); !ok {
return false
} else {
return this.offset == other.(*LexerIndexedCustomAction).offset && this.lexerAction == other.(*LexerIndexedCustomAction).lexerAction
return l.offset == other.(*LexerIndexedCustomAction).offset && l.lexerAction == other.(*LexerIndexedCustomAction).lexerAction
}
}

View File

@ -18,9 +18,9 @@ func NewLexerActionExecutor(lexerActions []LexerAction) *LexerActionExecutor {
lexerActions = make([]LexerAction, 0)
}
this := new(LexerActionExecutor)
l := new(LexerActionExecutor)
this.lexerActions = lexerActions
l.lexerActions = lexerActions
// Caches the result of {@link //hashCode} since the hash code is an element
// of the performance-critical {@link LexerATNConfig//hashCode} operation.
@ -30,9 +30,9 @@ func NewLexerActionExecutor(lexerActions []LexerAction) *LexerActionExecutor {
s += a.Hash()
}
this.cachedHashString = s // "".join([str(la) for la in
l.cachedHashString = s // "".join([str(la) for la in
return this
return l
}
// Creates a {@link LexerActionExecutor} which executes the actions for
@ -87,30 +87,30 @@ func LexerActionExecutorappend(lexerActionExecutor *LexerActionExecutor, lexerAc
// @return A {@link LexerActionExecutor} which stores input stream offsets
// for all position-dependent lexer actions.
// /
func (this *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionExecutor {
func (l *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionExecutor {
var updatedLexerActions []LexerAction = nil
for i := 0; i < len(this.lexerActions); i++ {
_, ok := this.lexerActions[i].(*LexerIndexedCustomAction)
if this.lexerActions[i].getIsPositionDependent() && !ok {
for i := 0; i < len(l.lexerActions); i++ {
_, ok := l.lexerActions[i].(*LexerIndexedCustomAction)
if l.lexerActions[i].getIsPositionDependent() && !ok {
if updatedLexerActions == nil {
updatedLexerActions = make([]LexerAction, 0)
for _, a := range this.lexerActions {
for _, a := range l.lexerActions {
updatedLexerActions = append(updatedLexerActions, a)
}
}
updatedLexerActions[i] = NewLexerIndexedCustomAction(offset, this.lexerActions[i])
updatedLexerActions[i] = NewLexerIndexedCustomAction(offset, l.lexerActions[i])
}
}
if updatedLexerActions == nil {
return this
return l
} else {
return NewLexerActionExecutor(updatedLexerActions)
}
}
// Execute the actions encapsulated by this executor within the context of a
// Execute the actions encapsulated by l executor within the context of a
// particular {@link Lexer}.
//
// <p>This method calls {@link IntStream//seek} to set the position of the
@ -121,14 +121,14 @@ func (this *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionEx
//
// @param lexer The lexer instance.
// @param input The input stream which is the source for the current token.
// When this method is called, the current {@link IntStream//index} for
// When l method is called, the current {@link IntStream//index} for
// {@code input} should be the start of the following token, i.e. 1
// character past the end of the current token.
// @param startIndex The token start index. This value may be passed to
// {@link IntStream//seek} to set the {@code input} position to the beginning
// of the token.
// /
func (this *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex int) {
func (l *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex int) {
var requiresSeek = false
var stopIndex = input.Index()
@ -138,8 +138,8 @@ func (this *LexerActionExecutor) execute(lexer Lexer, input CharStream, startInd
}
}()
for i := 0; i < len(this.lexerActions); i++ {
var lexerAction LexerAction = this.lexerActions[i]
for i := 0; i < len(l.lexerActions); i++ {
var lexerAction LexerAction = l.lexerActions[i]
if la, ok := lexerAction.(*LexerIndexedCustomAction); ok {
var offset = la.offset
input.Seek(startIndex + offset)
@ -153,17 +153,17 @@ func (this *LexerActionExecutor) execute(lexer Lexer, input CharStream, startInd
}
}
func (this *LexerActionExecutor) Hash() string {
return this.cachedHashString
func (l *LexerActionExecutor) Hash() string {
return l.cachedHashString
}
func (this *LexerActionExecutor) equals(other interface{}) bool {
if this == other {
func (l *LexerActionExecutor) equals(other interface{}) bool {
if l == other {
return true
} else if _, ok := other.(*LexerActionExecutor); !ok {
return false
} else {
return this.cachedHashString == other.(*LexerActionExecutor).cachedHashString &&
&this.lexerActions == &other.(*LexerActionExecutor).lexerActions
return l.cachedHashString == other.(*LexerActionExecutor).cachedHashString &&
&l.lexerActions == &other.(*LexerActionExecutor).lexerActions
}
}

View File

@ -37,14 +37,14 @@ type SimState struct {
func NewSimState() *SimState {
this := new(SimState)
resetSimState(this)
return this
s := new(SimState)
resetSimState(s)
return s
}
func (this *SimState) reset() {
resetSimState(this)
func (s *SimState) reset() {
resetSimState(s)
}
type LexerATNSimulator struct {
@ -64,28 +64,28 @@ type LexerATNSimulator struct {
func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *LexerATNSimulator {
this := new(LexerATNSimulator)
l := new(LexerATNSimulator)
this.BaseATNSimulator = NewBaseATNSimulator(atn, sharedContextCache)
l.BaseATNSimulator = NewBaseATNSimulator(atn, sharedContextCache)
this.DecisionToDFA = decisionToDFA
this.recog = recog
l.DecisionToDFA = decisionToDFA
l.recog = recog
// The current token's starting index into the character stream.
// Shared across DFA to ATN simulation in case the ATN fails and the
// DFA did not have a previous accept state. In this case, we use the
// DFA did not have a previous accept state. In l case, we use the
// ATN-generated exception object.
this.startIndex = -1
l.startIndex = -1
// line number 1..n within the input///
this.line = 1
l.line = 1
// The index of the character relative to the beginning of the line
// 0..n-1///
this.column = 0
this.mode = LexerDefaultMode
l.column = 0
l.mode = LexerDefaultMode
// Used during DFA/ATN exec to record the most recent accept configuration
// info
this.prevAccept = NewSimState()
l.prevAccept = NewSimState()
// done
return this
return l
}
var LexerATNSimulatorDebug = false
@ -96,21 +96,21 @@ var LexerATNSimulatorMAX_DFA_EDGE = 127 // forces unicode to stay in ATN
var LexerATNSimulatorMatch_calls = 0
func (this *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
this.column = simulator.column
this.line = simulator.line
this.mode = simulator.mode
this.startIndex = simulator.startIndex
func (l *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
l.column = simulator.column
l.line = simulator.line
l.mode = simulator.mode
l.startIndex = simulator.startIndex
}
func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
if PortDebug {
fmt.Println("Match")
}
this.Match_calls += 1
this.mode = mode
l.Match_calls += 1
l.mode = mode
var mark = input.Mark()
defer func() {
@ -120,65 +120,65 @@ func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
input.Release(mark)
}()
this.startIndex = input.Index()
this.prevAccept.reset()
l.startIndex = input.Index()
l.prevAccept.reset()
var dfa = this.DecisionToDFA[mode]
var dfa = l.DecisionToDFA[mode]
if dfa.s0 == nil {
if PortDebug {
fmt.Println("MatchATN")
}
return this.MatchATN(input)
return l.MatchATN(input)
} else {
if PortDebug {
fmt.Println("execATN")
}
return this.execATN(input, dfa.s0)
return l.execATN(input, dfa.s0)
}
}
func (this *LexerATNSimulator) reset() {
this.prevAccept.reset()
this.startIndex = -1
this.line = 1
this.column = 0
this.mode = LexerDefaultMode
func (l *LexerATNSimulator) reset() {
l.prevAccept.reset()
l.startIndex = -1
l.line = 1
l.column = 0
l.mode = LexerDefaultMode
}
func (this *LexerATNSimulator) MatchATN(input CharStream) int {
var startState = this.atn.modeToStartState[this.mode]
func (l *LexerATNSimulator) MatchATN(input CharStream) int {
var startState = l.atn.modeToStartState[l.mode]
if LexerATNSimulatorDebug {
fmt.Println("MatchATN mode " + strconv.Itoa(this.mode) + " start: " + startState.String())
fmt.Println("MatchATN mode " + strconv.Itoa(l.mode) + " start: " + startState.String())
}
var old_mode = this.mode
var s0_closure = this.computeStartState(input, startState)
var old_mode = l.mode
var s0_closure = l.computeStartState(input, startState)
var suppressEdge = s0_closure.hasSemanticContext
s0_closure.hasSemanticContext = false
var next = this.addDFAState(s0_closure)
var next = l.addDFAState(s0_closure)
if !suppressEdge {
this.DecisionToDFA[this.mode].s0 = next
l.DecisionToDFA[l.mode].s0 = next
}
var predict = this.execATN(input, next)
var predict = l.execATN(input, next)
if LexerATNSimulatorDebug {
fmt.Println("DFA after MatchATN: " + this.DecisionToDFA[old_mode].ToLexerString())
fmt.Println("DFA after MatchATN: " + l.DecisionToDFA[old_mode].ToLexerString())
}
return predict
}
func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
if LexerATNSimulatorDebug {
fmt.Println("start state closure=" + ds0.configs.String())
}
if ds0.isAcceptState {
// allow zero-length tokens
this.captureSimState(this.prevAccept, input, ds0)
l.captureSimState(l.prevAccept, input, ds0)
}
var t = input.LA(1)
var s = ds0 // s is current/from DFA state
@ -206,26 +206,26 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
// A character will take us back to an existing DFA state
// that already has lots of edges out of it. e.g., .* in comments.
// print("Target for:" + str(s) + " and:" + str(t))
var target = this.getExistingTargetState(s, t)
var target = l.getExistingTargetState(s, t)
// if PortDebug {
// fmt.Println(target)
// }
if target == nil {
target = this.computeTargetState(input, s, t)
target = l.computeTargetState(input, s, t)
// print("Computed:" + str(target))
}
if target == ATNSimulatorError {
break
}
// If this is a consumable input element, make sure to consume before
// If l is a consumable input element, make sure to consume before
// capturing the accept state so the input index, line, and char
// position accurately reflect the state of the interpreter at the
// end of the token.
if t != TokenEOF {
this.consume(input)
l.consume(input)
}
if target.isAcceptState {
this.captureSimState(this.prevAccept, input, target)
l.captureSimState(l.prevAccept, input, target)
if t == TokenEOF {
break
}
@ -237,19 +237,19 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
if PortDebug {
fmt.Println("DONE WITH execATN loop")
}
return this.failOrAccept(this.prevAccept, input, s.configs, t)
return l.failOrAccept(l.prevAccept, input, s.configs, t)
}
// Get an existing target state for an edge in the DFA. If the target state
// for the edge has not yet been computed or is otherwise not available,
// this method returns {@code nil}.
// l method returns {@code nil}.
//
// @param s The current DFA state
// @param t The next input symbol
// @return The existing target DFA state for the given input symbol
// {@code t}, or {@code nil} if the target state for this edge is not
// {@code t}, or {@code nil} if the target state for l edge is not
// already cached
func (this *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
if s.edges == nil || t < LexerATNSimulatorMIN_DFA_EDGE || t > LexerATNSimulatorMAX_DFA_EDGE {
return nil
}
@ -272,32 +272,32 @@ func (this *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFASt
// @param t The next input symbol
//
// @return The computed target DFA state for the given input symbol
// {@code t}. If {@code t} does not lead to a valid DFA state, this method
// {@code t}. If {@code t} does not lead to a valid DFA state, l method
// returns {@link //ERROR}.
func (this *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t int) *DFAState {
func (l *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t int) *DFAState {
var reach = NewOrderedATNConfigSet()
// if we don't find an existing DFA state
// Fill reach starting from closure, following t transitions
this.getReachableConfigSet(input, s.configs, reach.BaseATNConfigSet, t)
l.getReachableConfigSet(input, s.configs, reach.BaseATNConfigSet, t)
if len(reach.configs) == 0 { // we got nowhere on t from s
if !reach.hasSemanticContext {
// we got nowhere on t, don't panic out this knowledge it'd
// we got nowhere on t, don't panic out l knowledge it'd
// cause a failover from DFA later.
this.addDFAEdge(s, t, ATNSimulatorError, nil)
l.addDFAEdge(s, t, ATNSimulatorError, nil)
}
// stop when we can't Match any more char
return ATNSimulatorError
}
// Add an edge from s to target DFA found/created for reach
return this.addDFAEdge(s, t, nil, reach.BaseATNConfigSet)
return l.addDFAEdge(s, t, nil, reach.BaseATNConfigSet)
}
func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach ATNConfigSet, t int) int {
if this.prevAccept.dfaState != nil {
func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach ATNConfigSet, t int) int {
if l.prevAccept.dfaState != nil {
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor
this.accept(input, lexerActionExecutor, this.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
l.accept(input, lexerActionExecutor, l.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
if PortDebug {
fmt.Println(prevAccept.dfaState.prediction)
@ -305,18 +305,18 @@ func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStre
return prevAccept.dfaState.prediction
} else {
// if no accept and EOF is first char, return EOF
if t == TokenEOF && input.Index() == this.startIndex {
if t == TokenEOF && input.Index() == l.startIndex {
return TokenEOF
}
panic(NewLexerNoViableAltException(this.recog, input, this.startIndex, reach))
panic(NewLexerNoViableAltException(l.recog, input, l.startIndex, reach))
}
}
// Given a starting configuration set, figure out all ATN configurations
// we can reach upon input {@code t}. Parameter {@code reach} is a return
// parameter.
func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNConfigSet, reach ATNConfigSet, t int) {
// this is used to Skip processing for configs which have a lower priority
func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNConfigSet, reach ATNConfigSet, t int) {
// l is used to Skip processing for configs which have a lower priority
// than a config that already reached an accept state for the same rule
var SkipAlt = ATNInvalidAltNumber
@ -332,21 +332,21 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure A
if LexerATNSimulatorDebug {
fmt.Printf("testing %s at %s\n", this.GetTokenName(t), cfg.String()) // this.recog, true))
fmt.Printf("testing %s at %s\n", l.GetTokenName(t), cfg.String()) // l.recog, true))
}
for _, trans := range cfg.GetState().GetTransitions() {
var target = this.getReachableTarget(trans, t)
var target = l.getReachableTarget(trans, t)
if target != nil {
var lexerActionExecutor = cfg.(*LexerATNConfig).lexerActionExecutor
if lexerActionExecutor != nil {
lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.Index() - this.startIndex)
lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.Index() - l.startIndex)
}
var treatEofAsEpsilon = (t == TokenEOF)
var config = NewLexerATNConfig3(cfg.(*LexerATNConfig), target, lexerActionExecutor)
if this.closure(input, config, reach,
if l.closure(input, config, reach,
currentAltReachedAcceptState, true, treatEofAsEpsilon) {
// any remaining configs for this alt have a lower priority
// any remaining configs for l alt have a lower priority
// than the one that just reached an accept state.
SkipAlt = cfg.GetAlt()
}
@ -355,20 +355,20 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure A
}
}
func (this *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
func (l *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
if LexerATNSimulatorDebug {
fmt.Printf("ACTION %s\n", lexerActionExecutor)
}
// seek to after last char in token
input.Seek(index)
this.line = line
this.column = charPos
if lexerActionExecutor != nil && this.recog != nil {
lexerActionExecutor.execute(this.recog, input, startIndex)
l.line = line
l.column = charPos
if lexerActionExecutor != nil && l.recog != nil {
lexerActionExecutor.execute(l.recog, input, startIndex)
}
}
func (this *LexerATNSimulator) getReachableTarget(trans Transition, t int) ATNState {
func (l *LexerATNSimulator) getReachableTarget(trans Transition, t int) ATNState {
if trans.Matches(t, 0, 0xFFFE) {
return trans.getTarget()
} else {
@ -376,7 +376,7 @@ func (this *LexerATNSimulator) getReachableTarget(trans Transition, t int) ATNSt
}
}
func (this *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *OrderedATNConfigSet {
func (l *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *OrderedATNConfigSet {
if PortDebug {
fmt.Println("Num transitions" + strconv.Itoa(len(p.GetTransitions())))
@ -386,33 +386,33 @@ func (this *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *
for i := 0; i < len(p.GetTransitions()); i++ {
var target = p.GetTransitions()[i].getTarget()
var cfg = NewLexerATNConfig6(target, i+1, BasePredictionContextEMPTY)
this.closure(input, cfg, configs, false, false, false)
l.closure(input, cfg, configs, false, false, false)
}
return configs
}
// Since the alternatives within any lexer decision are ordered by
// preference, this method stops pursuing the closure as soon as an accept
// preference, l method stops pursuing the closure as soon as an accept
// state is reached. After the first accept state is reached by depth-first
// search from {@code config}, all other (potentially reachable) states for
// this rule would have a lower priority.
// l rule would have a lower priority.
//
// @return {@code true} if an accept state is reached, otherwise
// {@code false}.
func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, configs ATNConfigSet,
func (l *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, configs ATNConfigSet,
currentAltReachedAcceptState, speculative, treatEofAsEpsilon bool) bool {
if LexerATNSimulatorDebug {
fmt.Println("closure(" + config.String() + ")") // config.String(this.recog, true) + ")")
fmt.Println("closure(" + config.String() + ")") // config.String(l.recog, true) + ")")
}
_, ok := config.state.(*RuleStopState)
if ok {
if LexerATNSimulatorDebug {
if this.recog != nil {
fmt.Printf("closure at %s rule stop %s\n", this.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
if l.recog != nil {
fmt.Printf("closure at %s rule stop %s\n", l.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
} else {
fmt.Printf("closure at rule stop %s\n", config)
}
@ -431,9 +431,9 @@ func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig,
for i := 0; i < config.context.length(); i++ {
if config.context.getReturnState(i) != BasePredictionContextEMPTY_RETURN_STATE {
var newContext = config.context.GetParent(i) // "pop" return state
var returnState = this.atn.states[config.context.getReturnState(i)]
var returnState = l.atn.states[config.context.getReturnState(i)]
cfg := NewLexerATNConfig2(config, returnState, newContext)
currentAltReachedAcceptState = this.closure(input, cfg, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
currentAltReachedAcceptState = l.closure(input, cfg, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
}
}
}
@ -447,9 +447,9 @@ func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig,
}
for j := 0; j < len(config.state.GetTransitions()); j++ {
var trans = config.state.GetTransitions()[j]
cfg := this.getEpsilonTarget(input, config, trans, configs, speculative, treatEofAsEpsilon)
cfg := l.getEpsilonTarget(input, config, trans, configs, speculative, treatEofAsEpsilon)
if cfg != nil {
currentAltReachedAcceptState = this.closure(input, cfg, configs,
currentAltReachedAcceptState = l.closure(input, cfg, configs,
currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
}
}
@ -457,7 +457,7 @@ func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig,
}
// side-effect: can alter configs.hasSemanticContext
func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNConfig, trans Transition,
func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNConfig, trans Transition,
configs ATNConfigSet, speculative, treatEofAsEpsilon bool) *LexerATNConfig {
var cfg *LexerATNConfig
@ -472,13 +472,13 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
panic("Precedence predicates are not supported in lexers.")
} else if trans.getSerializationType() == TransitionPREDICATE {
// Track traversing semantic predicates. If we traverse,
// we cannot add a DFA state for this "reach" computation
// we cannot add a DFA state for l "reach" computation
// because the DFA would not test the predicate again in the
// future. Rather than creating collections of semantic predicates
// like v3 and testing them on prediction, v4 will test them on the
// fly all the time using the ATN not the DFA. This is slower but
// semantically it's not used that often. One of the key elements to
// this predicate mechanism is not adding DFA states that see
// l predicate mechanism is not adding DFA states that see
// predicates immediately afterwards in the ATN. For example,
// a : ID {p1}? | ID {p2}?
@ -486,7 +486,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
// should create the start state for rule 'a' (to save start state
// competition), but should not create target of ID state. The
// collection of ATN states the following ID references includes
// states reached by traversing predicates. Since this is when we
// states reached by traversing predicates. Since l is when we
// test them, we cannot cash the DFA state target of ID.
pt := trans.(*PredicateTransition)
@ -495,7 +495,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
fmt.Println("EVAL rule " + strconv.Itoa(trans.(*PredicateTransition).ruleIndex) + ":" + strconv.Itoa(pt.predIndex))
}
configs.SetHasSemanticContext(true)
if this.evaluatePredicate(input, pt.ruleIndex, pt.predIndex, speculative) {
if l.evaluatePredicate(input, pt.ruleIndex, pt.predIndex, speculative) {
cfg = NewLexerATNConfig4(config, trans.getTarget())
}
} else if trans.getSerializationType() == TransitionACTION {
@ -505,14 +505,14 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
// TODO: if the entry rule is invoked recursively, some
// actions may be executed during the recursive call. The
// problem can appear when hasEmptyPath() is true but
// isEmpty() is false. In this case, the config needs to be
// isEmpty() is false. In l case, the config needs to be
// split into two contexts - one with just the empty path
// and another with everything but the empty path.
// Unfortunately, the current algorithm does not allow
// getEpsilonTarget to return two configurations, so
// additional modifications are needed before we can support
// the split operation.
var lexerActionExecutor = LexerActionExecutorappend(config.lexerActionExecutor, this.atn.lexerActions[trans.(*ActionTransition).actionIndex])
var lexerActionExecutor = LexerActionExecutorappend(config.lexerActionExecutor, l.atn.lexerActions[trans.(*ActionTransition).actionIndex])
cfg = NewLexerATNConfig3(config, trans.getTarget(), lexerActionExecutor)
} else {
// ignore actions in referenced rules
@ -534,7 +534,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
// Evaluate a predicate specified in the lexer.
//
// <p>If {@code speculative} is {@code true}, this method was called before
// <p>If {@code speculative} is {@code true}, l method was called before
// {@link //consume} for the Matched character. This method should call
// {@link //consume} before evaluating the predicate to ensure position
// sensitive values, including {@link Lexer//GetText}, {@link Lexer//GetLine},
@ -552,41 +552,41 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
// @return {@code true} if the specified predicate evaluates to
// {@code true}.
// /
func (this *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predIndex int, speculative bool) bool {
func (l *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predIndex int, speculative bool) bool {
// assume true if no recognizer was provided
if this.recog == nil {
if l.recog == nil {
return true
}
if !speculative {
return this.recog.Sempred(nil, ruleIndex, predIndex)
return l.recog.Sempred(nil, ruleIndex, predIndex)
}
var savedcolumn = this.column
var savedLine = this.line
var savedcolumn = l.column
var savedLine = l.line
var index = input.Index()
var marker = input.Mark()
defer func() {
this.column = savedcolumn
this.line = savedLine
l.column = savedcolumn
l.line = savedLine
input.Seek(index)
input.Release(marker)
}()
this.consume(input)
return this.recog.Sempred(nil, ruleIndex, predIndex)
l.consume(input)
return l.recog.Sempred(nil, ruleIndex, predIndex)
}
func (this *LexerATNSimulator) captureSimState(settings *SimState, input CharStream, dfaState *DFAState) {
func (l *LexerATNSimulator) captureSimState(settings *SimState, input CharStream, dfaState *DFAState) {
settings.index = input.Index()
settings.line = this.line
settings.column = this.column
settings.line = l.line
settings.column = l.column
settings.dfaState = dfaState
}
func (this *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState, cfgs ATNConfigSet) *DFAState {
func (l *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState, cfgs ATNConfigSet) *DFAState {
if to == nil && cfgs != nil {
// leading to this call, ATNConfigSet.hasSemanticContext is used as a
// marker indicating dynamic predicate evaluation makes this edge
// leading to l call, ATNConfigSet.hasSemanticContext is used as a
// marker indicating dynamic predicate evaluation makes l edge
// dependent on the specific input sequence, so the static edge in the
// DFA should be omitted. The target DFAState is still created since
// execATN has the ability to reSynchronize with the DFA state cache
@ -599,7 +599,7 @@ func (this *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState,
var suppressEdge = cfgs.HasSemanticContext()
cfgs.SetHasSemanticContext(false)
to = this.addDFAState(cfgs)
to = l.addDFAState(cfgs)
if suppressEdge {
return to
@ -622,11 +622,11 @@ func (this *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState,
return to
}
// Add a NewDFA state if there isn't one with this set of
// Add a NewDFA state if there isn't one with l set of
// configurations already. This method also detects the first
// configuration containing an ATN rule stop state. Later, when
// traversing the DFA, we will know which rule to accept.
func (this *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
var proposed = NewDFAState(-1, configs)
var firstConfigWithRuleStopState ATNConfig = nil
@ -643,10 +643,10 @@ func (this *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
if firstConfigWithRuleStopState != nil {
proposed.isAcceptState = true
proposed.lexerActionExecutor = firstConfigWithRuleStopState.(*LexerATNConfig).lexerActionExecutor
proposed.setPrediction(this.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()])
proposed.setPrediction(l.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()])
}
var hash = proposed.Hash()
var dfa = this.DecisionToDFA[this.mode]
var dfa = l.DecisionToDFA[l.mode]
var existing = dfa.GetStates()[hash]
if existing != nil {
return existing
@ -659,28 +659,28 @@ func (this *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
return newState
}
func (this *LexerATNSimulator) getDFA(mode int) *DFA {
return this.DecisionToDFA[mode]
func (l *LexerATNSimulator) getDFA(mode int) *DFA {
return l.DecisionToDFA[mode]
}
// Get the text Matched so far for the current token.
func (this *LexerATNSimulator) GetText(input CharStream) string {
func (l *LexerATNSimulator) GetText(input CharStream) string {
// index is first lookahead char, don't include.
return input.GetTextFromInterval(NewInterval(this.startIndex, input.Index()-1))
return input.GetTextFromInterval(NewInterval(l.startIndex, input.Index()-1))
}
func (this *LexerATNSimulator) consume(input CharStream) {
func (l *LexerATNSimulator) consume(input CharStream) {
var curChar = input.LA(1)
if curChar == int('\n') {
this.line += 1
this.column = 0
l.line += 1
l.column = 0
} else {
this.column += 1
l.column += 1
}
input.Consume()
}
func (this *LexerATNSimulator) GetTokenName(tt int) string {
func (l *LexerATNSimulator) GetTokenName(tt int) string {
if PortDebug {
fmt.Println(tt)
}

View File

@ -315,12 +315,12 @@ func (p *BaseParser) TriggerExitRuleEvent() {
}
}
func (this *BaseParser) GetInterpreter() *ParserATNSimulator {
return this.Interpreter
func (p *BaseParser) GetInterpreter() *ParserATNSimulator {
return p.Interpreter
}
func (this *BaseParser) GetATN() *ATN {
return this.Interpreter.atn
func (p *BaseParser) GetATN() *ATN {
return p.Interpreter.atn
}
func (p *BaseParser) GetTokenFactory() TokenFactory {
@ -664,9 +664,9 @@ func (p *BaseParser) GetRuleIndex(ruleName string) int {
//
// this very useful for error messages.
func (this *BaseParser) GetRuleInvocationStack(p ParserRuleContext) []string {
func (b *BaseParser) GetRuleInvocationStack(p ParserRuleContext) []string {
if p == nil {
p = this._ctx
p = b._ctx
}
var stack = make([]string, 0)
for p != nil {
@ -675,7 +675,7 @@ func (this *BaseParser) GetRuleInvocationStack(p ParserRuleContext) []string {
if ruleIndex < 0 {
stack = append(stack, "n/a")
} else {
stack = append(stack, this.GetRuleNames()[ruleIndex])
stack = append(stack, b.GetRuleNames()[ruleIndex])
}
vp := p.GetParent()

File diff suppressed because it is too large Load Diff

View File

@ -76,12 +76,12 @@ func (prc *BaseParserRuleContext) CopyFrom(ctx *BaseParserRuleContext) {
prc.stop = ctx.stop
}
func (this *BaseParserRuleContext) GetText() string {
if this.GetChildCount() == 0 {
func (b *BaseParserRuleContext) GetText() string {
if b.GetChildCount() == 0 {
return ""
} else {
var s string
for _, child := range this.children {
for _, child := range b.children {
s += child.(ParseTree).GetText()
}
@ -171,16 +171,16 @@ func (prc *BaseParserRuleContext) GetChildOfType(i int, childType reflect.Type)
}
}
func (this *BaseParserRuleContext) ToStringTree(ruleNames []string, recog Recognizer) string {
return TreesStringTree(this, ruleNames, recog)
func (b *BaseParserRuleContext) ToStringTree(ruleNames []string, recog Recognizer) string {
return TreesStringTree(b, ruleNames, recog)
}
func (prc *BaseParserRuleContext) GetRuleContext() RuleContext {
return prc
}
func (this *BaseParserRuleContext) Accept(visitor ParseTreeVisitor) interface{} {
return visitor.VisitChildren(this)
func (b *BaseParserRuleContext) Accept(visitor ParseTreeVisitor) interface{} {
return visitor.VisitChildren(b)
}
func (prc *BaseParserRuleContext) SetStart(t Token) {
@ -300,12 +300,12 @@ func (prc *BaseParserRuleContext) GetSourceInterval() *Interval {
//need to manage circular dependencies, so export now
// Print out a whole tree, not just a node, in LISP format
// (root child1 .. childN). Print just a node if this is a leaf.
// (root child1 .. childN). Print just a node if b is a leaf.
//
func (this *BaseParserRuleContext) String(ruleNames []string, stop RuleContext) string {
func (b *BaseParserRuleContext) String(ruleNames []string, stop RuleContext) string {
var p ParserRuleContext = this
var p ParserRuleContext = b
var s = "["
for p != nil && p != stop {
if ruleNames == nil {

View File

@ -68,12 +68,12 @@ var BasePredictionContextid = BasePredictionContextglobalNodeCount
// </pre>
//
func (this *BasePredictionContext) isEmpty() bool {
func (b *BasePredictionContext) isEmpty() bool {
return false
}
func (this *BasePredictionContext) Hash() string {
return this.cachedHashString
func (b *BasePredictionContext) Hash() string {
return b.cachedHashString
}
func calculateHashString(parent PredictionContext, returnState int) string {
@ -102,24 +102,24 @@ func NewPredictionContextCache() *PredictionContextCache {
// return that one instead and do not add a Newcontext to the cache.
// Protect shared cache from unsafe thread access.
//
func (this *PredictionContextCache) add(ctx PredictionContext) PredictionContext {
func (p *PredictionContextCache) add(ctx PredictionContext) PredictionContext {
if ctx == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY
}
var existing = this.cache[ctx]
var existing = p.cache[ctx]
if existing != nil {
return existing
}
this.cache[ctx] = ctx
p.cache[ctx] = ctx
return ctx
}
func (this *PredictionContextCache) Get(ctx PredictionContext) PredictionContext {
return this.cache[ctx]
func (p *PredictionContextCache) Get(ctx PredictionContext) PredictionContext {
return p.cache[ctx]
}
func (this *PredictionContextCache) length() int {
return len(this.cache)
func (p *PredictionContextCache) length() int {
return len(p.cache)
}
type SingletonPredictionContext interface {
@ -159,64 +159,64 @@ func SingletonBasePredictionContextCreate(parent PredictionContext, returnState
}
}
func (this *BaseSingletonPredictionContext) length() int {
func (b *BaseSingletonPredictionContext) length() int {
return 1
}
func (this *BaseSingletonPredictionContext) GetParent(index int) PredictionContext {
return this.parentCtx
func (b *BaseSingletonPredictionContext) GetParent(index int) PredictionContext {
return b.parentCtx
}
func (this *BaseSingletonPredictionContext) getReturnState(index int) int {
return this.returnState
func (b *BaseSingletonPredictionContext) getReturnState(index int) int {
return b.returnState
}
func (this *BaseSingletonPredictionContext) hasEmptyPath() bool {
return this.returnState == BasePredictionContextEMPTY_RETURN_STATE
func (b *BaseSingletonPredictionContext) hasEmptyPath() bool {
return b.returnState == BasePredictionContextEMPTY_RETURN_STATE
}
func (this *BaseSingletonPredictionContext) equals(other PredictionContext) bool {
if this == other {
func (b *BaseSingletonPredictionContext) equals(other PredictionContext) bool {
if b == other {
return true
} else if _, ok := other.(*BaseSingletonPredictionContext); !ok {
return false
} else if this.Hash() != other.Hash() {
} else if b.Hash() != other.Hash() {
return false // can't be same if hash is different
} else {
otherP := other.(*BaseSingletonPredictionContext)
if this.returnState != other.getReturnState(0) {
if b.returnState != other.getReturnState(0) {
return false
} else if this.parentCtx == nil {
} else if b.parentCtx == nil {
return otherP.parentCtx == nil
} else {
return this.parentCtx.equals(otherP.parentCtx)
return b.parentCtx.equals(otherP.parentCtx)
}
}
}
func (this *BaseSingletonPredictionContext) Hash() string {
return this.cachedHashString
func (b *BaseSingletonPredictionContext) Hash() string {
return b.cachedHashString
}
func (this *BaseSingletonPredictionContext) String() string {
func (b *BaseSingletonPredictionContext) String() string {
var up string
if this.parentCtx == nil {
if b.parentCtx == nil {
up = ""
} else {
up = this.parentCtx.String()
up = b.parentCtx.String()
}
if len(up) == 0 {
if this.returnState == BasePredictionContextEMPTY_RETURN_STATE {
if b.returnState == BasePredictionContextEMPTY_RETURN_STATE {
return "$"
} else {
return strconv.Itoa(this.returnState)
return strconv.Itoa(b.returnState)
}
} else {
return strconv.Itoa(this.returnState) + " " + up
return strconv.Itoa(b.returnState) + " " + up
}
}
@ -235,23 +235,23 @@ func NewEmptyPredictionContext() *EmptyPredictionContext {
return p
}
func (this *EmptyPredictionContext) isEmpty() bool {
func (e *EmptyPredictionContext) isEmpty() bool {
return true
}
func (this *EmptyPredictionContext) GetParent(index int) PredictionContext {
func (e *EmptyPredictionContext) GetParent(index int) PredictionContext {
return nil
}
func (this *EmptyPredictionContext) getReturnState(index int) int {
return this.returnState
func (e *EmptyPredictionContext) getReturnState(index int) int {
return e.returnState
}
func (this *EmptyPredictionContext) equals(other PredictionContext) bool {
return this == other
func (e *EmptyPredictionContext) equals(other PredictionContext) bool {
return e == other
}
func (this *EmptyPredictionContext) String() string {
func (e *EmptyPredictionContext) String() string {
return "$"
}
@ -285,55 +285,55 @@ func (c *ArrayPredictionContext) GetReturnStates() []int {
return c.returnStates
}
func (this *ArrayPredictionContext) hasEmptyPath() bool {
return this.getReturnState(this.length()-1) == BasePredictionContextEMPTY_RETURN_STATE
func (a *ArrayPredictionContext) hasEmptyPath() bool {
return a.getReturnState(a.length()-1) == BasePredictionContextEMPTY_RETURN_STATE
}
func (this *ArrayPredictionContext) isEmpty() bool {
func (a *ArrayPredictionContext) isEmpty() bool {
// since EMPTY_RETURN_STATE can only appear in the last position, we
// don't need to verify that size==1
return this.returnStates[0] == BasePredictionContextEMPTY_RETURN_STATE
return a.returnStates[0] == BasePredictionContextEMPTY_RETURN_STATE
}
func (this *ArrayPredictionContext) length() int {
return len(this.returnStates)
func (a *ArrayPredictionContext) length() int {
return len(a.returnStates)
}
func (this *ArrayPredictionContext) GetParent(index int) PredictionContext {
return this.parents[index]
func (a *ArrayPredictionContext) GetParent(index int) PredictionContext {
return a.parents[index]
}
func (this *ArrayPredictionContext) getReturnState(index int) int {
return this.returnStates[index]
func (a *ArrayPredictionContext) getReturnState(index int) int {
return a.returnStates[index]
}
func (this *ArrayPredictionContext) equals(other PredictionContext) bool {
func (a *ArrayPredictionContext) equals(other PredictionContext) bool {
if _, ok := other.(*ArrayPredictionContext); !ok {
return false
} else if this.cachedHashString != other.Hash() {
} else if a.cachedHashString != other.Hash() {
return false // can't be same if hash is different
} else {
otherP := other.(*ArrayPredictionContext)
return &this.returnStates == &otherP.returnStates && &this.parents == &otherP.parents
return &a.returnStates == &otherP.returnStates && &a.parents == &otherP.parents
}
}
func (this *ArrayPredictionContext) String() string {
if this.isEmpty() {
func (a *ArrayPredictionContext) String() string {
if a.isEmpty() {
return "[]"
} else {
var s = "["
for i := 0; i < len(this.returnStates); i++ {
for i := 0; i < len(a.returnStates); i++ {
if i > 0 {
s = s + ", "
}
if this.returnStates[i] == BasePredictionContextEMPTY_RETURN_STATE {
if a.returnStates[i] == BasePredictionContextEMPTY_RETURN_STATE {
s = s + "$"
continue
}
s = s + strconv.Itoa(this.returnStates[i])
if this.parents[i] != nil {
s = s + " " + this.parents[i].String()
s = s + strconv.Itoa(a.returnStates[i])
if a.parents[i] != nil {
s = s + " " + a.parents[i].String()
} else {
s = s + "nil"
}

View File

@ -44,55 +44,55 @@ func NewBaseRecognizer() *BaseRecognizer {
var tokenTypeMapCache = make(map[string]int)
var ruleIndexMapCache = make(map[string]int)
func (this *BaseRecognizer) checkVersion(toolVersion string) {
func (b *BaseRecognizer) checkVersion(toolVersion string) {
var runtimeVersion = "4.5.2"
if runtimeVersion != toolVersion {
fmt.Println("ANTLR runtime and generated code versions disagree: " + runtimeVersion + "!=" + toolVersion)
}
}
func (this *BaseRecognizer) Action(context RuleContext, ruleIndex, actionIndex int) {
func (b *BaseRecognizer) Action(context RuleContext, ruleIndex, actionIndex int) {
panic("action not implemented on Recognizer!")
}
func (this *BaseRecognizer) AddErrorListener(listener ErrorListener) {
this._listeners = append(this._listeners, listener)
func (b *BaseRecognizer) AddErrorListener(listener ErrorListener) {
b._listeners = append(b._listeners, listener)
}
func (this *BaseRecognizer) RemoveErrorListeners() {
this._listeners = make([]ErrorListener, 0)
func (b *BaseRecognizer) RemoveErrorListeners() {
b._listeners = make([]ErrorListener, 0)
}
func (this *BaseRecognizer) GetRuleNames() []string {
return this.RuleNames
func (b *BaseRecognizer) GetRuleNames() []string {
return b.RuleNames
}
func (this *BaseRecognizer) GetTokenNames() []string {
return this.LiteralNames
func (b *BaseRecognizer) GetTokenNames() []string {
return b.LiteralNames
}
func (this *BaseRecognizer) GetSymbolicNames() []string {
return this.SymbolicNames
func (b *BaseRecognizer) GetSymbolicNames() []string {
return b.SymbolicNames
}
func (this *BaseRecognizer) GetLiteralNames() []string {
return this.LiteralNames
func (b *BaseRecognizer) GetLiteralNames() []string {
return b.LiteralNames
}
func (this *BaseRecognizer) GetState() int {
return this.state
func (b *BaseRecognizer) GetState() int {
return b.state
}
func (this *BaseRecognizer) SetState(v int) {
func (b *BaseRecognizer) SetState(v int) {
if PortDebug {
fmt.Println("SETTING STATE " + strconv.Itoa(v) + " from " + strconv.Itoa(this.state))
fmt.Println("SETTING STATE " + strconv.Itoa(v) + " from " + strconv.Itoa(b.state))
}
this.state = v
b.state = v
}
//func (this *Recognizer) GetTokenTypeMap() {
// var tokenNames = this.GetTokenNames()
//func (b *Recognizer) GetTokenTypeMap() {
// var tokenNames = b.GetTokenNames()
// if (tokenNames==nil) {
// panic("The current recognizer does not provide a list of token names.")
// }
@ -109,10 +109,10 @@ func (this *BaseRecognizer) SetState(v int) {
//
// <p>Used for XPath and tree pattern compilation.</p>
//
func (this *BaseRecognizer) GetRuleIndexMap() map[string]int {
func (b *BaseRecognizer) GetRuleIndexMap() map[string]int {
panic("Method not defined!")
// var ruleNames = this.GetRuleNames()
// var ruleNames = b.GetRuleNames()
// if (ruleNames==nil) {
// panic("The current recognizer does not provide a list of rule names.")
// }
@ -125,9 +125,9 @@ func (this *BaseRecognizer) GetRuleIndexMap() map[string]int {
// return result
}
func (this *BaseRecognizer) GetTokenType(tokenName string) int {
func (b *BaseRecognizer) GetTokenType(tokenName string) int {
panic("Method not defined!")
// var ttype = this.GetTokenTypeMap()[tokenName]
// var ttype = b.GetTokenTypeMap()[tokenName]
// if (ttype !=nil) {
// return ttype
// } else {
@ -135,7 +135,7 @@ func (this *BaseRecognizer) GetTokenType(tokenName string) int {
// }
}
//func (this *Recognizer) GetTokenTypeMap() map[string]int {
//func (b *Recognizer) GetTokenTypeMap() map[string]int {
// Vocabulary vocabulary = getVocabulary();
//
// Synchronized (tokenTypeMapCache) {
@ -164,7 +164,7 @@ func (this *BaseRecognizer) GetTokenType(tokenName string) int {
//}
// What is the error header, normally line/character position information?//
func (this *BaseRecognizer) GetErrorHeader(e RecognitionException) string {
func (b *BaseRecognizer) GetErrorHeader(e RecognitionException) string {
var line = e.GetOffendingToken().GetLine()
var column = e.GetOffendingToken().GetColumn()
return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
@ -183,7 +183,7 @@ func (this *BaseRecognizer) GetErrorHeader(e RecognitionException) string {
// feature when necessary. For example, see
// {@link DefaultErrorStrategy//GetTokenErrorDisplay}.
//
func (this *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
func (b *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
if t == nil {
return "<no token>"
}
@ -202,16 +202,16 @@ func (this *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
return "'" + s + "'"
}
func (this *BaseRecognizer) GetErrorListenerDispatch() ErrorListener {
return NewProxyErrorListener(this._listeners)
func (b *BaseRecognizer) GetErrorListenerDispatch() ErrorListener {
return NewProxyErrorListener(b._listeners)
}
// subclass needs to override these if there are sempreds or actions
// that the ATN interp needs to execute
func (this *BaseRecognizer) Sempred(localctx RuleContext, ruleIndex int, actionIndex int) bool {
func (b *BaseRecognizer) Sempred(localctx RuleContext, ruleIndex int, actionIndex int) bool {
return true
}
func (this *BaseRecognizer) Precpred(localctx RuleContext, precedence int) bool {
func (b *BaseRecognizer) Precpred(localctx RuleContext, precedence int) bool {
return true
}

View File

@ -5,9 +5,9 @@ package antlr
// naturally the invoking state is not valid. The parent link
// provides a chain upwards from the current rule invocation to the root
// of the invocation tree, forming a stack. We actually carry no
// information about the rule associated with this context (except
// information about the rule associated with this context (except
// when parsing). We keep only the state number of the invoking state from
// the ATN submachine that invoked this. Contrast this with the s
// the ATN submachine that invoked this. Contrast this with the s
// pointer inside ParserRuleContext that tracks the current state
// being "executed" for the current rule.
//
@ -43,12 +43,12 @@ func NewBaseRuleContext(parent RuleContext, invokingState int) *BaseRuleContext
rn := new(BaseRuleContext)
// What context invoked this rule?
// What context invoked this rule?
rn.parentCtx = parent
// What state invoked the rule associated with this context?
// What state invoked the rule associated with this context?
// The "return address" is the followState of invokingState
// If parent is nil, this should be -1.
// If parent is nil, this should be -1.
if parent == nil {
rn.invokingState = -1
} else {
@ -58,40 +58,40 @@ func NewBaseRuleContext(parent RuleContext, invokingState int) *BaseRuleContext
return rn
}
func (this *BaseRuleContext) GetBaseRuleContext() *BaseRuleContext {
return this
func (b *BaseRuleContext) GetBaseRuleContext() *BaseRuleContext {
return b
}
func (this *BaseRuleContext) SetParent(v Tree) {
this.parentCtx = v.(RuleContext)
func (b *BaseRuleContext) SetParent(v Tree) {
b.parentCtx = v.(RuleContext)
}
func (this *BaseRuleContext) GetInvokingState() int {
return this.invokingState
func (b *BaseRuleContext) GetInvokingState() int {
return b.invokingState
}
func (this *BaseRuleContext) SetInvokingState(t int) {
this.invokingState = t
func (b *BaseRuleContext) SetInvokingState(t int) {
b.invokingState = t
}
func (this *BaseRuleContext) GetRuleIndex() int {
return this.RuleIndex
func (b *BaseRuleContext) GetRuleIndex() int {
return b.RuleIndex
}
// A context is empty if there is no invoking state meaning nobody call
// current context.
func (this *BaseRuleContext) IsEmpty() bool {
return this.invokingState == -1
func (b *BaseRuleContext) IsEmpty() bool {
return b.invokingState == -1
}
// Return the combined text of all child nodes. This method only considers
// tokens which have been added to the parse tree.
// <p>
// Since tokens on hidden channels (e.g. whitespace or comments) are not
// added to the parse trees, they will not appear in the output of this
// added to the parse trees, they will not appear in the output of this
// method.
//
func (this *BaseRuleContext) GetParent() Tree {
return this.parentCtx
func (b *BaseRuleContext) GetParent() Tree {
return b.parentCtx
}

View File

@ -74,39 +74,39 @@ func NewPredicate(ruleIndex, predIndex int, isCtxDependent bool) *Predicate {
var SemanticContextNone SemanticContext = NewPredicate(-1, -1, false)
func (this *Predicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
return this
func (p *Predicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
return p
}
func (this *Predicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
func (p *Predicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
var localctx RuleContext = nil
if this.isCtxDependent {
if p.isCtxDependent {
localctx = outerContext
}
return parser.Sempred(localctx, this.ruleIndex, this.predIndex)
return parser.Sempred(localctx, p.ruleIndex, p.predIndex)
}
func (this *Predicate) Hash() string {
return strconv.Itoa(this.ruleIndex) + "/" + strconv.Itoa(this.predIndex) + "/" + fmt.Sprint(this.isCtxDependent)
func (p *Predicate) Hash() string {
return strconv.Itoa(p.ruleIndex) + "/" + strconv.Itoa(p.predIndex) + "/" + fmt.Sprint(p.isCtxDependent)
}
func (this *Predicate) equals(other interface{}) bool {
if this == other {
func (p *Predicate) equals(other interface{}) bool {
if p == other {
return true
} else if _, ok := other.(*Predicate); !ok {
return false
} else {
return this.ruleIndex == other.(*Predicate).ruleIndex &&
this.predIndex == other.(*Predicate).predIndex &&
this.isCtxDependent == other.(*Predicate).isCtxDependent
return p.ruleIndex == other.(*Predicate).ruleIndex &&
p.predIndex == other.(*Predicate).predIndex &&
p.isCtxDependent == other.(*Predicate).isCtxDependent
}
}
func (this *Predicate) String() string {
return "{" + strconv.Itoa(this.ruleIndex) + ":" + strconv.Itoa(this.predIndex) + "}?"
func (p *Predicate) String() string {
return "{" + strconv.Itoa(p.ruleIndex) + ":" + strconv.Itoa(p.predIndex) + "}?"
}
type PrecedencePredicate struct {
@ -115,44 +115,44 @@ type PrecedencePredicate struct {
func NewPrecedencePredicate(precedence int) *PrecedencePredicate {
this := new(PrecedencePredicate)
this.precedence = precedence
p := new(PrecedencePredicate)
p.precedence = precedence
return this
return p
}
func (this *PrecedencePredicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
return parser.Precpred(outerContext, this.precedence)
func (p *PrecedencePredicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
return parser.Precpred(outerContext, p.precedence)
}
func (this *PrecedencePredicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
if parser.Precpred(outerContext, this.precedence) {
func (p *PrecedencePredicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
if parser.Precpred(outerContext, p.precedence) {
return SemanticContextNone
} else {
return nil
}
}
func (this *PrecedencePredicate) compareTo(other *PrecedencePredicate) int {
return this.precedence - other.precedence
func (p *PrecedencePredicate) compareTo(other *PrecedencePredicate) int {
return p.precedence - other.precedence
}
func (this *PrecedencePredicate) Hash() string {
func (p *PrecedencePredicate) Hash() string {
return "31"
}
func (this *PrecedencePredicate) equals(other interface{}) bool {
if this == other {
func (p *PrecedencePredicate) equals(other interface{}) bool {
if p == other {
return true
} else if _, ok := other.(*PrecedencePredicate); !ok {
return false
} else {
return this.precedence == other.(*PrecedencePredicate).precedence
return p.precedence == other.(*PrecedencePredicate).precedence
}
}
func (this *PrecedencePredicate) String() string {
return "{" + strconv.Itoa(this.precedence) + ">=prec}?"
func (p *PrecedencePredicate) String() string {
return "{" + strconv.Itoa(p.precedence) + ">=prec}?"
}
func PrecedencePredicatefilterPrecedencePredicates(set *Set) []*PrecedencePredicate {
@ -212,20 +212,20 @@ func NewAND(a, b SemanticContext) *AND {
vs[i] = v.(SemanticContext)
}
this := new(AND)
this.opnds = opnds
and := new(AND)
and.opnds = opnds
return this
return and
}
func (this *AND) equals(other interface{}) bool {
if this == other {
func (a *AND) equals(other interface{}) bool {
if a == other {
return true
} else if _, ok := other.(*AND); !ok {
return false
} else {
for i, v := range other.(*AND).opnds {
if !this.opnds[i].equals(v) {
if !a.opnds[i].equals(v) {
return false
}
}
@ -233,32 +233,32 @@ func (this *AND) equals(other interface{}) bool {
}
}
func (this *AND) Hash() string {
return fmt.Sprint(this.opnds) + "/AND"
func (a *AND) Hash() string {
return fmt.Sprint(a.opnds) + "/AND"
}
//
// {@inheritDoc}
//
// <p>
// The evaluation of predicates by this context is short-circuiting, but
// The evaluation of predicates by this context is short-circuiting, but
// unordered.</p>
//
func (this *AND) evaluate(parser Recognizer, outerContext RuleContext) bool {
for i := 0; i < len(this.opnds); i++ {
if !this.opnds[i].evaluate(parser, outerContext) {
func (a *AND) evaluate(parser Recognizer, outerContext RuleContext) bool {
for i := 0; i < len(a.opnds); i++ {
if !a.opnds[i].evaluate(parser, outerContext) {
return false
}
}
return true
}
func (this *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
func (a *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
var differs = false
var operands = make([]SemanticContext, 0)
for i := 0; i < len(this.opnds); i++ {
var context = this.opnds[i]
for i := 0; i < len(a.opnds); i++ {
var context = a.opnds[i]
var evaluated = context.evalPrecedence(parser, outerContext)
differs = differs || (evaluated != context)
if evaluated == nil {
@ -270,7 +270,7 @@ func (this *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) Sem
}
}
if !differs {
return this
return a
}
if len(operands) == 0 {
@ -291,10 +291,10 @@ func (this *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) Sem
return result
}
func (this *AND) String() string {
func (a *AND) String() string {
var s = ""
for _, o := range this.opnds {
for _, o := range a.opnds {
s += "&& " + o.String()
}
@ -353,20 +353,20 @@ func NewOR(a, b SemanticContext) *OR {
vs[i] = v.(SemanticContext)
}
this := new(OR)
this.opnds = opnds
o := new(OR)
o.opnds = opnds
return this
return o
}
func (this *OR) equals(other interface{}) bool {
if this == other {
func (o *OR) equals(other interface{}) bool {
if o == other {
return true
} else if _, ok := other.(*OR); !ok {
return false
} else {
for i, v := range other.(*OR).opnds {
if !this.opnds[i].equals(v) {
if !o.opnds[i].equals(v) {
return false
}
}
@ -374,28 +374,28 @@ func (this *OR) equals(other interface{}) bool {
}
}
func (this *OR) Hash() string {
return fmt.Sprint(this.opnds) + "/OR"
func (o *OR) Hash() string {
return fmt.Sprint(o.opnds) + "/OR"
}
// <p>
// The evaluation of predicates by this context is short-circuiting, but
// The evaluation of predicates by this context is short-circuiting, but
// unordered.</p>
//
func (this *OR) evaluate(parser Recognizer, outerContext RuleContext) bool {
for i := 0; i < len(this.opnds); i++ {
if this.opnds[i].evaluate(parser, outerContext) {
func (o *OR) evaluate(parser Recognizer, outerContext RuleContext) bool {
for i := 0; i < len(o.opnds); i++ {
if o.opnds[i].evaluate(parser, outerContext) {
return true
}
}
return false
}
func (this *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
func (o *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
var differs = false
var operands = make([]SemanticContext, 0)
for i := 0; i < len(this.opnds); i++ {
var context = this.opnds[i]
for i := 0; i < len(o.opnds); i++ {
var context = o.opnds[i]
var evaluated = context.evalPrecedence(parser, outerContext)
differs = differs || (evaluated != context)
if evaluated == SemanticContextNone {
@ -407,7 +407,7 @@ func (this *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) Sema
}
}
if !differs {
return this
return o
}
if len(operands) == 0 {
// all elements were false, so the OR context is false
@ -426,10 +426,10 @@ func (this *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) Sema
return result
}
func (this *OR) String() string {
func (o *OR) String() string {
var s = ""
for _, o := range this.opnds {
for _, o := range o.opnds {
s += "|| " + o.String()
}

View File

@ -69,48 +69,48 @@ const (
TokenHiddenChannel = 1
)
func (this *BaseToken) GetChannel() int {
return this.channel
func (b *BaseToken) GetChannel() int {
return b.channel
}
func (this *BaseToken) GetStart() int {
return this.start
func (b *BaseToken) GetStart() int {
return b.start
}
func (this *BaseToken) GetStop() int {
return this.stop
func (b *BaseToken) GetStop() int {
return b.stop
}
func (this *BaseToken) GetLine() int {
return this.line
func (b *BaseToken) GetLine() int {
return b.line
}
func (this *BaseToken) GetColumn() int {
return this.column
func (b *BaseToken) GetColumn() int {
return b.column
}
func (this *BaseToken) GetTokenType() int {
return this.tokenType
func (b *BaseToken) GetTokenType() int {
return b.tokenType
}
func (this *BaseToken) GetSource() *TokenSourceCharStreamPair {
return this.source
func (b *BaseToken) GetSource() *TokenSourceCharStreamPair {
return b.source
}
func (this *BaseToken) GetTokenIndex() int {
return this.tokenIndex
func (b *BaseToken) GetTokenIndex() int {
return b.tokenIndex
}
func (this *BaseToken) SetTokenIndex(v int) {
this.tokenIndex = v
func (b *BaseToken) SetTokenIndex(v int) {
b.tokenIndex = v
}
func (this *BaseToken) GetTokenSource() TokenSource {
return this.source.tokenSource
func (b *BaseToken) GetTokenSource() TokenSource {
return b.source.tokenSource
}
func (this *BaseToken) GetInputStream() CharStream {
return this.source.charStream
func (b *BaseToken) GetInputStream() CharStream {
return b.source.charStream
}
type CommonToken struct {
@ -164,28 +164,28 @@ func (ct *CommonToken) clone() *CommonToken {
return t
}
func (this *CommonToken) GetText() string {
if this._text != "" {
return this._text
func (c *CommonToken) GetText() string {
if c._text != "" {
return c._text
}
var input = this.GetInputStream()
var input = c.GetInputStream()
if input == nil {
return ""
}
var n = input.Size()
if this.start < n && this.stop < n {
return input.GetTextFromInterval(NewInterval(this.start, this.stop))
if c.start < n && c.stop < n {
return input.GetTextFromInterval(NewInterval(c.start, c.stop))
} else {
return "<EOF>"
}
}
func (this *CommonToken) SetText(text string) {
this._text = text
func (c *CommonToken) SetText(text string) {
c._text = text
}
func (this *CommonToken) String() string {
var txt = this.GetText()
func (c *CommonToken) String() string {
var txt = c.GetText()
if txt != "" {
txt = strings.Replace(txt, "\n", "\\n", -1)
txt = strings.Replace(txt, "\r", "\\r", -1)
@ -195,13 +195,13 @@ func (this *CommonToken) String() string {
}
var ch string
if this.channel > 0 {
ch = ",channel=" + strconv.Itoa(this.channel)
if c.channel > 0 {
ch = ",channel=" + strconv.Itoa(c.channel)
} else {
ch = ""
}
return "[@" + strconv.Itoa(this.tokenIndex) + "," + strconv.Itoa(this.start) + ":" + strconv.Itoa(this.stop) + "='" +
txt + "',<" + strconv.Itoa(this.tokenType) + ">" +
ch + "," + strconv.Itoa(this.line) + ":" + strconv.Itoa(this.column) + "]"
return "[@" + strconv.Itoa(c.tokenIndex) + "," + strconv.Itoa(c.start) + ":" + strconv.Itoa(c.stop) + "='" +
txt + "',<" + strconv.Itoa(c.tokenType) + ">" +
ch + "," + strconv.Itoa(c.line) + ":" + strconv.Itoa(c.column) + "]"
}

View File

@ -12,17 +12,17 @@ func NewTraceListener(parser *BaseParser) *TraceListener {
return tl
}
func (this *TraceListener) VisitErrorNode(_ ErrorNode) {
func (t *TraceListener) VisitErrorNode(_ ErrorNode) {
}
func (this *TraceListener) EnterEveryRule(ctx ParserRuleContext) {
fmt.Println("enter " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).GetText())
func (t *TraceListener) EnterEveryRule(ctx ParserRuleContext) {
fmt.Println("enter " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser._input.LT(1).GetText())
}
func (this *TraceListener) VisitTerminal(node TerminalNode) {
fmt.Println("consume " + fmt.Sprint(node.GetSymbol()) + " rule " + this.parser.GetRuleNames()[this.parser._ctx.GetRuleIndex()])
func (t *TraceListener) VisitTerminal(node TerminalNode) {
fmt.Println("consume " + fmt.Sprint(node.GetSymbol()) + " rule " + t.parser.GetRuleNames()[t.parser._ctx.GetRuleIndex()])
}
func (this *TraceListener) ExitEveryRule(ctx ParserRuleContext) {
fmt.Println("exit " + this.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).GetText())
func (t *TraceListener) ExitEveryRule(ctx ParserRuleContext) {
fmt.Println("exit " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser._input.LT(1).GetText())
}

View File

@ -114,43 +114,43 @@ func hashCode(s string) string {
return fmt.Sprint(h.Sum32())
}
func (this *Set) length() int {
return len(this.data)
func (s *Set) length() int {
return len(s.data)
}
func (this *Set) add(value interface{}) interface{} {
func (s *Set) add(value interface{}) interface{} {
var hash = this.hashFunction(value)
var hash = s.hashFunction(value)
var key = "hash_" + hashCode(hash)
values := this.data[key]
values := s.data[key]
if this.data[key] != nil {
if s.data[key] != nil {
for i := 0; i < len(values); i++ {
if this.equalsFunction(value, values[i]) {
if s.equalsFunction(value, values[i]) {
return values[i]
}
}
this.data[key] = append(this.data[key], value)
s.data[key] = append(s.data[key], value)
return value
}
this.data[key] = []interface{}{value}
s.data[key] = []interface{}{value}
return value
}
func (this *Set) contains(value interface{}) bool {
func (s *Set) contains(value interface{}) bool {
hash := this.hashFunction(value)
hash := s.hashFunction(value)
key := "hash_" + hashCode(hash)
values := this.data[key]
values := s.data[key]
if this.data[key] != nil {
if s.data[key] != nil {
for i := 0; i < len(values); i++ {
if this.equalsFunction(value, values[i]) {
if s.equalsFunction(value, values[i]) {
return true
}
}
@ -158,28 +158,28 @@ func (this *Set) contains(value interface{}) bool {
return false
}
func (this *Set) values() []interface{} {
func (s *Set) values() []interface{} {
var l = make([]interface{}, 0)
for key, _ := range this.data {
for key, _ := range s.data {
if strings.Index(key, "hash_") == 0 {
l = append(l, this.data[key]...)
l = append(l, s.data[key]...)
}
}
return l
}
func (this *Set) String() string {
func (s *Set) String() string {
s := ""
r := ""
for _, av := range this.data {
for _, av := range s.data {
for _, v := range av {
s += fmt.Sprint(v)
r += fmt.Sprint(v)
}
}
return s
return r
}
type BitSet struct {
@ -192,42 +192,42 @@ func NewBitSet() *BitSet {
return b
}
func (this *BitSet) add(value int) {
this.data[value] = true
func (b *BitSet) add(value int) {
b.data[value] = true
}
func (this *BitSet) clear(index int) {
delete(this.data, index)
func (b *BitSet) clear(index int) {
delete(b.data, index)
}
func (this *BitSet) or(set *BitSet) {
func (b *BitSet) or(set *BitSet) {
for k, _ := range set.data {
this.add(k)
b.add(k)
}
}
func (this *BitSet) remove(value int) {
delete(this.data, value)
func (b *BitSet) remove(value int) {
delete(b.data, value)
}
func (this *BitSet) contains(value int) bool {
return this.data[value] == true
func (b *BitSet) contains(value int) bool {
return b.data[value] == true
}
func (this *BitSet) values() []int {
ks := make([]int, len(this.data))
func (b *BitSet) values() []int {
ks := make([]int, len(b.data))
i := 0
for k, _ := range this.data {
for k, _ := range b.data {
ks[i] = k
i++
}
return ks
}
func (this *BitSet) minValue() int {
func (b *BitSet) minValue() int {
min := 2147483647
for k, _ := range this.data {
for k, _ := range b.data {
if k < min {
min = k
}
@ -236,17 +236,17 @@ func (this *BitSet) minValue() int {
return min
}
func (this *BitSet) equals(other interface{}) bool {
func (b *BitSet) equals(other interface{}) bool {
otherBitSet, ok := other.(*BitSet)
if !ok {
return false
}
if len(this.data) != len(otherBitSet.data) {
if len(b.data) != len(otherBitSet.data) {
return false
}
for k, v := range this.data {
for k, v := range b.data {
if otherBitSet.data[k] != v {
return false
}
@ -255,12 +255,12 @@ func (this *BitSet) equals(other interface{}) bool {
return true
}
func (this *BitSet) length() int {
return len(this.data)
func (b *BitSet) length() int {
return len(b.data)
}
func (this *BitSet) String() string {
vals := this.values()
func (b *BitSet) String() string {
vals := b.values()
valsS := make([]string, len(vals))
for i, val := range vals {
@ -279,20 +279,20 @@ func NewAltDict() *AltDict {
return d
}
func (this *AltDict) Get(key string) interface{} {
func (a *AltDict) Get(key string) interface{} {
key = "k-" + key
return this.data[key]
return a.data[key]
}
func (this *AltDict) put(key string, value interface{}) {
func (a *AltDict) put(key string, value interface{}) {
key = "k-" + key
this.data[key] = value
a.data[key] = value
}
func (this *AltDict) values() []interface{} {
vs := make([]interface{}, len(this.data))
func (a *AltDict) values() []interface{} {
vs := make([]interface{}, len(a.data))
i := 0
for _, v := range this.data {
for _, v := range a.data {
vs[i] = v
i++
}
@ -309,25 +309,25 @@ func NewDoubleDict() *DoubleDict {
return dd
}
func (this *DoubleDict) Get(a string, b string) interface{} {
var d = this.data[a]
func (d *DoubleDict) Get(a string, b string) interface{} {
var data = d.data[a]
if d == nil {
if data == nil {
return nil
}
return d[b]
return data[b]
}
func (this *DoubleDict) set(a, b string, o interface{}) {
var d = this.data[a]
func (d *DoubleDict) set(a, b string, o interface{}) {
var data = d.data[a]
if d == nil {
d = make(map[string]interface{})
this.data[a] = d
if data == nil {
data = make(map[string]interface{})
d.data[a] = data
}
d[b] = o
data[b] = o
}
func EscapeWhitespace(s string, escapeSpaces bool) string {