Cleanup var statements (#73)

* Cleanup var statements

* Fix type decl
This commit is contained in:
Peter Boyer 2016-10-17 12:57:26 -04:00 committed by GitHub
parent 2444386091
commit adef6f690a
32 changed files with 545 additions and 561 deletions

View File

@ -132,21 +132,21 @@ func (a *ATN) getExpectedTokens(stateNumber int, ctx RuleContext) *IntervalSet {
panic("Invalid state number.")
}
var s = a.states[stateNumber]
var following = a.NextTokens(s, nil)
s := a.states[stateNumber]
following := a.NextTokens(s, nil)
if !following.contains(TokenEpsilon) {
return following
}
var expected = NewIntervalSet()
expected := NewIntervalSet()
expected.addSet(following)
expected.removeOne(TokenEpsilon)
for ctx != nil && ctx.GetInvokingState() >= 0 && following.contains(TokenEpsilon) {
var invokingState = a.states[ctx.GetInvokingState()]
var rt = invokingState.GetTransitions()[0]
invokingState := a.states[ctx.GetInvokingState()]
rt := invokingState.GetTransitions()[0]
following = a.NextTokens(rt.(*RuleTransition).followState, nil)
expected.addSet(following)

View File

@ -113,7 +113,7 @@ func (b *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
b.dipsIntoOuterContext = true
}
var existing = b.configLookup.add(config).(ATNConfig)
existing := b.configLookup.add(config).(ATNConfig)
if existing == config {
b.cachedHashString = "-1"
@ -123,8 +123,8 @@ func (b *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
}
// Merge a previous (s, i, pi, _) with it and save the result
var rootIsWildcard = !b.fullCtx
var merged = merge(existing.GetContext(), config.GetContext(), rootIsWildcard, mergeCache)
rootIsWildcard := !b.fullCtx
merged := merge(existing.GetContext(), config.GetContext(), rootIsWildcard, mergeCache)
// No need to check for existing.context because config.context is in the cache,
// since the only way to create new graphs is the "call rule" and here. We cache
@ -143,7 +143,7 @@ func (b *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
}
func (b *BaseATNConfigSet) GetStates() *Set {
var states = NewSet(nil, nil)
states := NewSet(nil, nil)
for i := 0; i < len(b.configs); i++ {
states.add(b.configs[i].GetState())
@ -161,10 +161,10 @@ func (b *BaseATNConfigSet) SetHasSemanticContext(v bool) {
}
func (b *BaseATNConfigSet) GetPredicates() []SemanticContext {
var preds = make([]SemanticContext, 0)
preds := make([]SemanticContext, 0)
for i := 0; i < len(b.configs); i++ {
var c = b.configs[i].GetSemanticContext()
c := b.configs[i].GetSemanticContext()
if c != SemanticContextNone {
preds = append(preds, c)
@ -188,7 +188,7 @@ func (b *BaseATNConfigSet) OptimizeConfigs(interpreter *BaseATNSimulator) {
}
for i := 0; i < len(b.configs); i++ {
var config = b.configs[i]
config := b.configs[i]
config.SetContext(interpreter.getCachedContext(config.GetContext()))
}
@ -209,7 +209,7 @@ func (b *BaseATNConfigSet) Equals(other interface{}) bool {
return false
}
var other2 = other.(*BaseATNConfigSet)
other2 := other.(*BaseATNConfigSet)
return b.configs != nil &&
// TODO: b.configs.equals(other2.configs) && // TODO: Is b necessary?
@ -233,7 +233,7 @@ func (b *BaseATNConfigSet) Hash() string {
}
func (b *BaseATNConfigSet) hashConfigs() string {
var s = ""
s := ""
for _, c := range b.configs {
s += fmt.Sprint(c)
@ -353,7 +353,7 @@ type OrderedATNConfigSet struct {
}
func NewOrderedATNConfigSet() *OrderedATNConfigSet {
var b = NewBaseATNConfigSet(false)
b := NewBaseATNConfigSet(false)
b.configLookup = NewSet(nil, nil)
@ -380,9 +380,9 @@ func equalATNConfigs(a, b interface{}) bool {
return false
}
var nums = ai.GetState().GetStateNumber() == bi.GetState().GetStateNumber()
var alts = ai.GetAlt() == bi.GetAlt()
var cons = ai.GetSemanticContext().equals(bi.GetSemanticContext())
nums := ai.GetState().GetStateNumber() == bi.GetState().GetStateNumber()
alts := ai.GetAlt() == bi.GetAlt()
cons := ai.GetSemanticContext().equals(bi.GetSemanticContext())
return nums && alts && cons
}

View File

@ -9,7 +9,7 @@ type ATNDeserializationOptions struct {
}
func NewATNDeserializationOptions(CopyFrom *ATNDeserializationOptions) *ATNDeserializationOptions {
var o = new(ATNDeserializationOptions)
o := new(ATNDeserializationOptions)
if CopyFrom != nil {
o.readOnly = CopyFrom.readOnly

View File

@ -65,13 +65,13 @@ func stringInSlice(a string, list []string) int {
// at or after the feature identified by feature was introduced, and otherwise
// false.
func (a *ATNDeserializer) isFeatureSupported(feature, actualUUID string) bool {
var idx1 = stringInSlice(feature, SupportedUUIDs)
idx1 := stringInSlice(feature, SupportedUUIDs)
if idx1 < 0 {
return false
}
var idx2 = stringInSlice(actualUUID, SupportedUUIDs)
idx2 := stringInSlice(actualUUID, SupportedUUIDs)
return idx2 >= idx1
}
@ -81,13 +81,13 @@ func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
a.checkVersion()
a.checkUUID()
var atn = a.readATN()
atn := a.readATN()
a.readStates(atn)
a.readRules(atn)
a.readModes(atn)
var sets = a.readSets(atn)
sets := a.readSets(atn)
a.readEdges(atn, sets)
a.readDecisions(atn)
@ -106,7 +106,7 @@ func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
}
func (a *ATNDeserializer) reset(data []rune) {
var temp = make([]rune, len(data))
temp := make([]rune, len(data))
for i, c := range data {
// Don't adjust the first value since that's the version number
@ -122,7 +122,7 @@ func (a *ATNDeserializer) reset(data []rune) {
}
func (a *ATNDeserializer) checkVersion() {
var version = a.readInt()
version := a.readInt()
if version != SerializedVersion {
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(SerializedVersion) + ").")
@ -130,7 +130,7 @@ func (a *ATNDeserializer) checkVersion() {
}
func (a *ATNDeserializer) checkUUID() {
var uuid = a.readUUID()
uuid := a.readUUID()
if stringInSlice(uuid, SupportedUUIDs) < 0 {
panic("Could not deserialize ATN with UUID: " + uuid + " (expected " + SerializedUUID + " or a legacy UUID).")
@ -140,20 +140,20 @@ func (a *ATNDeserializer) checkUUID() {
}
func (a *ATNDeserializer) readATN() *ATN {
var grammarType = a.readInt()
var maxTokenType = a.readInt()
grammarType := a.readInt()
maxTokenType := a.readInt()
return NewATN(grammarType, maxTokenType)
}
func (a *ATNDeserializer) readStates(atn *ATN) {
var loopBackStateNumbers = make([]LoopEndStateIntPair, 0)
var endStateNumbers = make([]BlockStartStateIntPair, 0)
loopBackStateNumbers := make([]LoopEndStateIntPair, 0)
endStateNumbers := make([]BlockStartStateIntPair, 0)
var nstates = a.readInt()
nstates := a.readInt()
for i := 0; i < nstates; i++ {
var stype = a.readInt()
stype := a.readInt()
// Ignore bad types of states
if stype == ATNStateInvalidType {
@ -162,20 +162,20 @@ func (a *ATNDeserializer) readStates(atn *ATN) {
continue
}
var ruleIndex = a.readInt()
ruleIndex := a.readInt()
if ruleIndex == 0xFFFF {
ruleIndex = -1
}
var s = a.stateFactory(stype, ruleIndex)
s := a.stateFactory(stype, ruleIndex)
if stype == ATNStateLoopEnd {
var loopBackStateNumber = a.readInt()
loopBackStateNumber := a.readInt()
loopBackStateNumbers = append(loopBackStateNumbers, LoopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
} else if s2, ok := s.(BlockStartState); ok {
var endStateNumber = a.readInt()
endStateNumber := a.readInt()
endStateNumbers = append(endStateNumbers, BlockStartStateIntPair{s2, endStateNumber})
}
@ -186,36 +186,36 @@ func (a *ATNDeserializer) readStates(atn *ATN) {
// Delay the assignment of loop back and end states until we know all the state
// instances have been initialized
for j := 0; j < len(loopBackStateNumbers); j++ {
var pair = loopBackStateNumbers[j]
pair := loopBackStateNumbers[j]
pair.item0.loopBackState = atn.states[pair.item1]
}
for j := 0; j < len(endStateNumbers); j++ {
var pair = endStateNumbers[j]
pair := endStateNumbers[j]
pair.item0.setEndState(atn.states[pair.item1].(*BlockEndState))
}
var numNonGreedyStates = a.readInt()
numNonGreedyStates := a.readInt()
for j := 0; j < numNonGreedyStates; j++ {
var stateNumber = a.readInt()
stateNumber := a.readInt()
atn.states[stateNumber].(DecisionState).setNonGreedy(true)
}
var numPrecedenceStates = a.readInt()
numPrecedenceStates := a.readInt()
for j := 0; j < numPrecedenceStates; j++ {
var stateNumber = a.readInt()
stateNumber := a.readInt()
atn.states[stateNumber].(*RuleStartState).isPrecedenceRule = true
}
}
func (a *ATNDeserializer) readRules(atn *ATN) {
var nrules = a.readInt()
nrules := a.readInt()
if atn.grammarType == ATNTypeLexer {
atn.ruleToTokenType = make([]int, nrules) // TODO: initIntArray(nrules, 0)
@ -224,13 +224,13 @@ func (a *ATNDeserializer) readRules(atn *ATN) {
atn.ruleToStartState = make([]*RuleStartState, nrules) // TODO: initIntArray(nrules, 0)
for i := 0; i < nrules; i++ {
var s = a.readInt()
var startState = atn.states[s].(*RuleStartState)
s := a.readInt()
startState := atn.states[s].(*RuleStartState)
atn.ruleToStartState[i] = startState
if atn.grammarType == ATNTypeLexer {
var tokenType = a.readInt()
tokenType := a.readInt()
if tokenType == 0xFFFF {
tokenType = TokenEOF
@ -243,7 +243,7 @@ func (a *ATNDeserializer) readRules(atn *ATN) {
atn.ruleToStopState = make([]*RuleStopState, nrules) //initIntArray(nrules, 0)
for i := 0; i < len(atn.states); i++ {
var state = atn.states[i]
state := atn.states[i]
if s2, ok := state.(*RuleStopState); ok {
atn.ruleToStopState[s2.ruleIndex] = s2
@ -253,34 +253,34 @@ func (a *ATNDeserializer) readRules(atn *ATN) {
}
func (a *ATNDeserializer) readModes(atn *ATN) {
var nmodes = a.readInt()
nmodes := a.readInt()
for i := 0; i < nmodes; i++ {
var s = a.readInt()
s := a.readInt()
atn.modeToStartState = append(atn.modeToStartState, atn.states[s].(*TokensStartState))
}
}
func (a *ATNDeserializer) readSets(atn *ATN) []*IntervalSet {
var sets = make([]*IntervalSet, 0)
var m = a.readInt()
sets := make([]*IntervalSet, 0)
m := a.readInt()
for i := 0; i < m; i++ {
var iset = NewIntervalSet()
iset := NewIntervalSet()
sets = append(sets, iset)
var n = a.readInt()
var containsEOF = a.readInt()
n := a.readInt()
containsEOF := a.readInt()
if containsEOF != 0 {
iset.addOne(-1)
}
for j := 0; j < n; j++ {
var i1 = a.readInt()
var i2 = a.readInt()
i1 := a.readInt()
i2 := a.readInt()
iset.addRange(i1, i2)
}
@ -290,7 +290,7 @@ func (a *ATNDeserializer) readSets(atn *ATN) []*IntervalSet {
}
func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
var nedges = a.readInt()
nedges := a.readInt()
for i := 0; i < nedges; i++ {
var (
@ -309,7 +309,7 @@ func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
// Edges for rule stop states can be derived, so they are not serialized
for i := 0; i < len(atn.states); i++ {
var state = atn.states[i]
state := atn.states[i]
for j := 0; j < len(state.GetTransitions()); j++ {
var t, ok = state.GetTransitions()[j].(*RuleTransition)
@ -318,7 +318,7 @@ func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
continue
}
var outermostPrecedenceReturn = -1
outermostPrecedenceReturn := -1
if atn.ruleToStartState[t.getTarget().GetRuleIndex()].isPrecedenceRule {
if t.precedence == 0 {
@ -326,14 +326,14 @@ func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
}
}
var trans = NewEpsilonTransition(t.followState, outermostPrecedenceReturn)
trans := NewEpsilonTransition(t.followState, outermostPrecedenceReturn)
atn.ruleToStopState[t.getTarget().GetRuleIndex()].AddTransition(trans, -1)
}
}
for i := 0; i < len(atn.states); i++ {
var state = atn.states[i]
state := atn.states[i]
if s2, ok := state.(*BaseBlockStartState); ok {
// We need to know the end state to set its start state
@ -351,7 +351,7 @@ func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
if s2, ok := state.(*PlusLoopbackState); ok {
for j := 0; j < len(s2.GetTransitions()); j++ {
var target = s2.GetTransitions()[j].getTarget()
target := s2.GetTransitions()[j].getTarget()
if t2, ok := target.(*PlusBlockStartState); ok {
t2.loopBackState = state
@ -359,7 +359,7 @@ func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
}
} else if s2, ok := state.(*StarLoopbackState); ok {
for j := 0; j < len(s2.GetTransitions()); j++ {
var target = s2.GetTransitions()[j].getTarget()
target := s2.GetTransitions()[j].getTarget()
if t2, ok := target.(*StarLoopEntryState); ok {
t2.loopBackState = state
@ -370,11 +370,11 @@ func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
}
func (a *ATNDeserializer) readDecisions(atn *ATN) {
var ndecisions = a.readInt()
ndecisions := a.readInt()
for i := 0; i < ndecisions; i++ {
var s = a.readInt()
var decState = atn.states[s].(DecisionState)
s := a.readInt()
decState := atn.states[s].(DecisionState)
atn.DecisionToState = append(atn.DecisionToState, decState)
decState.setDecision(i)
@ -383,25 +383,25 @@ func (a *ATNDeserializer) readDecisions(atn *ATN) {
func (a *ATNDeserializer) readLexerActions(atn *ATN) {
if atn.grammarType == ATNTypeLexer {
var count = a.readInt()
count := a.readInt()
atn.lexerActions = make([]LexerAction, count) // initIntArray(count, nil)
for i := 0; i < count; i++ {
var actionType = a.readInt()
var data1 = a.readInt()
actionType := a.readInt()
data1 := a.readInt()
if data1 == 0xFFFF {
data1 = -1
}
var data2 = a.readInt()
data2 := a.readInt()
if data2 == 0xFFFF {
data2 = -1
}
var lexerAction = a.lexerActionFactory(actionType, data1, data2)
lexerAction := a.lexerActionFactory(actionType, data1, data2)
atn.lexerActions[i] = lexerAction
}
@ -409,7 +409,7 @@ func (a *ATNDeserializer) readLexerActions(atn *ATN) {
}
func (a *ATNDeserializer) generateRuleBypassTransitions(atn *ATN) {
var count = len(atn.ruleToStartState)
count := len(atn.ruleToStartState)
for i := 0; i < count; i++ {
atn.ruleToTokenType[i] = atn.maxTokenType + i + 1
@ -421,12 +421,12 @@ func (a *ATNDeserializer) generateRuleBypassTransitions(atn *ATN) {
}
func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
var bypassStart = NewBasicBlockStartState()
bypassStart := NewBasicBlockStartState()
bypassStart.ruleIndex = idx
atn.addState(bypassStart)
var bypassStop = NewBlockEndState()
bypassStop := NewBlockEndState()
bypassStop.ruleIndex = idx
atn.addState(bypassStop)
@ -445,7 +445,7 @@ func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
endState = nil
for i := 0; i < len(atn.states); i++ {
var state = atn.states[i]
state := atn.states[i]
if a.stateIsEndStateFor(state, idx) != nil {
endState = state
@ -465,10 +465,10 @@ func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
// All non-excluded transitions that currently target end state need to target
// blockEnd instead
for i := 0; i < len(atn.states); i++ {
var state = atn.states[i]
state := atn.states[i]
for j := 0; j < len(state.GetTransitions()); j++ {
var transition = state.GetTransitions()[j]
transition := state.GetTransitions()[j]
if transition == excludeTransition {
continue
@ -481,8 +481,8 @@ func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
}
// All transitions leaving the rule start state need to leave blockStart instead
var ruleToStartState = atn.ruleToStartState[idx]
var count = len(ruleToStartState.GetTransitions())
ruleToStartState := atn.ruleToStartState[idx]
count := len(ruleToStartState.GetTransitions())
for count > 0 {
bypassStart.AddTransition(ruleToStartState.GetTransitions()[count-1], -1)
@ -493,7 +493,7 @@ func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
atn.ruleToStartState[idx].AddTransition(NewEpsilonTransition(bypassStart, -1), -1)
bypassStop.AddTransition(NewEpsilonTransition(endState, -1), -1)
var MatchState = NewBasicState()
MatchState := NewBasicState()
atn.addState(MatchState)
MatchState.AddTransition(NewAtomTransition(bypassStop, atn.ruleToTokenType[idx]), -1)
@ -509,7 +509,7 @@ func (a *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNState {
return nil
}
var maybeLoopEndState = state.GetTransitions()[len(state.GetTransitions())-1].getTarget()
maybeLoopEndState := state.GetTransitions()[len(state.GetTransitions())-1].getTarget()
if _, ok := maybeLoopEndState.(*LoopEndState); !ok {
return nil
@ -537,7 +537,7 @@ func (a *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
// decision for the closure block that determines whether a
// precedence rule should continue or complete.
if atn.ruleToStartState[state.GetRuleIndex()].isPrecedenceRule {
var maybeLoopEndState = state.GetTransitions()[len(state.GetTransitions())-1].getTarget()
maybeLoopEndState := state.GetTransitions()[len(state.GetTransitions())-1].getTarget()
if s3, ok := maybeLoopEndState.(*LoopEndState); ok {
var _, ok2 = maybeLoopEndState.GetTransitions()[0].getTarget().(*RuleStopState)
@ -557,7 +557,7 @@ func (a *ATNDeserializer) verifyATN(atn *ATN) {
// Verify assumptions
for i := 0; i < len(atn.states); i++ {
var state = atn.states[i]
state := atn.states[i]
if state == nil {
continue
@ -631,7 +631,7 @@ func (a *ATNDeserializer) checkCondition(condition bool, message string) {
}
func (a *ATNDeserializer) readInt() int {
var v = a.data[a.pos]
v := a.data[a.pos]
a.pos++
@ -647,7 +647,7 @@ func (a *ATNDeserializer) readInt() int {
//}
func createByteToHex() []string {
var bth = make([]string, 256)
bth := make([]string, 256)
for i := 0; i < 256; i++ {
bth[i] = strings.ToUpper(hex.EncodeToString([]byte{byte(i)}))
@ -659,10 +659,10 @@ func createByteToHex() []string {
var byteToHex = createByteToHex()
func (a *ATNDeserializer) readUUID() string {
var bb = make([]int, 16)
bb := make([]int, 16)
for i := 7; i >= 0; i-- {
var integer = a.readInt()
integer := a.readInt()
bb[(2*i)+1] = integer & 0xFF
bb[2*i] = (integer >> 8) & 0xFF
@ -679,7 +679,7 @@ func (a *ATNDeserializer) readUUID() string {
}
func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition {
var target = atn.states[trg]
target := atn.states[trg]
switch typeIndex {
case TransitionEPSILON:

View File

@ -15,7 +15,7 @@ type BaseATNSimulator struct {
}
func NewBaseATNSimulator(atn *ATN, sharedContextCache *PredictionContextCache) *BaseATNSimulator {
var b = new(BaseATNSimulator)
b := new(BaseATNSimulator)
b.atn = atn
b.sharedContextCache = sharedContextCache
@ -28,7 +28,7 @@ func (b *BaseATNSimulator) getCachedContext(context PredictionContext) Predictio
return context
}
var visited = make(map[PredictionContext]PredictionContext)
visited := make(map[PredictionContext]PredictionContext)
return getCachedBasePredictionContext(context, b.sharedContextCache, visited)
}

View File

@ -154,7 +154,7 @@ type BasicState struct {
}
func NewBasicState() *BasicState {
var b = NewBaseATNState()
b := NewBaseATNState()
b.stateType = ATNStateBasic
@ -227,7 +227,7 @@ type BasicBlockStartState struct {
}
func NewBasicBlockStartState() *BasicBlockStartState {
var b = NewBlockStartState()
b := NewBlockStartState()
b.stateType = ATNStateBlockStart
@ -241,7 +241,7 @@ type BlockEndState struct {
}
func NewBlockEndState() *BlockEndState {
var b = NewBaseATNState()
b := NewBaseATNState()
b.stateType = ATNStateBlockEnd
@ -257,7 +257,7 @@ type RuleStopState struct {
}
func NewRuleStopState() *RuleStopState {
var b = NewBaseATNState()
b := NewBaseATNState()
b.stateType = ATNStateRuleStop
@ -271,7 +271,7 @@ type RuleStartState struct {
}
func NewRuleStartState() *RuleStartState {
var b = NewBaseATNState()
b := NewBaseATNState()
b.stateType = ATNStateRuleStart
@ -285,7 +285,7 @@ type PlusLoopbackState struct {
}
func NewPlusLoopbackState() *PlusLoopbackState {
var b = NewBaseDecisionState()
b := NewBaseDecisionState()
b.stateType = ATNStatePlusLoopBack
@ -302,7 +302,7 @@ type PlusBlockStartState struct {
}
func NewPlusBlockStartState() *PlusBlockStartState {
var b = NewBlockStartState()
b := NewBlockStartState()
b.stateType = ATNStatePlusBlockStart
@ -315,7 +315,7 @@ type StarBlockStartState struct {
}
func NewStarBlockStartState() *StarBlockStartState {
var b = NewBlockStartState()
b := NewBlockStartState()
b.stateType = ATNStateStarBlockStart
@ -327,7 +327,7 @@ type StarLoopbackState struct {
}
func NewStarLoopbackState() *StarLoopbackState {
var b = NewBaseATNState()
b := NewBaseATNState()
b.stateType = ATNStateStarLoopBack
@ -341,7 +341,7 @@ type StarLoopEntryState struct {
}
func NewStarLoopEntryState() *StarLoopEntryState {
var b = NewBaseDecisionState()
b := NewBaseDecisionState()
b.stateType = ATNStateStarLoopEntry
@ -356,7 +356,7 @@ type LoopEndState struct {
}
func NewLoopEndState() *LoopEndState {
var b = NewBaseATNState()
b := NewBaseATNState()
b.stateType = ATNStateLoopEnd
@ -369,7 +369,7 @@ type TokensStartState struct {
}
func NewTokensStartState() *TokensStartState {
var b = NewBaseDecisionState()
b := NewBaseDecisionState()
b.stateType = ATNStateTokenStart

View File

@ -36,7 +36,7 @@ func (c *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int
fmt.Println("Token factory creating: " + text)
}
var t = NewCommonToken(source, ttype, channel, start, stop)
t := NewCommonToken(source, ttype, channel, start, stop)
t.line = line
t.column = column
@ -55,7 +55,7 @@ func (c *CommonTokenFactory) createThin(ttype int, text string) Token {
fmt.Println("Token factory creating: " + text)
}
var t = NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
t := NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
t.SetText(text)

View File

@ -76,7 +76,7 @@ func (c *CommonTokenStream) Get(index int) Token {
}
func (c *CommonTokenStream) Consume() {
var SkipEOFCheck = false
SkipEOFCheck := false
if c.index >= 0 {
if c.fetchedEOF {
@ -112,10 +112,10 @@ func (c *CommonTokenStream) Consume() {
// Sync makes sure index i in tokens has a token and returns true if a token is
// located at index i and otherwise false.
func (c *CommonTokenStream) Sync(i int) bool {
var n = i - len(c.tokens) + 1 // TODO: How many more elements do we need?
n := i - len(c.tokens) + 1 // TODO: How many more elements do we need?
if n > 0 {
var fetched = c.fetch(n)
fetched := c.fetch(n)
if PortDebug {
fmt.Println("Sync done")
@ -135,7 +135,7 @@ func (c *CommonTokenStream) fetch(n int) int {
}
for i := 0; i < n; i++ {
var t = c.tokenSource.NextToken()
t := c.tokenSource.NextToken()
if PortDebug {
fmt.Println("fetch loop")
@ -166,14 +166,14 @@ func (c *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) [
c.lazyInit()
var subset = make([]Token, 0)
subset := make([]Token, 0)
if stop >= len(c.tokens) {
stop = len(c.tokens) - 1
}
for i := start; i < stop; i++ {
var t = c.tokens[i]
t := c.tokens[i]
if t.GetTokenType() == TokenEOF {
break
@ -223,7 +223,7 @@ func (c *CommonTokenStream) NextTokenOnChannel(i, channel int) int {
return -1
}
var token = c.tokens[i]
token := c.tokens[i]
for token.GetChannel() != c.channel {
if token.GetTokenType() == TokenEOF {
@ -259,8 +259,8 @@ func (c *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []To
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
}
var nextOnChannel = c.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
var from = tokenIndex + 1
nextOnChannel := c.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
from := tokenIndex + 1
// If no onchannel to the right, then nextOnChannel == -1, so set to to last token
var to int
@ -284,24 +284,24 @@ func (c *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []Tok
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
}
var prevOnChannel = c.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
prevOnChannel := c.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
if prevOnChannel == tokenIndex-1 {
return nil
}
// If there are none on channel to the left and prevOnChannel == -1 then from = 0
var from = prevOnChannel + 1
var to = tokenIndex - 1
from := prevOnChannel + 1
to := tokenIndex - 1
return c.filterForChannel(from, to, channel)
}
func (c *CommonTokenStream) filterForChannel(left, right, channel int) []Token {
var hidden = make([]Token, 0)
hidden := make([]Token, 0)
for i := left; i < right+1; i++ {
var t = c.tokens[i]
t := c.tokens[i]
if channel == -1 {
if t.GetChannel() != LexerDefaultTokenChannel {
@ -355,8 +355,8 @@ func (c *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
interval = NewInterval(0, len(c.tokens)-1)
}
var start = interval.start
var stop = interval.stop
start := interval.start
stop := interval.stop
if start < 0 || stop < 0 {
return ""
@ -366,10 +366,10 @@ func (c *CommonTokenStream) GetTextFromInterval(interval *Interval) string {
stop = len(c.tokens) - 1
}
var s = ""
s := ""
for i := start; i < stop+1; i++ {
var t = c.tokens[i]
t := c.tokens[i]
if t.GetTokenType() == TokenEOF {
break
@ -399,8 +399,8 @@ func (c *CommonTokenStream) LB(k int) Token {
return nil
}
var i = c.index
var n = 1
i := c.index
n := 1
// Find k good tokens looking backward
for n <= k {
@ -427,8 +427,8 @@ func (c *CommonTokenStream) LT(k int) Token {
return c.LB(-k)
}
var i = c.index
var n = 1 // We know tokens[n] is valid
i := c.index
n := 1 // We know tokens[n] is valid
// Find k good tokens
for n < k {
@ -450,7 +450,7 @@ func (c *CommonTokenStream) getNumberOfOnChannelTokens() int {
c.Fill()
for i := 0; i < len(c.tokens); i++ {
var t = c.tokens[i]
t := c.tokens[i]
if t.GetChannel() == c.channel {
n++

View File

@ -75,7 +75,7 @@ func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
d.states = make(map[string]*DFAState)
if precedenceDfa {
var precedenceState = NewDFAState(-1, NewBaseATNConfigSet(false))
precedenceState := NewDFAState(-1, NewBaseATNConfigSet(false))
precedenceState.edges = make([]*DFAState, 0)
precedenceState.isAcceptState = false
@ -101,7 +101,7 @@ func (d DFAStateList) Swap(i, j int) { d[i], d[j] = d[j], d[i] }
// sortedStates returns the states in d sorted by their state number.
func (d *DFA) sortedStates() []*DFAState {
var vs = make([]*DFAState, 0, len(d.states))
vs := make([]*DFAState, 0, len(d.states))
for _, v := range d.states {
vs = append(vs, v)

View File

@ -34,15 +34,15 @@ func (d *DFASerializer) String() string {
return ""
}
var buf = ""
var states = d.dfa.sortedStates()
buf := ""
states := d.dfa.sortedStates()
for _, s := range states {
if s.edges != nil {
var n = len(s.edges)
n := len(s.edges)
for j := 0; j < n; j++ {
var t = s.edges[j]
t := s.edges[j]
if t != nil && t.stateNumber != 0x7FFFFFFF {
buf += d.GetStateString(s)
@ -86,7 +86,7 @@ func (d *DFASerializer) GetStateString(s *DFAState) string {
b = "^"
}
var baseStateStr = a + "s" + strconv.Itoa(s.stateNumber) + b
baseStateStr := a + "s" + strconv.Itoa(s.stateNumber) + b
if s.isAcceptState {
if s.predicates != nil {
@ -116,17 +116,17 @@ func (l *LexerDFASerializer) String() string {
return ""
}
var buf = ""
var states = l.dfa.sortedStates()
buf := ""
states := l.dfa.sortedStates()
for i := 0; i < len(states); i++ {
var s = states[i]
s := states[i]
if s.edges != nil {
var n = len(s.edges)
n := len(s.edges)
for j := 0; j < n; j++ {
var t = s.edges[j]
t := s.edges[j]
if t != nil && t.stateNumber != 0x7FFFFFFF {
buf += l.GetStateString(s)

View File

@ -88,7 +88,7 @@ func NewDFAState(stateNumber int, configs ATNConfigSet) *DFAState {
// GetAltSet gets the set of all alts mentioned by all ATN configurations in d.
func (d *DFAState) GetAltSet() *Set {
var alts = NewSet(nil, nil)
alts := NewSet(nil, nil)
if d.configs != nil {
for _, c := range d.configs.GetItems() {

View File

@ -42,7 +42,7 @@ func (d *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, s
if d.exactOnly && !exact {
return
}
var msg = "reportAmbiguity d=" +
msg := "reportAmbiguity d=" +
d.getDecisionDescription(recognizer, dfa) +
": ambigAlts=" +
d.getConflictingAlts(ambigAlts, configs).String() +
@ -53,7 +53,7 @@ func (d *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, s
func (d *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
var msg = "reportAttemptingFullContext d=" +
msg := "reportAttemptingFullContext d=" +
d.getDecisionDescription(recognizer, dfa) +
", input='" +
recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
@ -61,7 +61,7 @@ func (d *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser,
}
func (d *DiagnosticErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
var msg = "reportContextSensitivity d=" +
msg := "reportContextSensitivity d=" +
d.getDecisionDescription(recognizer, dfa) +
", input='" +
recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
@ -69,14 +69,14 @@ func (d *DiagnosticErrorListener) ReportContextSensitivity(recognizer Parser, df
}
func (d *DiagnosticErrorListener) getDecisionDescription(recognizer Parser, dfa *DFA) string {
var decision = dfa.decision
var ruleIndex = dfa.atnStartState.GetRuleIndex()
decision := dfa.decision
ruleIndex := dfa.atnStartState.GetRuleIndex()
var ruleNames = recognizer.GetRuleNames()
ruleNames := recognizer.GetRuleNames()
if ruleIndex < 0 || ruleIndex >= len(ruleNames) {
return strconv.Itoa(decision)
}
var ruleName = ruleNames[ruleIndex]
ruleName := ruleNames[ruleIndex]
if ruleName == "" {
return strconv.Itoa(decision)
}
@ -98,7 +98,7 @@ func (d *DiagnosticErrorListener) getConflictingAlts(ReportedAlts *BitSet, set A
if ReportedAlts != nil {
return ReportedAlts
}
var result = NewBitSet()
result := NewBitSet()
for _, c := range set.GetItems() {
result.add(c.GetAlt())
}

View File

@ -154,7 +154,7 @@ func (d *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionException
d.lastErrorStates = NewIntervalSet()
}
d.lastErrorStates.addOne(recognizer.GetState())
var followSet = d.getErrorRecoverySet(recognizer)
followSet := d.getErrorRecoverySet(recognizer)
d.consumeUntil(recognizer, followSet)
}
@ -213,8 +213,8 @@ func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
fmt.Println("STATE" + strconv.Itoa(recognizer.GetState()))
}
var s = recognizer.GetInterpreter().atn.states[recognizer.GetState()]
var la = recognizer.GetTokenStream().LA(1)
s := recognizer.GetInterpreter().atn.states[recognizer.GetState()]
la := recognizer.GetTokenStream().LA(1)
if PortDebug {
fmt.Println("LA" + strconv.Itoa(la))
@ -249,9 +249,9 @@ func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
panic(NewInputMisMatchException(recognizer))
case ATNStatePlusLoopBack, ATNStateStarLoopBack:
d.ReportUnwantedToken(recognizer)
var expecting = NewIntervalSet()
expecting := NewIntervalSet()
expecting.addSet(recognizer.GetExpectedTokens())
var whatFollowsLoopIterationOrRule = expecting.addSet(d.getErrorRecoverySet(recognizer))
whatFollowsLoopIterationOrRule := expecting.addSet(d.getErrorRecoverySet(recognizer))
d.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
default:
// do nothing if we can't identify the exact kind of ATN state
@ -267,7 +267,7 @@ func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
// @param e the recognition exception
//
func (d *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *NoViableAltException) {
var tokens = recognizer.GetTokenStream()
tokens := recognizer.GetTokenStream()
var input string
if tokens != nil {
if e.startToken.GetTokenType() == TokenEOF {
@ -278,7 +278,7 @@ func (d *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *N
} else {
input = "<unknown input>"
}
var msg = "no viable alternative at input " + d.escapeWSAndQuote(input)
msg := "no viable alternative at input " + d.escapeWSAndQuote(input)
recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
}
@ -292,7 +292,7 @@ func (d *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *N
// @param e the recognition exception
//
func (this *DefaultErrorStrategy) ReportInputMisMatch(recognizer Parser, e *InputMisMatchException) {
var msg = "mismatched input " + this.GetTokenErrorDisplay(e.offendingToken) +
msg := "mismatched input " + this.GetTokenErrorDisplay(e.offendingToken) +
" expecting " + e.getExpectedTokens().StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
}
@ -307,8 +307,8 @@ func (this *DefaultErrorStrategy) ReportInputMisMatch(recognizer Parser, e *Inpu
// @param e the recognition exception
//
func (d *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *FailedPredicateException) {
var ruleName = recognizer.GetRuleNames()[recognizer.GetParserRuleContext().GetRuleIndex()]
var msg = "rule " + ruleName + " " + e.message
ruleName := recognizer.GetRuleNames()[recognizer.GetParserRuleContext().GetRuleIndex()]
msg := "rule " + ruleName + " " + e.message
recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
}
@ -334,10 +334,10 @@ func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
return
}
d.beginErrorCondition(recognizer)
var t = recognizer.GetCurrentToken()
var tokenName = d.GetTokenErrorDisplay(t)
var expecting = d.GetExpectedTokens(recognizer)
var msg = "extraneous input " + tokenName + " expecting " +
t := recognizer.GetCurrentToken()
tokenName := d.GetTokenErrorDisplay(t)
expecting := d.GetExpectedTokens(recognizer)
msg := "extraneous input " + tokenName + " expecting " +
expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
recognizer.NotifyErrorListeners(msg, t, nil)
}
@ -363,9 +363,9 @@ func (d *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
return
}
d.beginErrorCondition(recognizer)
var t = recognizer.GetCurrentToken()
var expecting = d.GetExpectedTokens(recognizer)
var msg = "missing " + expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false) +
t := recognizer.GetCurrentToken()
expecting := d.GetExpectedTokens(recognizer)
msg := "missing " + expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false) +
" at " + d.GetTokenErrorDisplay(t)
recognizer.NotifyErrorListeners(msg, t, nil)
}
@ -421,7 +421,7 @@ func (d *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
//
func (d *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
// SINGLE TOKEN DELETION
var MatchedSymbol = d.SingleTokenDeletion(recognizer)
MatchedSymbol := d.SingleTokenDeletion(recognizer)
if MatchedSymbol != nil {
// we have deleted the extra token.
// now, move past ttype token as if all were ok
@ -454,14 +454,14 @@ func (d *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
// strategy for the current mismatched input, otherwise {@code false}
//
func (d *DefaultErrorStrategy) SingleTokenInsertion(recognizer Parser) bool {
var currentSymbolType = recognizer.GetTokenStream().LA(1)
currentSymbolType := recognizer.GetTokenStream().LA(1)
// if current token is consistent with what could come after current
// ATN state, then we know we're missing a token error recovery
// is free to conjure up and insert the missing token
var atn = recognizer.GetInterpreter().atn
var currentState = atn.states[recognizer.GetState()]
var next = currentState.GetTransitions()[0].getTarget()
var expectingAtLL2 = atn.NextTokens(next, recognizer.GetParserRuleContext())
atn := recognizer.GetInterpreter().atn
currentState := atn.states[recognizer.GetState()]
next := currentState.GetTransitions()[0].getTarget()
expectingAtLL2 := atn.NextTokens(next, recognizer.GetParserRuleContext())
if expectingAtLL2.contains(currentSymbolType) {
d.ReportMissingToken(recognizer)
return true
@ -489,8 +489,8 @@ func (d *DefaultErrorStrategy) SingleTokenInsertion(recognizer Parser) bool {
// {@code nil}
//
func (d *DefaultErrorStrategy) SingleTokenDeletion(recognizer Parser) Token {
var NextTokenType = recognizer.GetTokenStream().LA(2)
var expecting = d.GetExpectedTokens(recognizer)
NextTokenType := recognizer.GetTokenStream().LA(2)
expecting := d.GetExpectedTokens(recognizer)
if expecting.contains(NextTokenType) {
d.ReportUnwantedToken(recognizer)
// print("recoverFromMisMatchedToken deleting " \
@ -499,7 +499,7 @@ func (d *DefaultErrorStrategy) SingleTokenDeletion(recognizer Parser) Token {
// + " is what we want", file=sys.stderr)
recognizer.Consume() // simply delete extra token
// we want to return the token we're actually Matching
var MatchedSymbol = recognizer.GetCurrentToken()
MatchedSymbol := recognizer.GetCurrentToken()
d.ReportMatch(recognizer) // we know current token is correct
return MatchedSymbol
}
@ -527,9 +527,9 @@ func (d *DefaultErrorStrategy) SingleTokenDeletion(recognizer Parser) Token {
// override d method to create the appropriate tokens.
//
func (d *DefaultErrorStrategy) GetMissingSymbol(recognizer Parser) Token {
var currentSymbol = recognizer.GetCurrentToken()
var expecting = d.GetExpectedTokens(recognizer)
var expectedTokenType = expecting.first()
currentSymbol := recognizer.GetCurrentToken()
expecting := d.GetExpectedTokens(recognizer)
expectedTokenType := expecting.first()
var tokenText string
if expectedTokenType == TokenEOF {
@ -542,8 +542,8 @@ func (d *DefaultErrorStrategy) GetMissingSymbol(recognizer Parser) Token {
tokenText = "<missing undefined>" // TODO matches the JS impl
}
}
var current = currentSymbol
var lookback = recognizer.GetTokenStream().LT(-1)
current := currentSymbol
lookback := recognizer.GetTokenStream().LT(-1)
if current.GetTokenType() == TokenEOF && lookback != nil {
current = lookback
}
@ -572,7 +572,7 @@ func (d *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string {
if t == nil {
return "<no token>"
}
var s = t.GetText()
s := t.GetText()
if s == "" {
if t.GetTokenType() == TokenEOF {
s = "<EOF>"
@ -683,14 +683,14 @@ func (d *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
// at run-time upon error to avoid overhead during parsing.
//
func (d *DefaultErrorStrategy) getErrorRecoverySet(recognizer Parser) *IntervalSet {
var atn = recognizer.GetInterpreter().atn
var ctx = recognizer.GetParserRuleContext()
var recoverSet = NewIntervalSet()
atn := recognizer.GetInterpreter().atn
ctx := recognizer.GetParserRuleContext()
recoverSet := NewIntervalSet()
for ctx != nil && ctx.GetInvokingState() >= 0 {
// compute what follows who invoked us
var invokingState = atn.states[ctx.GetInvokingState()]
var rt = invokingState.GetTransitions()[0]
var follow = atn.NextTokens(rt.(*RuleTransition).followState, nil)
invokingState := atn.states[ctx.GetInvokingState()]
rt := invokingState.GetTransitions()[0]
follow := atn.NextTokens(rt.(*RuleTransition).followState, nil)
recoverSet.addSet(follow)
ctx = ctx.GetParent().(ParserRuleContext)
}
@ -700,7 +700,7 @@ func (d *DefaultErrorStrategy) getErrorRecoverySet(recognizer Parser) *IntervalS
// Consume tokens until one Matches the given token set.//
func (d *DefaultErrorStrategy) consumeUntil(recognizer Parser, set *IntervalSet) {
var ttype = recognizer.GetTokenStream().LA(1)
ttype := recognizer.GetTokenStream().LA(1)
for ttype != TokenEOF && !set.contains(ttype) {
recognizer.Consume()
ttype = recognizer.GetTokenStream().LA(1)
@ -756,7 +756,7 @@ func NewBailErrorStrategy() *BailErrorStrategy {
// original {@link RecognitionException}.
//
func (b *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
var context = recognizer.GetParserRuleContext()
context := recognizer.GetParserRuleContext()
for context != nil {
context.SetException(e)
context = context.GetParent().(ParserRuleContext)

View File

@ -29,7 +29,7 @@ func NewBaseRecognitionException(message string, recognizer Recognizer, input In
// if (!!Error.captureStackTrace) {
// Error.captureStackTrace(this, RecognitionException)
// } else {
// var stack = NewError().stack
// stack := NewError().stack
// }
// TODO may be able to use - "runtime" func Stack(buf []byte, all bool) int
@ -112,7 +112,7 @@ func NewLexerNoViableAltException(lexer Lexer, input CharStream, startIndex int,
}
func (l *LexerNoViableAltException) String() string {
var symbol = ""
symbol := ""
if l.startIndex >= 0 && l.startIndex < l.input.Size() {
symbol = l.input.(CharStream).GetTextFromInterval(NewInterval(l.startIndex, l.startIndex))
}
@ -204,8 +204,8 @@ func NewFailedPredicateException(recognizer Parser, predicate string, message st
f.BaseRecognitionException = NewBaseRecognitionException(f.formatMessage(predicate, message), recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
var s = recognizer.GetInterpreter().atn.states[recognizer.GetState()]
var trans = s.GetTransitions()[0]
s := recognizer.GetInterpreter().atn.states[recognizer.GetState()]
trans := s.GetTransitions()[0]
if trans2, ok := trans.(*PredicateTransition); ok {
f.ruleIndex = trans2.ruleIndex
f.predicateIndex = trans2.predIndex

View File

@ -41,7 +41,7 @@ func (is *InputStream) LA(offset int) int {
if offset < 0 {
offset++ // e.g., translate LA(-1) to use offset=0
}
var pos = is.index + offset - 1
pos := is.index + offset - 1
if pos < 0 || pos >= is.size { // invalid
return TokenEOF

View File

@ -77,7 +77,7 @@ func (i *IntervalSet) addInterval(v *Interval) {
} else {
// find insert pos
for k := 0; k < len(i.intervals); k++ {
var interval = i.intervals[k]
interval := i.intervals[k]
			// distinct range -> insert
if v.stop < interval.start {
// i.intervals = splice(k, 0, v)
@ -106,7 +106,7 @@ func (i *IntervalSet) addSet(other *IntervalSet) *IntervalSet {
fmt.Println(len(other.intervals))
}
for k := 0; k < len(other.intervals); k++ {
var i2 = other.intervals[k]
i2 := other.intervals[k]
i.addInterval(NewInterval(i2.start, i2.stop))
}
}
@ -116,8 +116,8 @@ func (i *IntervalSet) addSet(other *IntervalSet) *IntervalSet {
func (i *IntervalSet) reduce(k int) {
// only need to reduce if k is not the last
if k < len(i.intervals)-1 {
var l = i.intervals[k]
var r = i.intervals[k+1]
l := i.intervals[k]
r := i.intervals[k+1]
// if r contained in l
if l.stop >= r.stop {
i.intervals = i.intervals[0 : len(i.intervals)-1] // pop(k + 1)
@ -130,7 +130,7 @@ func (i *IntervalSet) reduce(k int) {
}
func (i *IntervalSet) complement(start int, stop int) *IntervalSet {
var result = NewIntervalSet()
result := NewIntervalSet()
result.addInterval(NewInterval(start, stop+1))
for j := 0; j < len(i.intervals); j++ {
result.removeRange(i.intervals[j])
@ -166,13 +166,13 @@ func (i *IntervalSet) removeRange(v *Interval) {
} else if i.intervals != nil {
k := 0
for n := 0; n < len(i.intervals); n++ {
var ni = i.intervals[k]
ni := i.intervals[k]
// intervals are ordered
if v.stop <= ni.start {
return
} else if v.start > ni.start && v.stop < ni.stop {
i.intervals[k] = NewInterval(ni.start, v.start)
var x = NewInterval(v.stop, ni.stop)
x := NewInterval(v.stop, ni.stop)
// i.intervals.splice(k, 0, x)
i.intervals = append(i.intervals[0:k], append([]*Interval{x}, i.intervals[k:]...)...)
return
@ -193,12 +193,12 @@ func (i *IntervalSet) removeRange(v *Interval) {
func (i *IntervalSet) removeOne(v int) {
if i.intervals != nil {
for k := 0; k < len(i.intervals); k++ {
var ki = i.intervals[k]
ki := i.intervals[k]
			// intervals are ordered
if v < ki.start {
return
} else if v == ki.start && v == ki.stop-1 {
// i.intervals.splice(k, 1);
// i.intervals.splice(k, 1)
i.intervals = append(i.intervals[0:k], i.intervals[k+1:]...)
return
} else if v == ki.start {
@ -208,9 +208,9 @@ func (i *IntervalSet) removeOne(v int) {
i.intervals[k] = NewInterval(ki.start, ki.stop-1)
return
} else if v < ki.stop-1 {
var x = NewInterval(ki.start, v)
x := NewInterval(ki.start, v)
ki.start = v + 1
// i.intervals.splice(k, 0, x);
// i.intervals.splice(k, 0, x)
i.intervals = append(i.intervals[0:k], append([]*Interval{x}, i.intervals[k:]...)...)
return
}
@ -236,10 +236,10 @@ func (i *IntervalSet) StringVerbose(literalNames []string, symbolicNames []strin
}
func (i *IntervalSet) toCharString() string {
var names = make([]string, len(i.intervals))
names := make([]string, len(i.intervals))
for j := 0; j < len(i.intervals); j++ {
var v = i.intervals[j]
v := i.intervals[j]
if v.stop == v.start+1 {
if v.start == TokenEOF {
names = append(names, "<EOF>")
@ -259,9 +259,9 @@ func (i *IntervalSet) toCharString() string {
func (i *IntervalSet) toIndexString() string {
var names = make([]string, 0)
names := make([]string, 0)
for j := 0; j < len(i.intervals); j++ {
var v = i.intervals[j]
v := i.intervals[j]
if v.stop == v.start+1 {
if v.start == TokenEOF {
names = append(names, "<EOF>")
@ -280,7 +280,7 @@ func (i *IntervalSet) toIndexString() string {
}
func (i *IntervalSet) toTokenString(literalNames []string, symbolicNames []string) string {
var names = make([]string, 0)
names := make([]string, 0)
for _, v := range i.intervals {
for j := v.start; j < v.stop; j++ {
names = append(names, i.elementName(literalNames, symbolicNames, j))

View File

@ -181,7 +181,7 @@ func (b *BaseLexer) NextToken() Token {
panic("NextToken requires a non-nil input stream.")
}
var tokenStartMarker = b.input.Mark()
tokenStartMarker := b.input.Mark()
// previously in finally block
defer func() {
@ -325,7 +325,7 @@ func (b *BaseLexer) Emit() Token {
if PortDebug {
fmt.Println("emit base lexer")
}
var t = b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.GetCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
t := b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.GetCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
b.EmitToken(t)
return t
}
@ -336,7 +336,7 @@ func (b *BaseLexer) EmitEOF() Token {
if PortDebug {
fmt.Println("emitEOF")
}
var eof = b.factory.Create(b.tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b.input.Index(), b.input.Index()-1, lpos, cpos)
eof := b.factory.Create(b.tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b.input.Index(), b.input.Index()-1, lpos, cpos)
b.EmitToken(eof)
return eof
}
@ -387,9 +387,9 @@ func (b *BaseLexer) getAllTokens() []Token {
if PortDebug {
fmt.Println("getAllTokens")
}
var vl = b.Virt
var tokens = make([]Token, 0)
var t = vl.NextToken()
vl := b.Virt
tokens := make([]Token, 0)
t := vl.NextToken()
for t.GetTokenType() != TokenEOF {
tokens = append(tokens, t)
if PortDebug {
@ -401,11 +401,11 @@ func (b *BaseLexer) getAllTokens() []Token {
}
func (b *BaseLexer) notifyListeners(e RecognitionException) {
var start = b.TokenStartCharIndex
var stop = b.input.Index()
var text = b.input.GetTextFromInterval(NewInterval(start, stop))
var msg = "token recognition error at: '" + text + "'"
var listener = b.GetErrorListenerDispatch()
start := b.TokenStartCharIndex
stop := b.input.Index()
text := b.input.GetTextFromInterval(NewInterval(start, stop))
msg := "token recognition error at: '" + text + "'"
listener := b.GetErrorListenerDispatch()
listener.SyntaxError(b, nil, b.TokenStartLine, b.TokenStartColumn, msg, e)
}

View File

@ -57,9 +57,9 @@ func LexerActionExecutorappend(lexerActionExecutor *LexerActionExecutor, lexerAc
return NewLexerActionExecutor([]LexerAction{lexerAction})
}
var lexerActions = append(lexerActionExecutor.lexerActions, lexerAction)
lexerActions := append(lexerActionExecutor.lexerActions, lexerAction)
// var lexerActions = lexerActionExecutor.lexerActions.concat([ lexerAction ])
// lexerActions := lexerActionExecutor.lexerActions.concat([ lexerAction ])
return NewLexerActionExecutor(lexerActions)
}
@ -137,8 +137,8 @@ func (l *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex
fmt.Println("execute")
fmt.Println("len(lexerActions)", len(l.lexerActions))
}
var requiresSeek = false
var stopIndex = input.Index()
requiresSeek := false
stopIndex := input.Index()
defer func() {
if requiresSeek {
@ -147,12 +147,12 @@ func (l *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex
}()
for i := 0; i < len(l.lexerActions); i++ {
var lexerAction = l.lexerActions[i]
lexerAction := l.lexerActions[i]
if la, ok := lexerAction.(*LexerIndexedCustomAction); ok {
if PortDebug {
fmt.Printf("LexerIndexedCustomAction")
}
var offset = la.offset
offset := la.offset
input.Seek(startIndex + offset)
lexerAction = la.lexerAction
requiresSeek = (startIndex + offset) != stopIndex

View File

@ -5,6 +5,16 @@ import (
"strconv"
)
var (
LexerATNSimulatorDebug = false
LexerATNSimulatorDFADebug = false
LexerATNSimulatorMinDFAEdge = 0
LexerATNSimulatorMaxDFAEdge = 127 // forces unicode to stay in ATN
LexerATNSimulatorMatchCalls = 0
)
type ILexerATNSimulator interface {
IATNSimulator
@ -55,14 +65,6 @@ func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedCon
return l
}
var LexerATNSimulatorDebug = false
var LexerATNSimulatorDFADebug = false
var LexerATNSimulatorMinDFAEdge = 0
var LexerATNSimulatorMaxDFAEdge = 127 // forces unicode to stay in ATN
var LexerATNSimulatorMatchCalls = 0
func (l *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
l.CharPositionInLine = simulator.CharPositionInLine
l.Line = simulator.Line
@ -77,7 +79,7 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
l.MatchCalls++
l.mode = mode
var mark = input.Mark()
mark := input.Mark()
defer func() {
if PortDebug {
@ -89,7 +91,7 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
l.startIndex = input.Index()
l.prevAccept.reset()
var dfa = l.decisionToDFA[mode]
dfa := l.decisionToDFA[mode]
if dfa.s0 == nil {
if PortDebug {
@ -115,23 +117,23 @@ func (l *LexerATNSimulator) reset() {
}
func (l *LexerATNSimulator) MatchATN(input CharStream) int {
var startState = l.atn.modeToStartState[l.mode]
startState := l.atn.modeToStartState[l.mode]
if LexerATNSimulatorDebug {
fmt.Println("MatchATN mode " + strconv.Itoa(l.mode) + " start: " + startState.String())
}
var oldMode = l.mode
var s0Closure = l.computeStartState(input, startState)
var suppressEdge = s0Closure.hasSemanticContext
oldMode := l.mode
s0Closure := l.computeStartState(input, startState)
suppressEdge := s0Closure.hasSemanticContext
s0Closure.hasSemanticContext = false
var next = l.addDFAState(s0Closure)
next := l.addDFAState(s0Closure)
if !suppressEdge {
l.decisionToDFA[l.mode].s0 = next
}
var predict = l.execATN(input, next)
predict := l.execATN(input, next)
if LexerATNSimulatorDebug {
fmt.Println("DFA after MatchATN: " + l.decisionToDFA[oldMode].ToLexerString())
@ -148,8 +150,8 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
// allow zero-length tokens
l.captureSimState(l.prevAccept, input, ds0)
}
var t = input.LA(1)
var s = ds0 // s is current/from DFA state
t := input.LA(1)
s := ds0 // s is current/from DFA state
if PortDebug {
fs,ok := input.(*FileStream)
@ -182,7 +184,7 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
// This optimization makes a lot of sense for loops within DFA.
// A character will take us back to an existing DFA state
// that already has lots of edges out of it. e.g., .* in comments.
var target = l.getExistingTargetState(s, t)
target := l.getExistingTargetState(s, t)
if PortDebug {
fmt.Println(t)
fmt.Println(target != nil)
@ -234,7 +236,7 @@ func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState
return nil
}
var target = s.edges[t-LexerATNSimulatorMinDFAEdge]
target := s.edges[t-LexerATNSimulatorMinDFAEdge]
if PortDebug {
fmt.Println("len edges", len(s.edges), t, t-LexerATNSimulatorMinDFAEdge)
}
@ -256,7 +258,7 @@ func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState
// {@code t}. If {@code t} does not lead to a valid DFA state, l method
// returns {@link //ERROR}.
func (l *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t int) *DFAState {
var reach = NewOrderedATNConfigSet()
reach := NewOrderedATNConfigSet()
// if we don't find an existing DFA state
// Fill reach starting from closure, following t transitions
@ -280,7 +282,7 @@ func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream,
if PortDebug {
fmt.Println(prevAccept.dfaState)
}
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor
lexerActionExecutor := prevAccept.dfaState.lexerActionExecutor
l.accept(input, lexerActionExecutor, l.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
if PortDebug {
@ -303,7 +305,7 @@ func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream,
func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNConfigSet, reach ATNConfigSet, t int) {
// l is used to Skip processing for configs which have a lower priority
// than a config that already reached an accept state for the same rule
var SkipAlt = ATNInvalidAltNumber
SkipAlt := ATNInvalidAltNumber
if PortDebug {
fmt.Println("getReachableConfigSet")
@ -311,7 +313,7 @@ func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNC
}
for _, cfg := range closure.GetItems() {
var currentAltReachedAcceptState = (cfg.GetAlt() == SkipAlt)
currentAltReachedAcceptState := (cfg.GetAlt() == SkipAlt)
if currentAltReachedAcceptState && cfg.(*LexerATNConfig).passedThroughNonGreedyDecision {
continue
}
@ -322,14 +324,14 @@ func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNC
}
for _, trans := range cfg.GetState().GetTransitions() {
var target = l.getReachableTarget(trans, t)
target := l.getReachableTarget(trans, t)
if target != nil {
var lexerActionExecutor = cfg.(*LexerATNConfig).lexerActionExecutor
lexerActionExecutor := cfg.(*LexerATNConfig).lexerActionExecutor
if lexerActionExecutor != nil {
lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.Index() - l.startIndex)
}
var treatEOFAsEpsilon = (t == TokenEOF)
var config = NewLexerATNConfig3(cfg.(*LexerATNConfig), target, lexerActionExecutor)
treatEOFAsEpsilon := (t == TokenEOF)
config := NewLexerATNConfig3(cfg.(*LexerATNConfig), target, lexerActionExecutor)
if l.closure(input, config, reach,
currentAltReachedAcceptState, true, treatEOFAsEpsilon) {
// any remaining configs for l alt have a lower priority
@ -368,10 +370,10 @@ func (l *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *Ord
fmt.Println("Num transitions" + strconv.Itoa(len(p.GetTransitions())))
}
var configs = NewOrderedATNConfigSet()
configs := NewOrderedATNConfigSet()
for i := 0; i < len(p.GetTransitions()); i++ {
var target = p.GetTransitions()[i].getTarget()
var cfg = NewLexerATNConfig6(target, i+1, BasePredictionContextEMPTY)
target := p.GetTransitions()[i].getTarget()
cfg := NewLexerATNConfig6(target, i+1, BasePredictionContextEMPTY)
l.closure(input, cfg, configs, false, false, false)
}
@ -416,8 +418,8 @@ func (l *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, co
if config.context != nil && !config.context.isEmpty() {
for i := 0; i < config.context.length(); i++ {
if config.context.getReturnState(i) != BasePredictionContextEmptyReturnState {
var newContext = config.context.GetParent(i) // "pop" return state
var returnState = l.atn.states[config.context.getReturnState(i)]
newContext := config.context.GetParent(i) // "pop" return state
returnState := l.atn.states[config.context.getReturnState(i)]
cfg := NewLexerATNConfig2(config, returnState, newContext)
currentAltReachedAcceptState = l.closure(input, cfg, configs, currentAltReachedAcceptState, speculative, treatEOFAsEpsilon)
}
@ -432,7 +434,7 @@ func (l *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, co
}
}
for j := 0; j < len(config.state.GetTransitions()); j++ {
var trans = config.state.GetTransitions()[j]
trans := config.state.GetTransitions()[j]
cfg := l.getEpsilonTarget(input, config, trans, configs, speculative, treatEOFAsEpsilon)
if cfg != nil {
currentAltReachedAcceptState = l.closure(input, cfg, configs,
@ -451,7 +453,7 @@ func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNC
if trans.getSerializationType() == TransitionRULE {
rt := trans.(*RuleTransition)
var newContext = SingletonBasePredictionContextCreate(config.context, rt.followState.GetStateNumber())
newContext := SingletonBasePredictionContextCreate(config.context, rt.followState.GetStateNumber())
cfg = NewLexerATNConfig2(config, trans.getTarget(), newContext)
} else if trans.getSerializationType() == TransitionPRECEDENCE {
@ -498,7 +500,7 @@ func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNC
// getEpsilonTarget to return two configurations, so
// additional modifications are needed before we can support
// the split operation.
var lexerActionExecutor = LexerActionExecutorappend(config.lexerActionExecutor, l.atn.lexerActions[trans.(*ActionTransition).actionIndex])
lexerActionExecutor := LexerActionExecutorappend(config.lexerActionExecutor, l.atn.lexerActions[trans.(*ActionTransition).actionIndex])
cfg = NewLexerATNConfig3(config, trans.getTarget(), lexerActionExecutor)
} else {
// ignore actions in referenced rules
@ -546,10 +548,10 @@ func (l *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predI
if !speculative {
return l.recog.Sempred(nil, ruleIndex, predIndex)
}
var savedcolumn = l.CharPositionInLine
var savedLine = l.Line
var index = input.Index()
var marker = input.Mark()
savedcolumn := l.CharPositionInLine
savedLine := l.Line
index := input.Index()
marker := input.Mark()
defer func() {
l.CharPositionInLine = savedcolumn
@ -585,7 +587,7 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
// If that gets us to a previously created (but dangling) DFA
// state, we can continue in pure DFA mode from there.
// /
var suppressEdge = cfgs.HasSemanticContext()
suppressEdge := cfgs.HasSemanticContext()
cfgs.SetHasSemanticContext(false)
to = l.addDFAState(cfgs)
@ -617,7 +619,7 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
// traversing the DFA, we will know which rule to accept.
func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
var proposed = NewDFAState(-1, configs)
proposed := NewDFAState(-1, configs)
var firstConfigWithRuleStopState ATNConfig
for _, cfg := range configs.GetItems() {
@ -634,13 +636,13 @@ func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
proposed.lexerActionExecutor = firstConfigWithRuleStopState.(*LexerATNConfig).lexerActionExecutor
proposed.setPrediction(l.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()])
}
var hash = proposed.Hash()
var dfa = l.decisionToDFA[l.mode]
var existing = dfa.GetStates()[hash]
hash := proposed.Hash()
dfa := l.decisionToDFA[l.mode]
existing := dfa.GetStates()[hash]
if existing != nil {
return existing
}
var newState = proposed
newState := proposed
newState.stateNumber = len(dfa.GetStates())
configs.SetReadOnly(true)
newState.configs = configs
@ -662,7 +664,7 @@ func (l *LexerATNSimulator) Consume(input CharStream) {
if PortDebug {
fmt.Println("consume", input.Index(), input.Size())
}
var curChar = input.LA(1)
curChar := input.LA(1)
if curChar == int('\n') {
l.Line++
l.CharPositionInLine = 0

View File

@ -34,12 +34,12 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
if s == nil {
return nil
}
var count = len(s.GetTransitions())
var look = make([]*IntervalSet, count)
count := len(s.GetTransitions())
look := make([]*IntervalSet, count)
for alt := 0; alt < count; alt++ {
look[alt] = NewIntervalSet()
var lookBusy = NewSet(nil, nil)
var seeThruPreds = false // fail to get lookahead upon pred
lookBusy := NewSet(nil, nil)
seeThruPreds := false // fail to get lookahead upon pred
la.look1(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
// Wipe out lookahead for la alternative if we found nothing
// or we had a predicate when we !seeThruPreds
@ -69,8 +69,8 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
// specified {@code ctx}.
///
func (la *LL1Analyzer) Look(s, stopState ATNState, ctx RuleContext) *IntervalSet {
var r = NewIntervalSet()
var seeThruPreds = true // ignore preds get all lookahead
r := NewIntervalSet()
seeThruPreds := true // ignore preds get all lookahead
var lookContext PredictionContext
if ctx != nil {
lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)

View File

@ -136,7 +136,7 @@ func (p *BaseParser) Match(ttype int) Token {
if PortDebug {
fmt.Println("get current token")
}
var t = p.GetCurrentToken()
t := p.GetCurrentToken()
if PortDebug {
fmt.Println("TOKEN IS " + t.GetText())
@ -179,7 +179,7 @@ func (p *BaseParser) Match(ttype int) Token {
// symbol
func (p *BaseParser) MatchWildcard() Token {
var t = p.GetCurrentToken()
t := p.GetCurrentToken()
if t.GetTokenType() > 0 {
p.errHandler.ReportMatch(p)
p.Consume()
@ -288,7 +288,7 @@ func (p *BaseParser) removeParseListeners() {
// Notify any parse listeners of an enter rule event.
func (p *BaseParser) TriggerEnterRuleEvent() {
if p.parseListeners != nil {
var ctx = p.ctx
ctx := p.ctx
for _, listener := range p.parseListeners {
listener.EnterEveryRule(ctx)
ctx.EnterRule(listener)
@ -343,13 +343,13 @@ func (p *BaseParser) GetATNWithBypassAlts() {
// TODO
panic("Not implemented!")
// var serializedAtn = p.getSerializedATN()
// serializedAtn := p.getSerializedATN()
// if (serializedAtn == nil) {
// panic("The current parser does not support an ATN with bypass alternatives.")
// }
// var result = p.bypassAltsAtnCache[serializedAtn]
// result := p.bypassAltsAtnCache[serializedAtn]
// if (result == nil) {
// var deserializationOptions = NewATNDeserializationOptions(nil)
// deserializationOptions := NewATNDeserializationOptions(nil)
// deserializationOptions.generateRuleBypassTransitions = true
// result = NewATNDeserializer(deserializationOptions).deserialize(serializedAtn)
// p.bypassAltsAtnCache[serializedAtn] = result
@ -374,7 +374,7 @@ func (p *BaseParser) compileParseTreePattern(pattern, patternRuleIndex, lexer Le
//
// if (lexer == nil) {
// if (p.GetTokenStream() != nil) {
// var tokenSource = p.GetTokenStream().GetTokenSource()
// tokenSource := p.GetTokenStream().GetTokenSource()
// if _, ok := tokenSource.(ILexer); ok {
// lexer = tokenSource
// }
@ -384,7 +384,7 @@ func (p *BaseParser) compileParseTreePattern(pattern, patternRuleIndex, lexer Le
// panic("Parser can't discover a lexer to use")
// }
// var m = NewParseTreePatternMatcher(lexer, p)
// m := NewParseTreePatternMatcher(lexer, p)
// return m.compile(pattern, patternRuleIndex)
}
@ -419,14 +419,14 @@ func (p *BaseParser) NotifyErrorListeners(msg string, offendingToken Token, err
offendingToken = p.GetCurrentToken()
}
p._SyntaxErrors++
var line = offendingToken.GetLine()
var column = offendingToken.GetColumn()
line := offendingToken.GetLine()
column := offendingToken.GetColumn()
listener := p.GetErrorListenerDispatch()
listener.SyntaxError(p, offendingToken, line, column, msg, err)
}
func (p *BaseParser) Consume() Token {
var o = p.GetCurrentToken()
o := p.GetCurrentToken()
if o.GetTokenType() != TokenEOF {
if PortDebug {
fmt.Println("Consuming")
@ -436,10 +436,10 @@ func (p *BaseParser) Consume() Token {
fmt.Println("Done consuming")
}
}
var hasListener = p.parseListeners != nil && len(p.parseListeners) > 0
hasListener := p.parseListeners != nil && len(p.parseListeners) > 0
if p.BuildParseTrees || hasListener {
if p.errHandler.inErrorRecoveryMode(p) {
var node = p.ctx.AddErrorNode(o)
node := p.ctx.AddErrorNode(o)
if p.parseListeners != nil {
for _, l := range p.parseListeners {
l.VisitErrorNode(node)
@ -534,7 +534,7 @@ func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, ruleI
// Like {@link //EnterRule} but for recursive rules.
func (p *BaseParser) PushNewRecursionContext(localctx ParserRuleContext, state, ruleIndex int) {
var previous = p.ctx
previous := p.ctx
previous.SetParent(localctx)
previous.SetInvokingState(state)
previous.SetStop(p.input.LT(-1))
@ -553,7 +553,7 @@ func (p *BaseParser) PushNewRecursionContext(localctx ParserRuleContext, state,
func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) {
p.precedenceStack.Pop()
p.ctx.SetStop(p.input.LT(-1))
var retCtx = p.ctx // save current ctx (return value)
retCtx := p.ctx // save current ctx (return value)
// unroll so ctx is as it was before call to recursive method
if p.parseListeners != nil {
for p.ctx != parentCtx {
@ -572,7 +572,7 @@ func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) {
}
func (p *BaseParser) GetInvokingContext(ruleIndex int) ParserRuleContext {
var ctx = p.ctx
ctx := p.ctx
for ctx != nil {
if ctx.GetRuleIndex() == ruleIndex {
return ctx
@ -606,10 +606,10 @@ func (p *BaseParser) inContext(context ParserRuleContext) bool {
// the ATN, otherwise {@code false}.
func (p *BaseParser) IsExpectedToken(symbol int) bool {
var atn = p.Interpreter.atn
var ctx = p.ctx
var s = atn.states[p.state]
var following = atn.NextTokens(s, nil)
atn := p.Interpreter.atn
ctx := p.ctx
s := atn.states[p.state]
following := atn.NextTokens(s, nil)
if following.contains(symbol) {
return true
}
@ -617,8 +617,8 @@ func (p *BaseParser) IsExpectedToken(symbol int) bool {
return false
}
for ctx != nil && ctx.GetInvokingState() >= 0 && following.contains(TokenEpsilon) {
var invokingState = atn.states[ctx.GetInvokingState()]
var rt = invokingState.GetTransitions()[0]
invokingState := atn.states[ctx.GetInvokingState()]
rt := invokingState.GetTransitions()[0]
following = atn.NextTokens(rt.(*RuleTransition).followState, nil)
if following.contains(symbol) {
return true
@ -643,8 +643,8 @@ func (p *BaseParser) GetExpectedTokens() *IntervalSet {
}
func (p *BaseParser) GetExpectedTokensWithinCurrentRule() *IntervalSet {
var atn = p.Interpreter.atn
var s = atn.states[p.state]
atn := p.Interpreter.atn
s := atn.states[p.state]
return atn.NextTokens(s, nil)
}
@ -669,10 +669,10 @@ func (p *BaseParser) GetRuleInvocationStack(c ParserRuleContext) []string {
if c == nil {
c = p.ctx
}
var stack = make([]string, 0)
stack := make([]string, 0)
for c != nil {
// compute what follows who invoked us
var ruleIndex = c.GetRuleIndex()
ruleIndex := c.GetRuleIndex()
if ruleIndex < 0 {
stack = append(stack, "n/a")
} else {
@ -697,7 +697,7 @@ func (p *BaseParser) GetDFAStrings() string {
// For debugging and other purposes.//
func (p *BaseParser) DumpDFA() {
var seenOne = false
seenOne := false
for _, dfa := range p.Interpreter.decisionToDFA {
if len(dfa.GetStates()) > 0 {
if seenOne {

View File

@ -6,6 +6,13 @@ import (
"strings"
)
var (
ParserATNSimulatorDebug = false
ParserATNSimulatorListATNDecisions = false
ParserATNSimulatorDFADebug = false
ParserATNSimulatorRetryDebug = false
)
type ParserATNSimulator struct {
*BaseATNSimulator
@ -46,11 +53,6 @@ func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, shared
return p
}
var ParserATNSimulatorDebug = false
var ParserATNSimulatorListATNDecisions = false
var ParserATNSimulatorDFADebug = false
var ParserATNSimulatorRetryDebug = false
func (p *ParserATNSimulator) GetPredictionMode() int {
return p.predictionMode
}
@ -80,10 +82,10 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
p.startIndex = input.Index()
p.outerContext = outerContext
var dfa = p.decisionToDFA[decision]
dfa := p.decisionToDFA[decision]
p.dfa = dfa
var m = input.Mark()
var index = input.Index()
m := input.Mark()
index := input.Index()
defer func() {
p.dfa = nil
@ -118,15 +120,15 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
// closure block that determines whether a precedence rule
// should continue or complete.
var t2 ATNState = dfa.atnStartState
t2 := dfa.atnStartState
t, ok := t2.(*StarLoopEntryState)
if !dfa.precedenceDfa && ok {
if t.precedenceRuleDecision {
dfa.setPrecedenceDfa(true)
}
}
var fullCtx = false
var s0Closure = p.computeStartState(dfa.atnStartState, RuleContextEmpty, fullCtx)
fullCtx := false
s0Closure := p.computeStartState(dfa.atnStartState, RuleContextEmpty, fullCtx)
if dfa.precedenceDfa {
// If p is a precedence DFA, we use applyPrecedenceFilter
@ -149,7 +151,7 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
dfa.s0 = s0
}
}
var alt = p.execATN(dfa, s0, input, index, outerContext)
alt := p.execATN(dfa, s0, input, index, outerContext)
if ParserATNSimulatorDebug {
fmt.Println("DFA after predictATN: " + dfa.String(p.parser.GetLiteralNames(), nil))
}
@ -195,14 +197,14 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
" line " + strconv.Itoa(input.LT(1).GetLine()) + ":" + strconv.Itoa(input.LT(1).GetColumn()))
}
var previousD = s0
previousD := s0
if ParserATNSimulatorDebug {
fmt.Println("s0 = " + s0.String())
}
var t = input.LA(1)
t := input.LA(1)
for { // for more work
var D = p.getExistingTargetState(previousD, t)
D := p.getExistingTargetState(previousD, t)
if D == nil {
D = p.computeTargetState(dfa, previousD, t)
}
@ -227,12 +229,12 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
}
if D.requiresFullContext && p.predictionMode != PredictionModeSLL {
// IF PREDS, MIGHT RESOLVE TO SINGLE ALT => SLL (or syntax error)
var conflictingAlts = D.configs.GetConflictingAlts()
conflictingAlts := D.configs.GetConflictingAlts()
if D.predicates != nil {
if ParserATNSimulatorDebug {
fmt.Println("DFA state has preds in DFA sim LL failover")
}
var conflictIndex = input.Index()
conflictIndex := input.Index()
if conflictIndex != startIndex {
input.Seek(startIndex)
}
@ -252,8 +254,8 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
if ParserATNSimulatorDFADebug {
fmt.Println("ctx sensitive state " + outerContext.String(nil, nil) + " in " + D.String())
}
var fullCtx = true
var s0Closure = p.computeStartState(dfa.atnStartState, outerContext, fullCtx)
fullCtx := true
s0Closure := p.computeStartState(dfa.atnStartState, outerContext, fullCtx)
p.ReportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.Index())
alt := p.execATNWithFullContext(dfa, D, s0Closure, input, startIndex, outerContext)
return alt
@ -262,9 +264,9 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
if D.predicates == nil {
return D.prediction
}
var stopIndex = input.Index()
stopIndex := input.Index()
input.Seek(startIndex)
var alts = p.evalSemanticContext(D.predicates, outerContext, true)
alts := p.evalSemanticContext(D.predicates, outerContext, true)
if alts.length() == 0 {
panic(p.noViableAlt(input, outerContext, D.configs, startIndex))
} else if alts.length() == 1 {
@ -297,7 +299,7 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
// already cached
func (p *ParserATNSimulator) getExistingTargetState(previousD *DFAState, t int) *DFAState {
var edges = previousD.edges
edges := previousD.edges
if edges == nil {
return nil
}
@ -317,19 +319,19 @@ func (p *ParserATNSimulator) getExistingTargetState(previousD *DFAState, t int)
// returns {@link //ERROR}.
func (p *ParserATNSimulator) computeTargetState(dfa *DFA, previousD *DFAState, t int) *DFAState {
var reach = p.computeReachSet(previousD.configs, t, false)
reach := p.computeReachSet(previousD.configs, t, false)
if reach == nil {
p.addDFAEdge(dfa, previousD, t, ATNSimulatorError)
return ATNSimulatorError
}
// create Newtarget state we'll add to DFA after it's complete
var D = NewDFAState(-1, reach)
D := NewDFAState(-1, reach)
var predictedAlt = p.getUniqueAlt(reach)
predictedAlt := p.getUniqueAlt(reach)
if ParserATNSimulatorDebug {
var altSubSets = PredictionModegetConflictingAltSubsets(reach)
altSubSets := PredictionModegetConflictingAltSubsets(reach)
fmt.Println("SLL altSubSets=" + fmt.Sprint(altSubSets) +
", previous=" + previousD.configs.String() +
", configs=" + reach.String() +
@ -365,11 +367,11 @@ func (p *ParserATNSimulator) computeTargetState(dfa *DFA, previousD *DFAState, t
func (p *ParserATNSimulator) predicateDFAState(dfaState *DFAState, decisionState DecisionState) {
// We need to test all predicates, even in DFA states that
// uniquely predict alternative.
var nalts = len(decisionState.GetTransitions())
nalts := len(decisionState.GetTransitions())
// Update DFA so reach becomes accept state with (predicate,alt)
// pairs if preds found for conflicting alts
var altsToCollectPredsFrom = p.getConflictingAltsOrUniqueAlt(dfaState.configs)
var altToPred = p.getPredsForAmbigAlts(altsToCollectPredsFrom, dfaState.configs, nalts)
altsToCollectPredsFrom := p.getConflictingAltsOrUniqueAlt(dfaState.configs)
altToPred := p.getPredsForAmbigAlts(altsToCollectPredsFrom, dfaState.configs, nalts)
if altToPred != nil {
dfaState.predicates = p.getPredicatePredictions(altsToCollectPredsFrom, altToPred)
dfaState.setPrediction(ATNInvalidAltNumber) // make sure we use preds
@ -388,13 +390,13 @@ func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 AT
fmt.Println("execATNWithFullContext " + s0.String())
}
var fullCtx = true
var foundExactAmbig = false
fullCtx := true
foundExactAmbig := false
var reach ATNConfigSet
var previous = s0
previous := s0
input.Seek(startIndex)
var t = input.LA(1)
var predictedAlt = -1
t := input.LA(1)
predictedAlt := -1
for { // for more work
reach = p.computeReachSet(previous, t, fullCtx)
@ -408,16 +410,16 @@ func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 AT
// ATN states in SLL implies LL will also get nowhere.
// If conflict in states that dip out, choose min since we
// will get error no matter what.
var e = p.noViableAlt(input, outerContext, previous, startIndex)
e := p.noViableAlt(input, outerContext, previous, startIndex)
input.Seek(startIndex)
var alt = p.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext)
alt := p.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext)
if alt != ATNInvalidAltNumber {
return alt
}
panic(e)
}
var altSubSets = PredictionModegetConflictingAltSubsets(reach)
altSubSets := PredictionModegetConflictingAltSubsets(reach)
if ParserATNSimulatorDebug {
fmt.Println("LL altSubSets=" + fmt.Sprint(altSubSets) + ", predict=" +
strconv.Itoa(PredictionModegetUniqueAlt(altSubSets)) + ", resolvesToJustOneViableAlt=" +
@ -497,7 +499,7 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt
if p.mergeCache == nil {
p.mergeCache = NewDoubleDict()
}
var intermediate = NewBaseATNConfigSet(fullCtx)
intermediate := NewBaseATNConfigSet(fullCtx)
// Configurations already in a rule stop state indicate reaching the end
// of the decision rule (local context) or end of the start rule (full
@ -533,10 +535,10 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt
}
for j := 0; j < len(c.GetState().GetTransitions()); j++ {
var trans = c.GetState().GetTransitions()[j]
var target = p.getReachableTarget(trans, t)
trans := c.GetState().GetTransitions()[j]
target := p.getReachableTarget(trans, t)
if target != nil {
var cfg = NewBaseATNConfig4(c, target)
cfg := NewBaseATNConfig4(c, target)
intermediate.Add(cfg, p.mergeCache)
if ParserATNSimulatorDebug {
fmt.Println("added " + cfg.String() + " to intermediate")
@ -574,8 +576,8 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt
//
if reach == nil {
reach = NewBaseATNConfigSet(fullCtx)
var closureBusy = NewSet(nil, nil)
var treatEOFAsEpsilon = t == TokenEOF
closureBusy := NewSet(nil, nil)
treatEOFAsEpsilon := t == TokenEOF
for k := 0; k < len(intermediate.configs); k++ {
p.closure(intermediate.configs[k], reach, closureBusy, false, fullCtx, treatEOFAsEpsilon)
}
@ -644,7 +646,7 @@ func (p *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs ATNConfi
if PredictionModeallConfigsInRuleStopStates(configs) {
return configs
}
var result = NewBaseATNConfigSet(configs.FullContext())
result := NewBaseATNConfigSet(configs.FullContext())
for _, config := range configs.GetItems() {
_, ok := config.GetState().(*RuleStopState)
@ -654,9 +656,9 @@ func (p *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs ATNConfi
continue
}
if lookToEndOfRule && config.GetState().GetEpsilonOnlyTransitions() {
var NextTokens = p.atn.NextTokens(config.GetState(), nil)
NextTokens := p.atn.NextTokens(config.GetState(), nil)
if NextTokens.contains(TokenEpsilon) {
var endOfRuleState = p.atn.ruleToStopState[config.GetState().GetRuleIndex()]
endOfRuleState := p.atn.ruleToStopState[config.GetState().GetRuleIndex()]
result.Add(NewBaseATNConfig4(config, endOfRuleState), p.mergeCache)
}
}
@ -670,12 +672,12 @@ func (p *ParserATNSimulator) computeStartState(a ATNState, ctx RuleContext, full
fmt.Println("computeStartState")
}
// always at least the implicit call to start rule
var initialContext = predictionContextFromRuleContext(p.atn, ctx)
var configs = NewBaseATNConfigSet(fullCtx)
initialContext := predictionContextFromRuleContext(p.atn, ctx)
configs := NewBaseATNConfigSet(fullCtx)
for i := 0; i < len(a.GetTransitions()); i++ {
var target = a.GetTransitions()[i].getTarget()
var c = NewBaseATNConfig6(target, i+1, initialContext)
var closureBusy = NewSet(nil, nil)
target := a.GetTransitions()[i].getTarget()
c := NewBaseATNConfig6(target, i+1, initialContext)
closureBusy := NewSet(nil, nil)
p.closure(c, configs, closureBusy, true, fullCtx, false)
}
return configs
@ -739,8 +741,8 @@ func (p *ParserATNSimulator) computeStartState(a ATNState, ctx RuleContext, full
//
func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConfigSet {
var statesFromAlt1 = make(map[int]PredictionContext)
var configSet = NewBaseATNConfigSet(configs.FullContext())
statesFromAlt1 := make(map[int]PredictionContext)
configSet := NewBaseATNConfigSet(configs.FullContext())
if PortDebug {
fmt.Println("len", len(configs.GetItems()))
@ -756,7 +758,7 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf
}
continue
}
var updatedContext = config.GetSemanticContext().evalPrecedence(p.parser, p.outerContext)
updatedContext := config.GetSemanticContext().evalPrecedence(p.parser, p.outerContext)
if updatedContext == nil {
// the configuration was eliminated
continue
@ -790,7 +792,7 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf
if PortDebug {
fmt.Println("!getPrecedenceFilterSuppressed")
}
var context = statesFromAlt1[config.GetState().GetStateNumber()]
context := statesFromAlt1[config.GetState().GetStateNumber()]
if context != nil && context.equals(config.GetContext()) {
// eliminated
continue
@ -814,15 +816,15 @@ func (p *ParserATNSimulator) getReachableTarget(trans Transition, ttype int) ATN
func (p *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs ATNConfigSet, nalts int) []SemanticContext {
var altToPred = make([]SemanticContext, nalts+1)
altToPred := make([]SemanticContext, nalts+1)
for _, c := range configs.GetItems() {
if ambigAlts.contains(c.GetAlt()) {
altToPred[c.GetAlt()] = SemanticContextorContext(altToPred[c.GetAlt()], c.GetSemanticContext())
}
}
var nPredAlts = 0
nPredAlts := 0
for i := 1; i < nalts+1; i++ {
var pred = altToPred[i]
pred := altToPred[i]
if pred == nil {
altToPred[i] = SemanticContextNone
} else if pred != SemanticContextNone {
@ -840,10 +842,10 @@ func (p *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs ATN
}
func (p *ParserATNSimulator) getPredicatePredictions(ambigAlts *BitSet, altToPred []SemanticContext) []*PredPrediction {
var pairs = make([]*PredPrediction, 0)
var containsPredicate = false
pairs := make([]*PredPrediction, 0)
containsPredicate := false
for i := 1; i < len(altToPred); i++ {
var pred = altToPred[i]
pred := altToPred[i]
// unpredicated is indicated by SemanticContextNONE
if ambigAlts != nil && ambigAlts.contains(i) {
pairs = append(pairs, NewPredPrediction(pred, i))
@ -905,10 +907,10 @@ func (p *ParserATNSimulator) getPredicatePredictions(ambigAlts *BitSet, altToPre
// identified and {@link //AdaptivePredict} should Report an error instead.
//
func (p *ParserATNSimulator) getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(configs ATNConfigSet, outerContext ParserRuleContext) int {
var cfgs = p.splitAccordingToSemanticValidity(configs, outerContext)
var semValidConfigs = cfgs[0]
var semInvalidConfigs = cfgs[1]
var alt = p.GetAltThatFinishedDecisionEntryRule(semValidConfigs)
cfgs := p.splitAccordingToSemanticValidity(configs, outerContext)
semValidConfigs := cfgs[0]
semInvalidConfigs := cfgs[1]
alt := p.GetAltThatFinishedDecisionEntryRule(semValidConfigs)
if alt != ATNInvalidAltNumber { // semantically/syntactically viable path exists
return alt
}
@ -923,7 +925,7 @@ func (p *ParserATNSimulator) getSynValidOrSemInvalidAltThatFinishedDecisionEntry
}
func (p *ParserATNSimulator) GetAltThatFinishedDecisionEntryRule(configs ATNConfigSet) int {
var alts = NewIntervalSet()
alts := NewIntervalSet()
for _, c := range configs.GetItems() {
_, ok := c.GetState().(*RuleStopState)
@ -953,12 +955,12 @@ type ATNConfigSetPair struct {
}
func (p *ParserATNSimulator) splitAccordingToSemanticValidity(configs ATNConfigSet, outerContext ParserRuleContext) []ATNConfigSet {
var succeeded = NewBaseATNConfigSet(configs.FullContext())
var failed = NewBaseATNConfigSet(configs.FullContext())
succeeded := NewBaseATNConfigSet(configs.FullContext())
failed := NewBaseATNConfigSet(configs.FullContext())
for _, c := range configs.GetItems() {
if c.GetSemanticContext() != SemanticContextNone {
var predicateEvaluationResult = c.GetSemanticContext().evaluate(p.parser, outerContext)
predicateEvaluationResult := c.GetSemanticContext().evaluate(p.parser, outerContext)
if predicateEvaluationResult {
succeeded.Add(c, nil)
} else {
@ -978,9 +980,9 @@ func (p *ParserATNSimulator) splitAccordingToSemanticValidity(configs ATNConfigS
// includes pairs with nil predicates.
//
func (p *ParserATNSimulator) evalSemanticContext(predPredictions []*PredPrediction, outerContext ParserRuleContext, complete bool) *BitSet {
var predictions = NewBitSet()
predictions := NewBitSet()
for i := 0; i < len(predPredictions); i++ {
var pair = predPredictions[i]
pair := predPredictions[i]
if pair.pred == SemanticContextNone {
predictions.add(pair.alt)
if !complete {
@ -989,7 +991,7 @@ func (p *ParserATNSimulator) evalSemanticContext(predPredictions []*PredPredicti
continue
}
var predicateEvaluationResult = pair.pred.evaluate(p.parser, outerContext)
predicateEvaluationResult := pair.pred.evaluate(p.parser, outerContext)
if ParserATNSimulatorDebug || ParserATNSimulatorDFADebug {
fmt.Println("eval pred " + pair.String() + "=" + fmt.Sprint(predicateEvaluationResult))
}
@ -1007,7 +1009,7 @@ func (p *ParserATNSimulator) evalSemanticContext(predPredictions []*PredPredicti
}
func (p *ParserATNSimulator) closure(config ATNConfig, configs ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx, treatEOFAsEpsilon bool) {
var initialDepth = 0
initialDepth := 0
p.closureCheckingStopState(config, configs, closureBusy, collectPredicates,
fullCtx, initialDepth, treatEOFAsEpsilon)
}
@ -1077,7 +1079,7 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,
if PortDebug {
fmt.Println("closureWork")
}
var state = config.GetState()
state := config.GetState()
// optimization
if !state.GetEpsilonOnlyTransitions() {
configs.Add(config, p.mergeCache)
@ -1085,10 +1087,10 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,
// both epsilon transitions and non-epsilon transitions.
}
for i := 0; i < len(state.GetTransitions()); i++ {
var t = state.GetTransitions()[i]
t := state.GetTransitions()[i]
_, ok := t.(*ActionTransition)
var continueCollecting = collectPredicates && !ok
var c = p.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEOFAsEpsilon)
continueCollecting := collectPredicates && !ok
c := p.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEOFAsEpsilon)
if ci, ok := c.(*BaseATNConfig); ok && ci != nil {
if PortDebug {
fmt.Println("DEBUG 1 ok")
@ -1097,7 +1099,7 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,
// avoid infinite recursion for EOF* and EOF+
continue
}
var newDepth = depth
newDepth := depth
if _, ok := config.GetState().(*RuleStopState); ok {
@ -1236,9 +1238,9 @@ func (p *ParserATNSimulator) precedenceTransition(config ATNConfig,
// during closure, which dramatically reduces the size of
// the config sets. It also obviates the need to test predicates
// later during conflict resolution.
var currentPosition = p.input.Index()
currentPosition := p.input.Index()
p.input.Seek(p.startIndex)
var predSucceeds = pt.getPredicate().evaluate(p.parser, p.outerContext)
predSucceeds := pt.getPredicate().evaluate(p.parser, p.outerContext)
p.input.Seek(currentPosition)
if predSucceeds {
c = NewBaseATNConfig4(config, pt.getTarget()) // no pred context
@ -1272,15 +1274,15 @@ func (p *ParserATNSimulator) predTransition(config ATNConfig, pt *PredicateTrans
// during closure, which dramatically reduces the size of
// the config sets. It also obviates the need to test predicates
// later during conflict resolution.
var currentPosition = p.input.Index()
currentPosition := p.input.Index()
p.input.Seek(p.startIndex)
var predSucceeds = pt.getPredicate().evaluate(p.parser, p.outerContext)
predSucceeds := pt.getPredicate().evaluate(p.parser, p.outerContext)
p.input.Seek(currentPosition)
if predSucceeds {
c = NewBaseATNConfig4(config, pt.getTarget()) // no pred context
}
} else {
var newSemCtx = SemanticContextandContext(config.GetSemanticContext(), pt.getPredicate())
newSemCtx := SemanticContextandContext(config.GetSemanticContext(), pt.getPredicate())
c = NewBaseATNConfig3(config, pt.getTarget(), newSemCtx)
}
} else {
@ -1296,13 +1298,13 @@ func (p *ParserATNSimulator) ruleTransition(config ATNConfig, t *RuleTransition)
if ParserATNSimulatorDebug {
fmt.Println("CALL rule " + p.getRuleName(t.getTarget().GetRuleIndex()) + ", ctx=" + config.GetContext().String())
}
var returnState = t.followState
var newContext = SingletonBasePredictionContextCreate(config.GetContext(), returnState.GetStateNumber())
returnState := t.followState
newContext := SingletonBasePredictionContextCreate(config.GetContext(), returnState.GetStateNumber())
return NewBaseATNConfig1(config, t.getTarget(), newContext)
}
func (p *ParserATNSimulator) getConflictingAlts(configs ATNConfigSet) *BitSet {
var altsets = PredictionModegetConflictingAltSubsets(configs)
altsets := PredictionModegetConflictingAltSubsets(configs)
return PredictionModeGetAlts(altsets)
}
@ -1392,7 +1394,7 @@ func (p *ParserATNSimulator) dumpDeadEndConfigs(nvae *NoViableAltException) {
//
// for i:=0; i<len(decs); i++ {
//
// var c = decs[i]
// c := decs[i]
// var trans = "no edges"
// if (len(c.state.GetTransitions())>0) {
// var t = c.state.GetTransitions()[0]
@ -1418,7 +1420,7 @@ func (p *ParserATNSimulator) noViableAlt(input TokenStream, outerContext ParserR
}
func (p *ParserATNSimulator) getUniqueAlt(configs ATNConfigSet) int {
var alt = ATNInvalidAltNumber
alt := ATNInvalidAltNumber
for _, c := range configs.GetItems() {
if alt == ATNInvalidAltNumber {
alt = c.GetAlt() // found first alt
@ -1495,7 +1497,7 @@ func (p *ParserATNSimulator) addDFAState(dfa *DFA, D *DFAState) *DFAState {
if D == ATNSimulatorError {
return D
}
var hash = D.Hash()
hash := D.Hash()
var existing, ok = dfa.GetStates()[hash]
if ok {
return existing
@ -1514,7 +1516,7 @@ func (p *ParserATNSimulator) addDFAState(dfa *DFA, D *DFAState) *DFAState {
func (p *ParserATNSimulator) ReportAttemptingFullContext(dfa *DFA, conflictingAlts *BitSet, configs ATNConfigSet, startIndex, stopIndex int) {
if ParserATNSimulatorDebug || ParserATNSimulatorRetryDebug {
var interval = NewInterval(startIndex, stopIndex+1)
interval := NewInterval(startIndex, stopIndex+1)
fmt.Println("ReportAttemptingFullContext decision=" + strconv.Itoa(dfa.decision) + ":" + configs.String() +
", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval))
}
@ -1525,7 +1527,7 @@ func (p *ParserATNSimulator) ReportAttemptingFullContext(dfa *DFA, conflictingAl
func (p *ParserATNSimulator) ReportContextSensitivity(dfa *DFA, prediction int, configs ATNConfigSet, startIndex, stopIndex int) {
if ParserATNSimulatorDebug || ParserATNSimulatorRetryDebug {
var interval = NewInterval(startIndex, stopIndex+1)
interval := NewInterval(startIndex, stopIndex+1)
fmt.Println("ReportContextSensitivity decision=" + strconv.Itoa(dfa.decision) + ":" + configs.String() +
", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval))
}
@ -1538,7 +1540,7 @@ func (p *ParserATNSimulator) ReportContextSensitivity(dfa *DFA, prediction int,
func (p *ParserATNSimulator) ReportAmbiguity(dfa *DFA, D *DFAState, startIndex, stopIndex int,
exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
if ParserATNSimulatorDebug || ParserATNSimulatorRetryDebug {
var interval = NewInterval(startIndex, stopIndex+1)
interval := NewInterval(startIndex, stopIndex+1)
fmt.Println("ReportAmbiguity " + ambigAlts.String() + ":" + configs.String() +
", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval))
}

View File

@ -128,7 +128,7 @@ func (prc *BaseParserRuleContext) RemoveLastChild() {
func (prc *BaseParserRuleContext) AddTokenNode(token Token) *TerminalNodeImpl {
var node = NewTerminalNodeImpl(token)
node := NewTerminalNodeImpl(token)
prc.addTerminalNodeChild(node)
node.parentCtx = prc
return node
@ -136,7 +136,7 @@ func (prc *BaseParserRuleContext) AddTokenNode(token Token) *TerminalNodeImpl {
}
func (prc *BaseParserRuleContext) AddErrorNode(badToken Token) *ErrorNodeImpl {
var node = NewErrorNodeImpl(badToken)
node := NewErrorNodeImpl(badToken)
prc.addTerminalNodeChild(node)
node.parentCtx = prc
return node
@ -156,7 +156,7 @@ func (prc *BaseParserRuleContext) GetChildOfType(i int, childType reflect.Type)
}
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
child := prc.children[j]
if reflect.TypeOf(child) == childType {
if i == 0 {
return child.(RuleContext)
@ -200,7 +200,7 @@ func (prc *BaseParserRuleContext) GetStop() Token {
func (prc *BaseParserRuleContext) GetToken(ttype int, i int) TerminalNode {
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
child := prc.children[j]
if c2, ok := child.(TerminalNode); ok {
if c2.GetSymbol().GetTokenType() == ttype {
if i == 0 {
@ -219,10 +219,10 @@ func (prc *BaseParserRuleContext) GetTokens(ttype int) []TerminalNode {
return make([]TerminalNode, 0)
}
var tokens = make([]TerminalNode, 0)
tokens := make([]TerminalNode, 0)
for j := 0; j < len(prc.children); j++ {
var child = prc.children[j]
child := prc.children[j]
if tchild, ok := child.(TerminalNode); ok {
if tchild.GetSymbol().GetTokenType() == ttype {
tokens = append(tokens, tchild)
@ -242,7 +242,7 @@ func (prc *BaseParserRuleContext) getChild(ctxType reflect.Type, i int) RuleCont
return nil
}
var j = -1 // what element have we found with ctxType?
j := -1 // what element have we found with ctxType?
for _, o := range prc.children {
childType := reflect.TypeOf(o)
@ -269,7 +269,7 @@ func (prc *BaseParserRuleContext) GetTypedRuleContexts(ctxType reflect.Type) []R
return make([]RuleContext, 0)
}
var contexts = make([]RuleContext, 0)
contexts := make([]RuleContext, 0)
for _, child := range prc.children {
childType := reflect.TypeOf(child)
@ -306,14 +306,14 @@ func (prc *BaseParserRuleContext) GetSourceInterval() *Interval {
func (prc *BaseParserRuleContext) String(ruleNames []string, stop RuleContext) string {
var p ParserRuleContext = prc
var s = "["
s := "["
for p != nil && p != stop {
if ruleNames == nil {
if !p.IsEmpty() {
s += strconv.Itoa(p.GetInvokingState())
}
} else {
var ri = p.GetRuleIndex()
ri := p.GetRuleIndex()
var ruleName string
if ri >= 0 && ri < len(ruleNames) {
ruleName = ruleNames[ri]

View File

@ -5,6 +5,23 @@ import (
"strconv"
)
// Represents {@code $} in local context prediction, which means wildcard.
// {@code//+x =//}.
// /
const (
BasePredictionContextEmptyReturnState = 0x7FFFFFFF
)
// Represents {@code $} in an array in full context mode, when {@code $}
// doesn't mean wildcard: {@code $ + x = [$,x]}. Here,
// {@code $} = {@link //EmptyReturnState}.
// /
var (
BasePredictionContextglobalNodeCount = 1
BasePredictionContextid = BasePredictionContextglobalNodeCount
)
type PredictionContext interface {
Hash() string
GetParent(int) PredictionContext
@ -21,28 +38,12 @@ type BasePredictionContext struct {
}
func NewBasePredictionContext(cachedHashString string) *BasePredictionContext {
pc := new(BasePredictionContext)
pc.cachedHashString = cachedHashString
return pc
}
// Represents {@code $} in local context prediction, which means wildcard.
// {@code//+x =//}.
// /
const (
BasePredictionContextEmptyReturnState = 0x7FFFFFFF
)
// Represents {@code $} in an array in full context mode, when {@code $}
// doesn't mean wildcard: {@code $ + x = [$,x]}. Here,
// {@code $} = {@link //EmptyReturnState}.
// /
var BasePredictionContextglobalNodeCount = 1
var BasePredictionContextid = BasePredictionContextglobalNodeCount
// Stores the computed hash code of this {@link BasePredictionContext}. The hash
// code is computed in parts to Match the following reference algorithm.
//
@ -106,7 +107,7 @@ func (p *PredictionContextCache) add(ctx PredictionContext) PredictionContext {
if ctx == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY
}
var existing = p.cache[ctx]
existing := p.cache[ctx]
if existing != nil {
return existing
}
@ -322,7 +323,7 @@ func (a *ArrayPredictionContext) String() string {
return "[]"
}
var s = "["
s := "["
for i := 0; i < len(a.returnStates); i++ {
if i > 0 {
s = s + ", "
@ -355,15 +356,15 @@ func predictionContextFromRuleContext(a *ATN, outerContext RuleContext) Predicti
return BasePredictionContextEMPTY
}
// If we have a parent, convert it to a BasePredictionContext graph
var parent = predictionContextFromRuleContext(a, outerContext.GetParent().(RuleContext))
var state = a.states[outerContext.GetInvokingState()]
var transition = state.GetTransitions()[0]
parent := predictionContextFromRuleContext(a, outerContext.GetParent().(RuleContext))
state := a.states[outerContext.GetInvokingState()]
transition := state.GetTransitions()[0]
return SingletonBasePredictionContextCreate(parent, transition.(*RuleTransition).followState.GetStateNumber())
}
func calculateListsHashString(parents []BasePredictionContext, returnStates []int) string {
var s = ""
s := ""
for _, p := range parents {
s += fmt.Sprint(p)
@ -441,7 +442,7 @@ func merge(a, b PredictionContext, rootIsWildcard bool, mergeCache *DoubleDict)
// /
func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) PredictionContext {
if mergeCache != nil {
var previous = mergeCache.Get(a.Hash(), b.Hash())
previous := mergeCache.Get(a.Hash(), b.Hash())
if previous != nil {
return previous.(PredictionContext)
}
@ -451,7 +452,7 @@ func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool,
}
}
var rootMerge = mergeRoot(a, b, rootIsWildcard)
rootMerge := mergeRoot(a, b, rootIsWildcard)
if rootMerge != nil {
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), rootMerge)
@ -459,7 +460,7 @@ func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool,
return rootMerge
}
if a.returnState == b.returnState {
var parent = merge(a.parentCtx, b.parentCtx, rootIsWildcard, mergeCache)
parent := merge(a.parentCtx, b.parentCtx, rootIsWildcard, mergeCache)
// if parent is same as existing a or b parent or reduced to a parent,
// return it
if parent == a.parentCtx {
@ -472,7 +473,7 @@ func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool,
// merge parents x and y, giving array node with x,y then remainders
// of those graphs. dup a, a' points at merged array
// Newjoined parent so create Newsingleton pointing to it, a'
var spc = SingletonBasePredictionContextCreate(parent, a.returnState)
spc := SingletonBasePredictionContextCreate(parent, a.returnState)
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), spc)
}
@ -488,13 +489,13 @@ func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool,
}
if singleParent != nil { // parents are same
// sort payloads and use same parent
var payloads = []int{a.returnState, b.returnState}
payloads := []int{a.returnState, b.returnState}
if a.returnState > b.returnState {
payloads[0] = b.returnState
payloads[1] = a.returnState
}
var parents = []PredictionContext{singleParent, singleParent}
var apc = NewArrayPredictionContext(parents, payloads)
parents := []PredictionContext{singleParent, singleParent}
apc := NewArrayPredictionContext(parents, payloads)
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), apc)
}
@ -503,14 +504,14 @@ func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool,
// parents differ and can't merge them. Just pack together
// into array can't merge.
// ax + by = [ax,by]
var payloads = []int{a.returnState, b.returnState}
var parents = []PredictionContext{a.parentCtx, b.parentCtx}
payloads := []int{a.returnState, b.returnState}
parents := []PredictionContext{a.parentCtx, b.parentCtx}
if a.returnState > b.returnState { // sort by payload
payloads[0] = b.returnState
payloads[1] = a.returnState
parents = []PredictionContext{b.parentCtx, a.parentCtx}
}
var apc = NewArrayPredictionContext(parents, payloads)
apc := NewArrayPredictionContext(parents, payloads)
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), apc)
}
@ -567,12 +568,12 @@ func mergeRoot(a, b SingletonPredictionContext, rootIsWildcard bool) PredictionC
if a == BasePredictionContextEMPTY && b == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY // $ + $ = $
} else if a == BasePredictionContextEMPTY { // $ + x = [$,x]
var payloads = []int{b.getReturnState(-1), BasePredictionContextEmptyReturnState}
var parents = []PredictionContext{b.GetParent(-1), nil}
payloads := []int{b.getReturnState(-1), BasePredictionContextEmptyReturnState}
parents := []PredictionContext{b.GetParent(-1), nil}
return NewArrayPredictionContext(parents, payloads)
} else if b == BasePredictionContextEMPTY { // x + $ = [$,x] ($ is always first if present)
var payloads = []int{a.getReturnState(-1), BasePredictionContextEmptyReturnState}
var parents = []PredictionContext{a.GetParent(-1), nil}
payloads := []int{a.getReturnState(-1), BasePredictionContextEmptyReturnState}
parents := []PredictionContext{a.GetParent(-1), nil}
return NewArrayPredictionContext(parents, payloads)
}
}
@ -601,7 +602,7 @@ func mergeRoot(a, b SingletonPredictionContext, rootIsWildcard bool) PredictionC
// /
func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) PredictionContext {
if mergeCache != nil {
var previous = mergeCache.Get(a.Hash(), b.Hash())
previous := mergeCache.Get(a.Hash(), b.Hash())
if previous != nil {
return previous.(PredictionContext)
}
@ -611,29 +612,29 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
}
}
// merge sorted payloads a + b => M
var i = 0 // walks a
var j = 0 // walks b
var k = 0 // walks target M array
i := 0 // walks a
j := 0 // walks b
k := 0 // walks target M array
var mergedReturnStates = make([]int, len(a.returnStates) + len(b.returnStates))
var mergedParents = make([]PredictionContext, len(a.returnStates) + len(b.returnStates))
mergedReturnStates := make([]int, len(a.returnStates) + len(b.returnStates))
mergedParents := make([]PredictionContext, len(a.returnStates) + len(b.returnStates))
// walk and merge to yield mergedParents, mergedReturnStates
for i < len(a.returnStates) && j < len(b.returnStates) {
var aParent = a.parents[i]
var bParent = b.parents[j]
aParent := a.parents[i]
bParent := b.parents[j]
if a.returnStates[i] == b.returnStates[j] {
// same payload (stack tops are equal), must yield merged singleton
var payload = a.returnStates[i]
payload := a.returnStates[i]
// $+$ = $
var bothDollars = payload == BasePredictionContextEmptyReturnState && aParent == nil && bParent == nil
var axAX = (aParent != nil && bParent != nil && aParent == bParent) // ax+ax
bothDollars := payload == BasePredictionContextEmptyReturnState && aParent == nil && bParent == nil
axAX := (aParent != nil && bParent != nil && aParent == bParent) // ax+ax
// ->
// ax
if bothDollars || axAX {
mergedParents[k] = aParent // choose left
mergedReturnStates[k] = payload
} else { // ax+ay -> a'[x,y]
var mergedParent = merge(aParent, bParent, rootIsWildcard, mergeCache)
mergedParent := merge(aParent, bParent, rootIsWildcard, mergeCache)
mergedParents[k] = mergedParent
mergedReturnStates[k] = payload
}
@ -667,7 +668,7 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
// trim merged if we combined a few that had same stack tops
if k < len(mergedParents) { // write index < last position trim
if k == 1 { // for just one merged element, return singleton top
var pc = SingletonBasePredictionContextCreate(mergedParents[0], mergedReturnStates[0])
pc := SingletonBasePredictionContextCreate(mergedParents[0], mergedReturnStates[0])
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), pc)
}
@ -677,7 +678,7 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
mergedReturnStates = mergedReturnStates[0:k]
}
var M = NewArrayPredictionContext(mergedParents, mergedReturnStates)
M := NewArrayPredictionContext(mergedParents, mergedReturnStates)
// if we created same array as a or b, return that instead
// TODO: track whether this is possible above during merge sort for speed
@ -706,10 +707,10 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
// ones.
// /
func combineCommonParents(parents []PredictionContext) {
var uniqueParents = make(map[PredictionContext]PredictionContext)
uniqueParents := make(map[PredictionContext]PredictionContext)
for p := 0; p < len(parents); p++ {
var parent = parents[p]
parent := parents[p]
if uniqueParents[parent] == nil {
uniqueParents[parent] = parent
}
@ -724,7 +725,7 @@ func getCachedBasePredictionContext(context PredictionContext, contextCache *Pre
if context.isEmpty() {
return context
}
var existing = visited[context]
existing := visited[context]
if existing != nil {
return existing
}
@ -733,10 +734,10 @@ func getCachedBasePredictionContext(context PredictionContext, contextCache *Pre
visited[context] = existing
return existing
}
var changed = false
var parents = make([]PredictionContext, context.length())
changed := false
parents := make([]PredictionContext, context.length())
for i := 0; i < len(parents); i++ {
var parent = getCachedBasePredictionContext(context.GetParent(i), contextCache, visited)
parent := getCachedBasePredictionContext(context.GetParent(i), contextCache, visited)
if changed || parent != context.GetParent(i) {
if !changed {
parents = make([]PredictionContext, context.length())
@ -767,24 +768,3 @@ func getCachedBasePredictionContext(context PredictionContext, contextCache *Pre
return updated
}
// ter's recursive version of Sam's getAllNodes()
//func getAllContextNodes(context, nodes, visited) {
// if (nodes == nil) {
// nodes = []
// return getAllContextNodes(context, nodes, visited)
// } else if (visited == nil) {
// visited = {}
// return getAllContextNodes(context, nodes, visited)
// } else {
// if (context == nil || visited[context] != nil) {
// return nodes
// }
// visited[context] = context
// nodes.push(context)
// for i := 0; i < len(context); i++ {
// getAllContextNodes(context.GetParent(i), nodes, visited)
// }
// return nodes
// }
//}

View File

@ -180,7 +180,7 @@ func PredictionModehasSLLConflictTerminatingPrediction(mode int, configs ATNConf
// since we'll often fail over anyway.
if configs.HasSemanticContext() {
// dup configs, tossing out semantic predicates
var dup = NewBaseATNConfigSet(false)
dup := NewBaseATNConfigSet(false)
for _, c := range configs.GetItems() {
// NewBaseATNConfig({semanticContext:}, c)
@ -192,7 +192,7 @@ func PredictionModehasSLLConflictTerminatingPrediction(mode int, configs ATNConf
// now we have combined contexts for configs with dissimilar preds
}
// pure SLL or combined SLL+LL mode parsing
var altsets = PredictionModegetConflictingAltSubsets(configs)
altsets := PredictionModegetConflictingAltSubsets(configs)
return PredictionModehasConflictingAltSet(altsets) && !PredictionModehasStateAssociatedWithOneAlt(configs)
}
@ -398,7 +398,7 @@ func PredictionModeallSubsetsConflict(altsets []*BitSet) bool {
//
func PredictionModehasNonConflictingAltSet(altsets []*BitSet) bool {
for i := 0; i < len(altsets); i++ {
var alts = altsets[i]
alts := altsets[i]
if alts.length() == 1 {
return true
}
@ -416,7 +416,7 @@ func PredictionModehasNonConflictingAltSet(altsets []*BitSet) bool {
//
func PredictionModehasConflictingAltSet(altsets []*BitSet) bool {
for i := 0; i < len(altsets); i++ {
var alts = altsets[i]
alts := altsets[i]
if alts.length() > 1 {
return true
}
@ -435,7 +435,7 @@ func PredictionModeallSubsetsEqual(altsets []*BitSet) bool {
var first *BitSet
for i := 0; i < len(altsets); i++ {
var alts = altsets[i]
alts := altsets[i]
if first == nil {
first = alts
} else if alts != first {
@ -454,7 +454,7 @@ func PredictionModeallSubsetsEqual(altsets []*BitSet) bool {
// @param altsets a collection of alternative subsets
//
func PredictionModegetUniqueAlt(altsets []*BitSet) int {
var all = PredictionModeGetAlts(altsets)
all := PredictionModeGetAlts(altsets)
if all.length() == 1 {
return all.minValue()
}
@ -470,7 +470,7 @@ func PredictionModegetUniqueAlt(altsets []*BitSet) int {
// @return the set of represented alternatives in {@code altsets}
//
func PredictionModeGetAlts(altsets []*BitSet) *BitSet {
var all = NewBitSet()
all := NewBitSet()
for _, alts := range altsets {
all.or(alts)
}
@ -487,11 +487,11 @@ func PredictionModeGetAlts(altsets []*BitSet) *BitSet {
// </pre>
//
func PredictionModegetConflictingAltSubsets(configs ATNConfigSet) []*BitSet {
var configToAlts = make(map[string]*BitSet)
configToAlts := make(map[string]*BitSet)
for _, c := range configs.GetItems() {
var key = "key_" + strconv.Itoa(c.GetState().GetStateNumber()) + "/" + c.GetContext().String()
var alts = configToAlts[key]
key := "key_" + strconv.Itoa(c.GetState().GetStateNumber()) + "/" + c.GetContext().String()
alts := configToAlts[key]
if alts == nil {
alts = NewBitSet()
configToAlts[key] = alts
@ -499,7 +499,7 @@ func PredictionModegetConflictingAltSubsets(configs ATNConfigSet) []*BitSet {
alts.add(c.GetAlt())
}
var values = make([]*BitSet, 0)
values := make([]*BitSet, 0)
for k := range configToAlts {
if strings.Index(k, "key_") != 0 {
@ -519,10 +519,10 @@ func PredictionModegetConflictingAltSubsets(configs ATNConfigSet) []*BitSet {
// </pre>
//
func PredictionModeGetStateToAltMap(configs ATNConfigSet) *AltDict {
var m = NewAltDict()
m := NewAltDict()
for _, c := range configs.GetItems() {
var alts = m.Get(c.GetState().String())
alts := m.Get(c.GetState().String())
if alts == nil {
alts = NewBitSet()
m.put(c.GetState().String(), alts)
@ -533,7 +533,7 @@ func PredictionModeGetStateToAltMap(configs ATNConfigSet) *AltDict {
}
func PredictionModehasStateAssociatedWithOneAlt(configs ATNConfigSet) bool {
var values = PredictionModeGetStateToAltMap(configs).values()
values := PredictionModeGetStateToAltMap(configs).values()
for i := 0; i < len(values); i++ {
if values[i].(*BitSet).length() == 1 {
return true
@ -543,11 +543,11 @@ func PredictionModehasStateAssociatedWithOneAlt(configs ATNConfigSet) bool {
}
func PredictionModegetSingleViableAlt(altsets []*BitSet) int {
var result = ATNInvalidAltNumber
result := ATNInvalidAltNumber
for i := 0; i < len(altsets); i++ {
var alts = altsets[i]
var minAlt = alts.minValue()
alts := altsets[i]
minAlt := alts.minValue()
if result == ATNInvalidAltNumber {
result = minAlt
} else if result != minAlt { // more than 1 viable alt

View File

@ -45,7 +45,7 @@ var tokenTypeMapCache = make(map[string]int)
var ruleIndexMapCache = make(map[string]int)
func (b *BaseRecognizer) checkVersion(toolVersion string) {
var runtimeVersion = "4.5.2"
runtimeVersion := "4.5.2"
if runtimeVersion != toolVersion {
fmt.Println("ANTLR runtime and generated code versions disagree: " + runtimeVersion + "!=" + toolVersion)
}
@ -136,37 +136,37 @@ func (b *BaseRecognizer) GetTokenType(tokenName string) int {
}
//func (b *Recognizer) GetTokenTypeMap() map[string]int {
// Vocabulary vocabulary = getVocabulary();
// Vocabulary vocabulary = getVocabulary()
//
// Synchronized (tokenTypeMapCache) {
// Map<String, Integer> result = tokenTypeMapCache.Get(vocabulary);
// Map<String, Integer> result = tokenTypeMapCache.Get(vocabulary)
// if (result == null) {
// result = new HashMap<String, Integer>();
// result = new HashMap<String, Integer>()
// for (int i = 0; i < GetATN().maxTokenType; i++) {
// String literalName = vocabulary.getLiteralName(i);
// String literalName = vocabulary.getLiteralName(i)
// if (literalName != null) {
// result.put(literalName, i);
// result.put(literalName, i)
// }
//
// String symbolicName = vocabulary.GetSymbolicName(i);
// String symbolicName = vocabulary.GetSymbolicName(i)
// if (symbolicName != null) {
// result.put(symbolicName, i);
// result.put(symbolicName, i)
// }
// }
//
// result.put("EOF", Token.EOF);
// result = Collections.unmodifiableMap(result);
// tokenTypeMapCache.put(vocabulary, result);
// result.put("EOF", Token.EOF)
// result = Collections.unmodifiableMap(result)
// tokenTypeMapCache.put(vocabulary, result)
// }
//
// return result;
// return result
// }
//}
// What is the error header, normally line/character position information?//
func (b *BaseRecognizer) GetErrorHeader(e RecognitionException) string {
var line = e.GetOffendingToken().GetLine()
var column = e.GetOffendingToken().GetColumn()
line := e.GetOffendingToken().GetLine()
column := e.GetOffendingToken().GetColumn()
return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
}
@ -187,7 +187,7 @@ func (b *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
if t == nil {
return "<no token>"
}
var s = t.GetText()
s := t.GetText()
if s == "" {
if t.GetTokenType() == TokenEOF {
s = "<EOF>"

View File

@ -28,7 +28,7 @@ func SemanticContextandContext(a, b SemanticContext) SemanticContext {
if b == nil || b == SemanticContextNone {
return a
}
var result = NewAND(a, b)
result := NewAND(a, b)
if len(result.opnds) == 1 {
return result.opnds[0]
}
@ -46,7 +46,7 @@ func SemanticContextorContext(a, b SemanticContext) SemanticContext {
if a == SemanticContextNone || b == SemanticContextNone {
return SemanticContextNone
}
var result = NewOR(a, b)
result := NewOR(a, b)
if len(result.opnds) == 1 {
return result.opnds[0]
}
@ -156,7 +156,7 @@ func (p *PrecedencePredicate) String() string {
}
func PrecedencePredicatefilterPrecedencePredicates(set *Set) []*PrecedencePredicate {
var result = make([]*PrecedencePredicate, 0)
result := make([]*PrecedencePredicate, 0)
for _, v := range set.values() {
if c2, ok := v.(*PrecedencePredicate); ok {
@ -176,7 +176,7 @@ type AND struct {
func NewAND(a, b SemanticContext) *AND {
var operands = NewSet(nil, nil)
operands := NewSet(nil, nil)
if aa, ok := a.(*AND); ok {
for _, o := range aa.opnds {
operands.add(o)
@ -192,7 +192,7 @@ func NewAND(a, b SemanticContext) *AND {
} else {
operands.add(b)
}
var precedencePredicates = PrecedencePredicatefilterPrecedencePredicates(operands)
precedencePredicates := PrecedencePredicatefilterPrecedencePredicates(operands)
if len(precedencePredicates) > 0 {
// interested in the transition with the lowest precedence
var reduced *PrecedencePredicate
@ -254,12 +254,12 @@ func (a *AND) evaluate(parser Recognizer, outerContext RuleContext) bool {
}
func (a *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
var differs = false
var operands = make([]SemanticContext, 0)
differs := false
operands := make([]SemanticContext, 0)
for i := 0; i < len(a.opnds); i++ {
var context = a.opnds[i]
var evaluated = context.evalPrecedence(parser, outerContext)
context := a.opnds[i]
evaluated := context.evalPrecedence(parser, outerContext)
differs = differs || (evaluated != context)
if evaluated == nil {
// The AND context is false if any element is false
@ -292,7 +292,7 @@ func (a *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) Semant
}
func (a *AND) String() string {
var s = ""
s := ""
for _, o := range a.opnds {
s += "&& " + fmt.Sprint(o)
@ -316,7 +316,7 @@ type OR struct {
func NewOR(a, b SemanticContext) *OR {
var operands = NewSet(nil, nil)
operands := NewSet(nil, nil)
if aa, ok := a.(*OR); ok {
for _, o := range aa.opnds {
operands.add(o)
@ -332,7 +332,7 @@ func NewOR(a, b SemanticContext) *OR {
} else {
operands.add(b)
}
var precedencePredicates = PrecedencePredicatefilterPrecedencePredicates(operands)
precedencePredicates := PrecedencePredicatefilterPrecedencePredicates(operands)
if len(precedencePredicates) > 0 {
// interested in the transition with the lowest precedence
var reduced *PrecedencePredicate
@ -392,11 +392,11 @@ func (o *OR) evaluate(parser Recognizer, outerContext RuleContext) bool {
}
func (o *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
var differs = false
var operands = make([]SemanticContext, 0)
differs := false
operands := make([]SemanticContext, 0)
for i := 0; i < len(o.opnds); i++ {
var context = o.opnds[i]
var evaluated = context.evalPrecedence(parser, outerContext)
context := o.opnds[i]
evaluated := context.evalPrecedence(parser, outerContext)
differs = differs || (evaluated != context)
if evaluated == SemanticContextNone {
// The OR context is true if any element is true
@ -427,7 +427,7 @@ func (o *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) Semanti
}
func (o *OR) String() string {
var s = ""
s := ""
for _, o := range o.opnds {
s += "|| " + fmt.Sprint(o)

View File

@ -156,7 +156,7 @@ func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start
// @param oldToken The token to copy.
//
func (c *CommonToken) clone() *CommonToken {
var t = NewCommonToken(c.source, c.tokenType, c.channel, c.start, c.stop)
t := NewCommonToken(c.source, c.tokenType, c.channel, c.start, c.stop)
t.tokenIndex = c.GetTokenIndex()
t.line = c.GetLine()
t.column = c.GetColumn()
@ -168,11 +168,11 @@ func (c *CommonToken) GetText() string {
if c.text != "" {
return c.text
}
var input = c.GetInputStream()
input := c.GetInputStream()
if input == nil {
return ""
}
var n = input.Size()
n := input.Size()
if c.start < n && c.stop < n {
return input.GetTextFromInterval(NewInterval(c.start, c.stop))
}
@ -184,7 +184,7 @@ func (c *CommonToken) SetText(text string) {
}
func (c *CommonToken) String() string {
var txt = c.GetText()
txt := c.GetText()
if txt != "" {
txt = strings.Replace(txt, "\n", "\\n", -1)
txt = strings.Replace(txt, "\r", "\\r", -1)

View File

@ -140,7 +140,7 @@ func NewAtomTransition(target ATNState, intervalSet int) *AtomTransition {
}
func (t *AtomTransition) makeLabel() *IntervalSet {
var s = NewIntervalSet()
s := NewIntervalSet()
s.addOne(t.label)
return s
}
@ -222,7 +222,7 @@ func NewRangeTransition(target ATNState, start, stop int) *RangeTransition {
}
func (t *RangeTransition) makeLabel() *IntervalSet {
var s = NewIntervalSet()
s := NewIntervalSet()
s.addRange(t.start, t.stop)
return s
}

View File

@ -68,7 +68,7 @@ func (v *BaseParseTreeVisitor) VisitErrorNode(node ErrorNode) interface{} { re
// TODO
//func (this ParseTreeVisitor) Visit(ctx) {
// if (Utils.isArray(ctx)) {
// var self = this
// self := this
// return ctx.map(function(child) { return VisitAtom(self, child)})
// } else {
// return VisitAtom(this, ctx)
@ -80,8 +80,8 @@ func (v *BaseParseTreeVisitor) VisitErrorNode(node ErrorNode) interface{} { re
// return
// }
//
// var name = ctx.parser.ruleNames[ctx.ruleIndex]
// var funcName = "Visit" + Utils.titleCase(name)
// name := ctx.parser.ruleNames[ctx.ruleIndex]
// funcName := "Visit" + Utils.titleCase(name)
//
// return Visitor[funcName](ctx)
//}
@ -151,7 +151,7 @@ func (t *TerminalNodeImpl) GetSourceInterval() *Interval {
if t.symbol == nil {
return TreeInvalidInterval
}
var tokenIndex = t.symbol.GetTokenIndex()
tokenIndex := t.symbol.GetTokenIndex()
return NewInterval(tokenIndex, tokenIndex)
}
@ -219,7 +219,7 @@ func (p *ParseTreeWalker) Walk(listener ParseTreeListener, t Tree) {
default:
p.EnterRule(listener, t.(RuleNode))
for i := 0; i < t.GetChildCount(); i++ {
var child = t.GetChild(i)
child := t.GetChild(i)
p.Walk(listener, child)
}
p.ExitRule(listener, t.(RuleNode))
@ -233,13 +233,13 @@ func (p *ParseTreeWalker) Walk(listener ParseTreeListener, t Tree) {
// the rule specific. We to them in reverse order upon finishing the node.
//
func (p *ParseTreeWalker) EnterRule(listener ParseTreeListener, r RuleNode) {
var ctx = r.GetRuleContext().(ParserRuleContext)
ctx := r.GetRuleContext().(ParserRuleContext)
listener.EnterEveryRule(ctx)
ctx.EnterRule(listener)
}
func (p *ParseTreeWalker) ExitRule(listener ParseTreeListener, r RuleNode) {
var ctx = r.GetRuleContext().(ParserRuleContext)
ctx := r.GetRuleContext().(ParserRuleContext)
ctx.ExitRule(listener)
listener.ExitEveryRule(ctx)
}

View File

@ -13,14 +13,14 @@ func TreesStringTree(tree Tree, ruleNames []string, recog Recognizer) string {
ruleNames = recog.GetRuleNames()
}
var s = TreesGetNodeText(tree, ruleNames, nil)
s := TreesGetNodeText(tree, ruleNames, nil)
s = EscapeWhitespace(s, false)
var c = tree.GetChildCount()
c := tree.GetChildCount()
if c == 0 {
return s
}
var res = "(" + s + " "
res := "(" + s + " "
if c > 0 {
s = TreesStringTree(tree.GetChild(0), ruleNames, nil)
res += s
@ -42,7 +42,7 @@ func TreesGetNodeText(t Tree, ruleNames []string, recog Parser) string {
switch t2 := t.(type) {
case RuleNode:
t3 := t2.GetRuleContext()
var altNumber = t3.GetAltNumber()
altNumber := t3.GetAltNumber()
if altNumber != ATNInvalidAltNumber {
return fmt.Sprintf("%s:%d", ruleNames[t3.GetRuleIndex()], altNumber)
@ -58,7 +58,7 @@ func TreesGetNodeText(t Tree, ruleNames []string, recog Parser) string {
}
// no recog for rule names
var payload = t.GetPayload()
payload := t.GetPayload()
if p2, ok := payload.(Token); ok {
return p2.GetText()
}
@ -68,7 +68,7 @@ func TreesGetNodeText(t Tree, ruleNames []string, recog Parser) string {
// Return ordered list of all children of this node
func TreesGetChildren(t Tree) []Tree {
var list = make([]Tree, 0)
list := make([]Tree, 0)
for i := 0; i < t.GetChildCount(); i++ {
list = append(list, t.GetChild(i))
}
@ -79,7 +79,7 @@ func TreesGetChildren(t Tree) []Tree {
// list is the root and the last is the parent of this node.
//
func TreesgetAncestors(t Tree) []Tree {
var ancestors = make([]Tree, 0)
ancestors := make([]Tree, 0)
t = t.GetParent()
for t != nil {
f := []Tree{t}
@ -98,7 +98,7 @@ func TreesfindAllRuleNodes(t ParseTree, ruleIndex int) []ParseTree {
}
func TreesfindAllNodes(t ParseTree, index int, findTokens bool) []ParseTree {
var nodes = make([]ParseTree, 0)
nodes := make([]ParseTree, 0)
TreesFindAllNodes(t, index, findTokens, nodes)
return nodes
}
@ -125,7 +125,7 @@ func TreesFindAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTre
}
func TreesDescendants(t ParseTree) []ParseTree {
var nodes = []ParseTree{t}
nodes := []ParseTree{t}
for i := 0; i < t.GetChildCount(); i++ {
nodes = append(nodes, TreesDescendants(t.GetChild(i).(ParseTree))...)
}

View File

@ -119,8 +119,8 @@ func (s *Set) length() int {
func (s *Set) add(value interface{}) interface{} {
var hash = s.hashFunction(value)
var key = "hash_" + hashCode(hash)
hash := s.hashFunction(value)
key := "hash_" + hashCode(hash)
values := s.data[key]
@ -158,7 +158,7 @@ func (s *Set) contains(value interface{}) bool {
}
func (s *Set) values() []interface{} {
var l = make([]interface{}, 0)
l := make([]interface{}, 0)
for key := range s.data {
if strings.Index(key, "hash_") == 0 {
@ -310,7 +310,7 @@ func NewDoubleDict() *DoubleDict {
}
func (d *DoubleDict) Get(a string, b string) interface{} {
var data = d.data[a]
data := d.data[a]
if data == nil {
return nil
@ -320,7 +320,7 @@ func (d *DoubleDict) Get(a string, b string) interface{} {
}
func (d *DoubleDict) set(a, b string, o interface{}) {
var data = d.data[a]
data := d.data[a]
if data == nil {
data = make(map[string]interface{})