More, more refactoring cleanup

Peter Boyer 2015-12-31 17:53:41 -05:00
parent ddb296cf01
commit 39c1321d40
28 changed files with 350 additions and 370 deletions
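The hunks below all apply one Go-idiomatic renaming pass: interface types drop the Java-style "I" prefix (IPredictionContext becomes PredictionContext, IToken becomes Token, IErrorListener becomes ErrorListener), the former concrete structs take a "Base" prefix (BasePredictionContext, BaseSingletonPredictionContext), and exported constants move from SCREAMING_SNAKE_CASE to CamelCase (ATNStateLOOP_END becomes ATNStateLoopEnd, SemanticContextNONE becomes SemanticContextNone). A minimal, self-contained sketch of the convention the runtime converges on, reusing names from this commit but otherwise illustrative only:

package main

import "fmt"

// The interface keeps the plain name; no "I" prefix.
type PredictionContext interface {
	Hash() string
}

// The shared default implementation takes a "Base" prefix instead.
type BasePredictionContext struct {
	cachedHashString string
}

func (b *BasePredictionContext) Hash() string { return b.cachedHashString }

// Constants are CamelCase rather than LOOP_END-style snake case.
const ATNStateLoopEnd = 12

func main() {
	var ctx PredictionContext = &BasePredictionContext{cachedHashString: "$"}
	fmt.Println(ctx.Hash(), ATNStateLoopEnd)
}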


@ -24,8 +24,8 @@ type ATNConfig interface {
GetAlt() int
GetSemanticContext() SemanticContext
GetContext() IPredictionContext
SetContext(IPredictionContext)
GetContext() PredictionContext
SetContext(PredictionContext)
GetReachesIntoOuterContext() int
SetReachesIntoOuterContext(int)
@ -39,7 +39,7 @@ type BaseATNConfig struct {
precedenceFilterSuppressed bool
state ATNState
alt int
context IPredictionContext
context PredictionContext
semanticContext SemanticContext
reachesIntoOuterContext int
}
@ -54,11 +54,11 @@ func NewBaseATNConfig7(old *BaseATNConfig) *BaseATNConfig { // dup
return a
}
func NewBaseATNConfig6(state ATNState, alt int, context IPredictionContext) *BaseATNConfig {
return NewBaseATNConfig5(state, alt, context, SemanticContextNONE)
func NewBaseATNConfig6(state ATNState, alt int, context PredictionContext) *BaseATNConfig {
return NewBaseATNConfig5(state, alt, context, SemanticContextNone)
}
func NewBaseATNConfig5(state ATNState, alt int, context IPredictionContext, semanticContext SemanticContext) *BaseATNConfig {
func NewBaseATNConfig5(state ATNState, alt int, context PredictionContext, semanticContext SemanticContext) *BaseATNConfig {
a := new(BaseATNConfig)
if (semanticContext == nil){
@ -85,11 +85,11 @@ func NewBaseATNConfig2(c ATNConfig, semanticContext SemanticContext) *BaseATNCon
return NewBaseATNConfig(c, c.GetState(), c.GetContext(), semanticContext)
}
func NewBaseATNConfig1(c ATNConfig, state ATNState, context IPredictionContext) *BaseATNConfig {
func NewBaseATNConfig1(c ATNConfig, state ATNState, context PredictionContext) *BaseATNConfig {
return NewBaseATNConfig(c, state, context, c.GetSemanticContext())
}
func NewBaseATNConfig(c ATNConfig, state ATNState, context IPredictionContext, semanticContext SemanticContext) *BaseATNConfig {
func NewBaseATNConfig(c ATNConfig, state ATNState, context PredictionContext, semanticContext SemanticContext) *BaseATNConfig {
a := new(BaseATNConfig)
if (semanticContext == nil){
@ -121,10 +121,10 @@ func (this *BaseATNConfig) GetAlt() int {
return this.alt
}
func (this *BaseATNConfig) SetContext(v IPredictionContext) {
func (this *BaseATNConfig) SetContext(v PredictionContext) {
this.context = v
}
func (this *BaseATNConfig) GetContext() IPredictionContext {
func (this *BaseATNConfig) GetContext() PredictionContext {
return this.context
}
@ -178,7 +178,7 @@ func (this *BaseATNConfig) String() string {
}
var b string
if this.semanticContext != SemanticContextNONE {
if this.semanticContext != SemanticContextNone {
b = "," + fmt.Sprint(this.semanticContext)
}
@ -201,22 +201,22 @@ type LexerATNConfig struct {
passedThroughNonGreedyDecision bool
}
func NewLexerATNConfig6(state ATNState, alt int, context IPredictionContext) *LexerATNConfig {
func NewLexerATNConfig6(state ATNState, alt int, context PredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNONE)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
this.passedThroughNonGreedyDecision = false
this.lexerActionExecutor = nil
return this
}
func NewLexerATNConfig5(state ATNState, alt int, context IPredictionContext, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
func NewLexerATNConfig5(state ATNState, alt int, context PredictionContext, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
this := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNONE)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
this.lexerActionExecutor = lexerActionExecutor
this.passedThroughNonGreedyDecision = false
return this
@ -242,7 +242,7 @@ func NewLexerATNConfig3(c *LexerATNConfig, state ATNState, lexerActionExecutor *
return this
}
func NewLexerATNConfig2(c *LexerATNConfig, state ATNState, context IPredictionContext) *LexerATNConfig {
func NewLexerATNConfig2(c *LexerATNConfig, state ATNState, context PredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
@ -252,11 +252,11 @@ func NewLexerATNConfig2(c *LexerATNConfig, state ATNState, context IPredictionCo
return this
}
func NewLexerATNConfig1(state ATNState, alt int, context IPredictionContext) *LexerATNConfig {
func NewLexerATNConfig1(state ATNState, alt int, context PredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNONE)
this.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
this.lexerActionExecutor = nil
this.passedThroughNonGreedyDecision = false


@ -118,7 +118,7 @@ func (this *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool
if this.readOnly {
panic("This set is readonly")
}
if config.GetSemanticContext() != SemanticContextNONE {
if config.GetSemanticContext() != SemanticContextNone {
this.hasSemanticContext = true
}
if config.GetReachesIntoOuterContext() > 0 {
@ -166,7 +166,7 @@ func (this *BaseATNConfigSet) GetPredicates() []SemanticContext {
var preds = make([]SemanticContext, 0)
for i := 0; i < len(this.configs); i++ {
c := this.configs[i].GetSemanticContext()
if c != SemanticContextNONE {
if c != SemanticContextNone {
preds = append(preds, c)
}
}


@ -10,17 +10,16 @@ import (
// This is the earliest supported serialized UUID.
// stick to serialized version for now, we don't need a UUID instance
var BASE_SERIALIZED_UUID = "AADB8D7E-AEEF-4415-AD2B-8204D6CF042E"
var BaseSerializedUUID = "AADB8D7E-AEEF-4415-AD2B-8204D6CF042E"
// This list contains all of the currently supported UUIDs, ordered by when
// the feature first appeared in this branch.
var SUPPORTED_UUIDS = []string{BASE_SERIALIZED_UUID}
var SupportedUUIDs = []string{BaseSerializedUUID}
var SERIALIZED_VERSION = 3
var SerializedVersion = 3
// This is the current serialized UUID.
var SERIALIZED_UUID = BASE_SERIALIZED_UUID
var SerializedUUID = BaseSerializedUUID
type LoopEndStateIntPair struct {
item0 *LoopEndState
@ -74,11 +73,11 @@ func stringInSlice(a string, list []string) int {
// introduced otherwise, {@code false}.
func (this *ATNDeserializer) isFeatureSupported(feature, actualUuid string) bool {
var idx1 = stringInSlice(feature, SUPPORTED_UUIDS)
var idx1 = stringInSlice(feature, SupportedUUIDs)
if idx1 < 0 {
return false
}
var idx2 = stringInSlice(actualUuid, SUPPORTED_UUIDS)
var idx2 = stringInSlice(actualUuid, SupportedUUIDs)
return idx2 >= idx1
}
@ -125,15 +124,15 @@ func (this *ATNDeserializer) reset(data []rune) {
func (this *ATNDeserializer) checkVersion() {
var version = this.readInt()
if version != SERIALIZED_VERSION {
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(SERIALIZED_VERSION) + ").")
if version != SerializedVersion {
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(SerializedVersion) + ").")
}
}
func (this *ATNDeserializer) checkUUID() {
var uuid = this.readUUID()
if stringInSlice(uuid, SUPPORTED_UUIDS) < 0 {
panic("Could not deserialize ATN with UUID: " + uuid + " (expected " + SERIALIZED_UUID + " or a legacy UUID).")
if stringInSlice(uuid, SupportedUUIDs) < 0 {
panic("Could not deserialize ATN with UUID: " + uuid + " (expected " + SerializedUUID + " or a legacy UUID).")
}
this.uuid = uuid
}
@ -163,7 +162,7 @@ func (this *ATNDeserializer) readStates(atn *ATN) {
ruleIndex = -1
}
var s = this.stateFactory(stype, ruleIndex)
if stype == ATNStateLOOP_END {
if stype == ATNStateLoopEnd {
var loopBackStateNumber = this.readInt()
loopBackStateNumbers = append(loopBackStateNumbers, LoopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
} else if s2, ok := s.(IBlockStartState); ok {
@ -630,29 +629,29 @@ func (this *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {
switch typeIndex {
case ATNStateInvalidType:
return nil
case ATNStateBASIC:
case ATNStateBasic:
s = NewBasicState()
case ATNStateRULE_START:
case ATNStateRuleStart:
s = NewRuleStartState()
case ATNStateBLOCK_START:
case ATNStateBlockStart:
s = NewBasicBlockStartState()
case ATNStatePLUS_BLOCK_START:
case ATNStatePlusBlockStart:
s = NewPlusBlockStartState()
case ATNStateSTAR_BLOCK_START:
case ATNStateStarBlockStart:
s = NewStarBlockStartState()
case ATNStateTOKEN_START:
case ATNStateTokenStart:
s = NewTokensStartState()
case ATNStateRULE_STOP:
case ATNStateRuleStop:
s = NewRuleStopState()
case ATNStateBLOCK_END:
case ATNStateBlockEnd:
s = NewBlockEndState()
case ATNStateSTAR_LOOP_BACK:
case ATNStateStarLoopBack:
s = NewStarLoopbackState()
case ATNStateSTAR_LOOP_ENTRY:
case ATNStateStarLoopEntry:
s = NewStarLoopEntryState()
case ATNStatePLUS_LOOP_BACK:
case ATNStatePlusLoopBack:
s = NewPlusLoopbackState()
case ATNStateLOOP_END:
case ATNStateLoopEnd:
s = NewLoopEndState()
default:
message := fmt.Sprintf("The specified state type %d is not valid.", typeIndex)
@ -665,21 +664,21 @@ func (this *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {
func (this *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) LexerAction {
switch typeIndex {
case LexerActionTypeCHANNEL:
case LexerActionTypeChannel:
return NewLexerChannelAction(data1)
case LexerActionTypeCUSTOM:
case LexerActionTypeCustom:
return NewLexerCustomAction(data1, data2)
case LexerActionTypeMODE:
case LexerActionTypeMode:
return NewLexerModeAction(data1)
case LexerActionTypeMORE:
case LexerActionTypeMore:
return LexerMoreActionINSTANCE
case LexerActionTypePOP_MODE:
case LexerActionTypePopMode:
return LexerPopModeActionINSTANCE
case LexerActionTypePUSH_MODE:
case LexerActionTypePushMode:
return NewLexerPushModeAction(data1)
case LexerActionTypeSKIP:
case LexerActionTypeSkip:
return LexerSkipActionINSTANCE
case LexerActionTypeTYPE:
case LexerActionTypeType:
return NewLexerTypeAction(data1)
default:
message := fmt.Sprintf("The specified lexer action typeIndex %d is not valid.", typeIndex)
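The version and UUID checks in this file boil down to an ordered capability list: a serialized feature is usable only if the UUID that introduced it appears no later in SupportedUUIDs than the UUID actually read from the serialized ATN. A self-contained restatement of that check (a sketch; the runtime's version uses its own stringInSlice helper):

// indexOf is a stand-in for the runtime's stringInSlice helper.
func indexOf(list []string, s string) int {
	for i, v := range list {
		if v == s {
			return i
		}
	}
	return -1
}

// featureSupported mirrors isFeatureSupported: unknown feature UUIDs are
// rejected, otherwise the actual UUID must sit at or past the feature's
// position in the ordered supported list.
func featureSupported(supported []string, featureUUID, actualUUID string) bool {
	f := indexOf(supported, featureUUID)
	if f < 0 {
		return false
	}
	return indexOf(supported, actualUUID) >= f
}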


@ -17,10 +17,10 @@ func NewBaseATNSimulator(atn *ATN, sharedContextCache *PredictionContextCache) *
var ATNSimulatorError = NewDFAState(0x7FFFFFFF, NewBaseATNConfigSet(false))
func (this *BaseATNSimulator) getCachedContext(context IPredictionContext) IPredictionContext {
func (this *BaseATNSimulator) getCachedContext(context PredictionContext) PredictionContext {
if this.sharedContextCache == nil {
return context
}
var visited = make(map[IPredictionContext]IPredictionContext)
return getCachedPredictionContext(context, this.sharedContextCache, visited)
var visited = make(map[PredictionContext]PredictionContext)
return getCachedBasePredictionContext(context, this.sharedContextCache, visited)
}


@ -5,38 +5,23 @@ import "strconv"
const (
// constants for serialization
ATNStateInvalidType = 0
ATNStateBASIC = 1
ATNStateRULE_START = 2
ATNStateBLOCK_START = 3
ATNStatePLUS_BLOCK_START = 4
ATNStateSTAR_BLOCK_START = 5
ATNStateTOKEN_START = 6
ATNStateRULE_STOP = 7
ATNStateBLOCK_END = 8
ATNStateSTAR_LOOP_BACK = 9
ATNStateSTAR_LOOP_ENTRY = 10
ATNStatePLUS_LOOP_BACK = 11
ATNStateLOOP_END = 12
ATNStateBasic = 1
ATNStateRuleStart = 2
ATNStateBlockStart = 3
ATNStatePlusBlockStart = 4
ATNStateStarBlockStart = 5
ATNStateTokenStart = 6
ATNStateRuleStop = 7
ATNStateBlockEnd = 8
ATNStateStarLoopBack = 9
ATNStateStarLoopEntry = 10
ATNStatePlusLoopBack = 11
ATNStateLoopEnd = 12
ATNStateINVALID_STATE_NUMBER = -1
ATNStateInvalidStateNumber = -1
)
//var ATNState.serializationNames = [
// "INVALID",
// "BASIC",
// "RULE_START",
// "BLOCK_START",
// "PLUS_BLOCK_START",
// "STAR_BLOCK_START",
// "TOKEN_START",
// "RULE_STOP",
// "BLOCK_END",
// "STAR_LOOP_BACK",
// "STAR_LOOP_ENTRY",
// "PLUS_LOOP_BACK",
// "LOOP_END" ]
var INITIAL_NUM_TRANSITIONS = 4
var ATNStateInitialNumTransitions = 4
type ATNState interface {
GetEpsilonOnlyTransitions() bool
@ -81,7 +66,7 @@ func NewBaseATNState() *BaseATNState {
// Which ATN are we in?
as.atn = nil
as.stateNumber = ATNStateINVALID_STATE_NUMBER
as.stateNumber = ATNStateInvalidStateNumber
as.stateType = ATNStateInvalidType
as.ruleIndex = 0 // at runtime, we don't have Rule objects
as.epsilonOnlyTransitions = false
@ -178,7 +163,7 @@ func NewBasicState() *BasicState {
this := new(BasicState)
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateBASIC
this.stateType = ATNStateBasic
return this
}
@ -273,7 +258,7 @@ func NewBasicBlockStartState() *BasicBlockStartState {
this.BlockStartState = NewBlockStartState()
this.stateType = ATNStateBLOCK_START
this.stateType = ATNStateBlockStart
return this
}
@ -290,7 +275,7 @@ func NewBlockEndState() *BlockEndState {
this := new(BlockEndState)
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateBLOCK_END
this.stateType = ATNStateBlockEnd
this.startState = nil
return this
@ -309,7 +294,7 @@ func NewRuleStopState() *RuleStopState {
this := new(RuleStopState)
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateRULE_STOP
this.stateType = ATNStateRuleStop
return this
}
@ -325,7 +310,7 @@ func NewRuleStartState() *RuleStartState {
this := new(RuleStartState)
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateRULE_START
this.stateType = ATNStateRuleStart
this.stopState = nil
this.isPrecedenceRule = false
@ -345,7 +330,7 @@ func NewPlusLoopbackState() *PlusLoopbackState {
this.BaseDecisionState = NewBaseDecisionState()
this.stateType = ATNStatePLUS_LOOP_BACK
this.stateType = ATNStatePlusLoopBack
return this
}
@ -366,7 +351,7 @@ func NewPlusBlockStartState() *PlusBlockStartState {
this.BlockStartState = NewBlockStartState()
this.stateType = ATNStatePLUS_BLOCK_START
this.stateType = ATNStatePlusBlockStart
this.loopBackState = nil
return this
@ -383,7 +368,7 @@ func NewStarBlockStartState() *StarBlockStartState {
this.BlockStartState = NewBlockStartState()
this.stateType = ATNStateSTAR_BLOCK_START
this.stateType = ATNStateStarBlockStart
return this
}
@ -398,7 +383,7 @@ func NewStarLoopbackState() *StarLoopbackState {
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateSTAR_LOOP_BACK
this.stateType = ATNStateStarLoopBack
return this
}
@ -415,7 +400,7 @@ func NewStarLoopEntryState() *StarLoopEntryState {
this.BaseDecisionState = NewBaseDecisionState()
this.stateType = ATNStateSTAR_LOOP_ENTRY
this.stateType = ATNStateStarLoopEntry
this.loopBackState = nil
// Indicates whether this state can benefit from a precedence DFA during SLL decision making.
@ -437,7 +422,7 @@ func NewLoopEndState() *LoopEndState {
this.BaseATNState = NewBaseATNState()
this.stateType = ATNStateLOOP_END
this.stateType = ATNStateLoopEnd
this.loopBackState = nil
return this
@ -454,6 +439,6 @@ func NewTokensStartState() *TokensStartState {
this.BaseDecisionState = NewBaseDecisionState()
this.stateType = ATNStateTOKEN_START
this.stateType = ATNStateTokenStart
return this
}
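The commented-out serializationNames table above was never ported; if it were, it would most naturally become a slice indexed by the new CamelCase state-type constants. A hypothetical sketch, not part of this commit:

// stateTypeNames maps a state-type constant (ATNStateBasic = 1, ...,
// ATNStateLoopEnd = 12) back to its serialization name.
var stateTypeNames = []string{
	"INVALID", "BASIC", "RULE_START", "BLOCK_START", "PLUS_BLOCK_START",
	"STAR_BLOCK_START", "TOKEN_START", "RULE_STOP", "BLOCK_END",
	"STAR_LOOP_BACK", "STAR_LOOP_ENTRY", "PLUS_LOOP_BACK", "LOOP_END",
}

func stateTypeName(stateType int) string {
	if stateType < 0 || stateType >= len(stateTypeNames) {
		return "INVALID"
	}
	return stateTypeNames[stateType]
}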


@ -7,7 +7,7 @@ package antlr4
import "fmt"
type TokenFactory interface {
Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) IToken
Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token
}
type CommonTokenFactory struct {
@ -46,7 +46,7 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
//
var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) IToken {
func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token {
if PortDebug {
fmt.Println("Token factory creating: " + text)
@ -65,7 +65,7 @@ func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype
}
func (this *CommonTokenFactory) createThin(ttype int, text string) IToken {
func (this *CommonTokenFactory) createThin(ttype int, text string) Token {
if PortDebug {
fmt.Println("Token factory creating: " + text)


@ -34,7 +34,7 @@ func (bt *CommonTokenStream) Seek(index int) {
bt.index = bt.adjustSeekIndex(index)
}
func (bt *CommonTokenStream) Get(index int) IToken {
func (bt *CommonTokenStream) Get(index int) Token {
bt.lazyInit()
return bt.tokens[index]
}
@ -97,7 +97,7 @@ func (bt *CommonTokenStream) fetch(n int) int {
}
for i := 0; i < n; i++ {
var t IToken = bt.tokenSource.nextToken()
var t Token = bt.tokenSource.nextToken()
if PortDebug {
fmt.Println("fetch loop")
}
@ -116,13 +116,13 @@ func (bt *CommonTokenStream) fetch(n int) int {
}
// Get all tokens from start..stop inclusively///
func (bt *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []IToken {
func (bt *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []Token {
if start < 0 || stop < 0 {
return nil
}
bt.lazyInit()
var subset = make([]IToken, 0)
var subset = make([]Token, 0)
if stop >= len(bt.tokens) {
stop = len(bt.tokens) - 1
}
@ -160,7 +160,7 @@ func (bt *CommonTokenStream) GetTokenSource() TokenSource {
// Reset bt token stream by setting its token source.///
func (bt *CommonTokenStream) SetTokenSource(tokenSource TokenSource) {
bt.tokenSource = tokenSource
bt.tokens = make([]IToken, 0)
bt.tokens = make([]Token, 0)
bt.index = -1
}
@ -198,7 +198,7 @@ func (bt *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
// Collect all tokens on specified channel to the right of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
// EOF. If channel is -1, find any non default channel token.
func (bt *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []IToken {
func (bt *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []Token {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
@ -218,7 +218,7 @@ func (bt *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []I
// Collect all tokens on specified channel to the left of
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
// If channel is -1, find any non default channel token.
func (bt *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []IToken {
func (bt *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []Token {
bt.lazyInit()
if tokenIndex < 0 || tokenIndex >= len(bt.tokens) {
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens)-1))
@ -233,8 +233,8 @@ func (bt *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []IT
return bt.filterForChannel(from_, to, channel)
}
func (bt *CommonTokenStream) filterForChannel(left, right, channel int) []IToken {
var hidden = make([]IToken, 0)
func (bt *CommonTokenStream) filterForChannel(left, right, channel int) []Token {
var hidden = make([]Token, 0)
for i := left; i < right+1; i++ {
var t = bt.tokens[i]
if channel == -1 {
@ -267,7 +267,7 @@ func (bt *CommonTokenStream) GetAllText() string {
return bt.GetTextFromInterval(nil)
}
func (bt *CommonTokenStream) GetTextFromTokens(start, end IToken) string {
func (bt *CommonTokenStream) GetTextFromTokens(start, end Token) string {
return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
}
@ -317,7 +317,7 @@ func (bt *CommonTokenStream) fill() {
type CommonTokenStream struct {
tokenSource TokenSource
tokens []IToken
tokens []Token
index int
fetchedEOF bool
channel int
@ -333,7 +333,7 @@ func NewCommonTokenStream(lexer Lexer, channel int) *CommonTokenStream {
// A collection of all tokens fetched from the token source. The list is
// considered a complete view of the input once {@link //fetchedEOF} is set
// to {@code true}.
ts.tokens = make([]IToken, 0)
ts.tokens = make([]Token, 0)
// The index into {@link //tokens} of the current token (next token to
// {@link //consume}). {@link //tokens}{@code [}{@link //p}{@code ]} should
@ -372,7 +372,7 @@ func (ts *CommonTokenStream) adjustSeekIndex(i int) int {
return ts.nextTokenOnChannel(i, ts.channel)
}
func (ts *CommonTokenStream) LB(k int) IToken {
func (ts *CommonTokenStream) LB(k int) Token {
if k == 0 || ts.index-k < 0 {
return nil
}
@ -390,7 +390,7 @@ func (ts *CommonTokenStream) LB(k int) IToken {
return ts.tokens[i]
}
func (ts *CommonTokenStream) LT(k int) IToken {
func (ts *CommonTokenStream) LT(k int) Token {
ts.lazyInit()
if k == 0 {
return nil
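filterForChannel, whose loop is only partly visible above, keeps the tokens between left and right that sit on the requested channel, or, when channel is -1, any token off the default channel. A hedged sketch against a minimal local interface (the real method works on the stream's own Token slice and channel constants):

type channeledToken interface {
	GetChannel() int
}

const defaultTokenChannel = 0 // stand-in for the runtime's default-channel constant

func filterForChannel(tokens []channeledToken, left, right, channel int) []channeledToken {
	var hidden []channeledToken
	for i := left; i >= 0 && i <= right && i < len(tokens); i++ {
		t := tokens[i]
		if channel == -1 {
			if t.GetChannel() != defaultTokenChannel {
				hidden = append(hidden, t)
			}
		} else if t.GetChannel() == channel {
			hidden = append(hidden, t)
		}
	}
	return hidden
}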


@ -9,7 +9,7 @@ import (
// default implementation of each method does nothing, but can be overridden as
// necessary.
type IErrorListener interface {
type ErrorListener interface {
SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException)
ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet)
ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet)
@ -78,10 +78,10 @@ func (this *ConsoleErrorListener) SyntaxError(recognizer Recognizer, offendingSy
type ProxyErrorListener struct {
*DefaultErrorListener
delegates []IErrorListener
delegates []ErrorListener
}
func NewProxyErrorListener(delegates []IErrorListener) *ProxyErrorListener {
func NewProxyErrorListener(delegates []ErrorListener) *ProxyErrorListener {
if delegates == nil {
panic("delegates is not provided")
}


@ -7,9 +7,9 @@ import (
"strings"
)
type IErrorStrategy interface {
type ErrorStrategy interface {
reset(Parser)
RecoverInline(Parser) IToken
RecoverInline(Parser) Token
Recover(Parser, RecognitionException)
Sync(Parser)
inErrorRecoveryMode(Parser) bool
@ -241,22 +241,22 @@ func (this *DefaultErrorStrategy) Sync(recognizer Parser) {
}
switch s.GetStateType() {
case ATNStateBLOCK_START:
case ATNStateBlockStart:
fallthrough
case ATNStateSTAR_BLOCK_START:
case ATNStateStarBlockStart:
fallthrough
case ATNStatePLUS_BLOCK_START:
case ATNStatePlusBlockStart:
fallthrough
case ATNStateSTAR_LOOP_ENTRY:
case ATNStateStarLoopEntry:
// Report error and recover if possible
if this.singleTokenDeletion(recognizer) != nil {
return
} else {
panic(NewInputMisMatchException(recognizer))
}
case ATNStatePLUS_LOOP_BACK:
case ATNStatePlusLoopBack:
fallthrough
case ATNStateSTAR_LOOP_BACK:
case ATNStateStarLoopBack:
this.ReportUnwantedToken(recognizer)
var expecting = NewIntervalSet()
expecting.addSet(recognizer.getExpectedTokens())
@ -430,7 +430,7 @@ func (this *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
// is in the set of tokens that can follow the {@code ')'} token reference
// in rule {@code atom}. It can assume that you forgot the {@code ')'}.
//
func (this *DefaultErrorStrategy) RecoverInline(recognizer Parser) IToken {
func (this *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
// SINGLE TOKEN DELETION
var MatchedSymbol = this.singleTokenDeletion(recognizer)
if MatchedSymbol != nil {
@ -499,7 +499,7 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
// deletion successfully recovers from the misMatched input, otherwise
// {@code nil}
//
func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) IToken {
func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
var nextTokenType = recognizer.GetTokenStream().LA(2)
var expecting = this.getExpectedTokens(recognizer)
if expecting.contains(nextTokenType) {
@ -537,7 +537,7 @@ func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) IToken
// If you change what tokens must be created by the lexer,
// override this method to create the appropriate tokens.
//
func (this *DefaultErrorStrategy) getMissingSymbol(recognizer Parser) IToken {
func (this *DefaultErrorStrategy) getMissingSymbol(recognizer Parser) Token {
var currentSymbol = recognizer.getCurrentToken()
var expecting = this.getExpectedTokens(recognizer)
var expectedTokenType = expecting.first()
@ -573,7 +573,7 @@ func (this *DefaultErrorStrategy) getExpectedTokens(recognizer Parser) *Interval
// your token objects because you don't have to go modify your lexer
// so that it creates a new Java type.
//
func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t IToken) string {
func (this *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string {
if t == nil {
return "<no token>"
}
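The RecoverInline and singleTokenDeletion comments above describe two complementary inline repairs. A self-contained sketch of the two viability tests (the lookahead tokens and expected sets are passed in directly here; in the runtime they come from the token stream and the ATN):

// Deletion: the token after the offending one is already expected, so the
// offending token is treated as spurious and dropped.
func singleTokenDeletionViable(la2 int, expecting map[int]bool) bool {
	return expecting[la2]
}

// Insertion: the current token would match if the missing token (for
// example a forgotten ')') were conjured up first.
func singleTokenInsertionViable(la1 int, expectingAfterMissing map[int]bool) bool {
	return expectingAfterMissing[la1]
}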


@ -9,7 +9,7 @@ import ()
// and what kind of problem occurred.
type RecognitionException interface {
GetOffendingToken() IToken
GetOffendingToken() Token
GetMessage() string
GetInputStream() IntStream
}
@ -17,7 +17,7 @@ type RecognitionException interface {
type BaseRecognitionException struct {
message string
recognizer Recognizer
offendingToken IToken
offendingToken Token
offendingState int
ctx RuleContext
input IntStream
@ -62,7 +62,7 @@ func (this *BaseRecognitionException) GetMessage() string {
return this.message
}
func (this *BaseRecognitionException) GetOffendingToken() IToken {
func (this *BaseRecognitionException) GetOffendingToken() Token {
return this.offendingToken
}
@ -124,8 +124,8 @@ func (this *LexerNoViableAltException) String() string {
type NoViableAltException struct {
*BaseRecognitionException
startToken IToken
offendingToken IToken
startToken Token
offendingToken Token
ctx ParserRuleContext
deadEndConfigs ATNConfigSet
}
@ -135,7 +135,7 @@ type NoViableAltException struct {
// of the offending input and also knows where the parser was
// in the various paths when the error occurred. Reported by ReportNoViableAlternative()
//
func NewNoViableAltException(recognizer Parser, input TokenStream, startToken IToken, offendingToken IToken, deadEndConfigs ATNConfigSet, ctx ParserRuleContext) *NoViableAltException {
func NewNoViableAltException(recognizer Parser, input TokenStream, startToken Token, offendingToken Token, deadEndConfigs ATNConfigSet, ctx ParserRuleContext) *NoViableAltException {
if ctx == nil {
ctx = recognizer.GetParserRuleContext()


@ -40,7 +40,7 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
look[alt] = NewIntervalSet()
var lookBusy = NewSet(nil, nil)
var seeThruPreds = false // fail to get lookahead upon pred
la._LOOK(s.GetTransitions()[alt].getTarget(), nil, PredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
la._LOOK(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
// Wipe out lookahead for la alternative if we found nothing
// or we had a predicate when we !seeThruPreds
if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHIT_PRED) {
@ -71,7 +71,7 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
func (la *LL1Analyzer) LOOK(s, stopState ATNState, ctx RuleContext) *IntervalSet {
var r = NewIntervalSet()
var seeThruPreds = true // ignore preds get all lookahead
var lookContext IPredictionContext
var lookContext PredictionContext
if ctx != nil {
lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)
}
@ -123,7 +123,7 @@ func (la *LL1Analyzer) LOOK(s, stopState ATNState, ctx RuleContext) *IntervalSet
// is {@code nil}.
func (la *LL1Analyzer) __LOOK(s, stopState ATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int){
func (la *LL1Analyzer) __LOOK(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int){
returnState := la.atn.states[ctx.getReturnState(i)]
@ -140,7 +140,7 @@ func (la *LL1Analyzer) __LOOK(s, stopState ATNState, ctx IPredictionContext, loo
}
func (la *LL1Analyzer) _LOOK(s, stopState ATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
func (la *LL1Analyzer) _LOOK(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
c := NewBaseATNConfig6(s, 0, ctx)
@ -174,7 +174,7 @@ func (la *LL1Analyzer) _LOOK(s, stopState ATNState, ctx IPredictionContext, look
return
}
if ctx != PredictionContextEMPTY {
if ctx != BasePredictionContextEMPTY {
if PortDebug {
fmt.Println("DEBUG 7")
}
@ -204,7 +204,7 @@ func (la *LL1Analyzer) _LOOK(s, stopState ATNState, ctx IPredictionContext, look
continue
}
newContext := SingletonPredictionContextCreate(ctx, t1.followState.GetStateNumber())
newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
la.___LOOK(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
@ -249,9 +249,9 @@ func (la *LL1Analyzer) _LOOK(s, stopState ATNState, ctx IPredictionContext, look
}
}
func (la *LL1Analyzer) ___LOOK(stopState ATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
func (la *LL1Analyzer) ___LOOK(stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
newContext := SingletonPredictionContextCreate(ctx, t1.followState.GetStateNumber())
newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
defer func() {
calledRuleStack.remove(t1.getTarget().GetRuleIndex())
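The ___LOOK wrapper above leans on defer so the called-rule bit is cleared however the recursive walk returns. A minimal sketch of that push/defer-pop guard against re-entering a rule that is already being walked (names are placeholders, not the runtime's):

func walkRule(calledRules map[int]bool, ruleIndex int, recurse func()) {
	if calledRules[ruleIndex] {
		return // rule already on the stack; don't follow it again
	}
	calledRules[ruleIndex] = true
	defer delete(calledRules, ruleIndex) // popped no matter how recurse exits
	recurse()
}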


@ -31,7 +31,7 @@ type BaseLexer struct {
_input CharStream
_factory TokenFactory
_tokenFactorySourcePair *TokenSourceCharStreamPair
_token IToken
_token Token
_tokenStartCharIndex int
_tokenStartLine int
_tokenStartColumn int
@ -167,7 +167,7 @@ func (l *BaseLexer) safeMatch() (ret int) {
}
// Return a token from l source i.e., Match a token on the char stream.
func (l *BaseLexer) nextToken() IToken {
func (l *BaseLexer) nextToken() Token {
if l._input == nil {
panic("nextToken requires a non-nil input stream.")
}
@ -287,7 +287,7 @@ func (l *BaseLexer) setInputStream(input CharStream) {
// and GetToken (to push tokens into a list and pull from that list
// rather than a single variable as l implementation does).
// /
func (l *BaseLexer) emitToken(token IToken) {
func (l *BaseLexer) emitToken(token Token) {
l._token = token
}
@ -297,7 +297,7 @@ func (l *BaseLexer) emitToken(token IToken) {
// use that to set the token's text. Override l method to emit
// custom Token objects or provide a new factory.
// /
func (l *BaseLexer) emit() IToken {
func (l *BaseLexer) emit() Token {
if PortDebug {
fmt.Println("emit")
}
@ -306,7 +306,7 @@ func (l *BaseLexer) emit() IToken {
return t
}
func (l *BaseLexer) emitEOF() IToken {
func (l *BaseLexer) emitEOF() Token {
cpos := l.getCharPositionInLine()
lpos := l.getLine()
if PortDebug {
@ -359,11 +359,11 @@ func (this *BaseLexer) GetATN() *ATN {
// Return a list of all Token objects in input char stream.
// Forces load of all tokens. Does not include EOF token.
// /
func (l *BaseLexer) getAllTokens() []IToken {
func (l *BaseLexer) getAllTokens() []Token {
if PortDebug {
fmt.Println("getAllTokens")
}
var tokens = make([]IToken, 0)
var tokens = make([]Token, 0)
var t = l.nextToken()
for t.GetTokenType() != TokenEOF {
tokens = append(tokens, t)
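getAllTokens, shown above mid-loop, simply drains nextToken until EOF and keeps everything except the EOF token itself. A self-contained sketch of that drain loop with stand-in types:

type tokenLike interface {
	GetTokenType() int
}

const tokenEOF = -1 // stand-in for the runtime's TokenEOF constant

func drainTokens(next func() tokenLike) []tokenLike {
	var tokens []tokenLike
	for t := next(); t.GetTokenType() != tokenEOF; t = next() {
		tokens = append(tokens, t)
	}
	return tokens
}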


@ -374,7 +374,7 @@ func (this *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *
var configs = NewOrderedATNConfigSet()
for i := 0; i < len(p.GetTransitions()); i++ {
var target = p.GetTransitions()[i].getTarget()
var cfg = NewLexerATNConfig6(target, i+1, PredictionContextEMPTY)
var cfg = NewLexerATNConfig6(target, i+1, BasePredictionContextEMPTY)
this.closure(input, cfg, configs.BaseATNConfigSet, false, false, false)
}
@ -416,13 +416,13 @@ func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig,
configs.Add(config, nil)
return true
} else {
configs.Add(NewLexerATNConfig2(config, config.state, PredictionContextEMPTY), nil)
configs.Add(NewLexerATNConfig2(config, config.state, BasePredictionContextEMPTY), nil)
currentAltReachedAcceptState = true
}
}
if config.context != nil && !config.context.isEmpty() {
for i := 0; i < config.context.length(); i++ {
if config.context.getReturnState(i) != PredictionContextEMPTY_RETURN_STATE {
if config.context.getReturnState(i) != BasePredictionContextEMPTY_RETURN_STATE {
var newContext = config.context.GetParent(i) // "pop" return state
var returnState = this.atn.states[config.context.getReturnState(i)]
cfg := NewLexerATNConfig2(config, returnState, newContext)
@ -458,7 +458,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
if trans.getSerializationType() == TransitionRULE {
rt := trans.(*RuleTransition)
var newContext = SingletonPredictionContextCreate(config.context, rt.followState.GetStateNumber())
var newContext = SingletonBasePredictionContextCreate(config.context, rt.followState.GetStateNumber())
cfg = NewLexerATNConfig2(config, trans.getTarget(), newContext)
} else if trans.getSerializationType() == TransitionPRECEDENCE {


@ -3,14 +3,14 @@ package antlr4
import "strconv"
const (
LexerActionTypeCHANNEL = 0 //The type of a {@link LexerChannelAction} action.
LexerActionTypeCUSTOM = 1 //The type of a {@link LexerCustomAction} action.
LexerActionTypeMODE = 2 //The type of a {@link LexerModeAction} action.
LexerActionTypeMORE = 3 //The type of a {@link LexerMoreAction} action.
LexerActionTypePOP_MODE = 4 //The type of a {@link LexerPopModeAction} action.
LexerActionTypePUSH_MODE = 5 //The type of a {@link LexerPushModeAction} action.
LexerActionTypeSKIP = 6 //The type of a {@link LexerSkipAction} action.
LexerActionTypeTYPE = 7 //The type of a {@link LexerTypeAction} action.
LexerActionTypeChannel = 0 //The type of a {@link LexerChannelAction} action.
LexerActionTypeCustom = 1 //The type of a {@link LexerCustomAction} action.
LexerActionTypeMode = 2 //The type of a {@link LexerModeAction} action.
LexerActionTypeMore = 3 //The type of a {@link LexerMoreAction} action.
LexerActionTypePopMode = 4 //The type of a {@link LexerPopModeAction} action.
LexerActionTypePushMode = 5 //The type of a {@link LexerPushModeAction} action.
LexerActionTypeSkip = 6 //The type of a {@link LexerSkipAction} action.
LexerActionTypeType = 7 //The type of a {@link LexerTypeAction} action.
)
type LexerAction interface {
@ -67,7 +67,7 @@ type LexerSkipAction struct {
func NewLexerSkipAction() *LexerSkipAction {
la := new(LexerSkipAction)
la.BaseLexerAction = NewBaseLexerAction(LexerActionTypeSKIP)
la.BaseLexerAction = NewBaseLexerAction(LexerActionTypeSkip)
return la
}
@ -92,7 +92,7 @@ type LexerTypeAction struct {
func NewLexerTypeAction(_type int) *LexerTypeAction {
this := new(LexerTypeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeTYPE)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeType)
this._type = _type
return this
}
@ -130,7 +130,7 @@ type LexerPushModeAction struct {
func NewLexerPushModeAction(mode int) *LexerPushModeAction {
this := new(LexerPushModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypePUSH_MODE)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypePushMode)
this.mode = mode
return this
@ -172,7 +172,7 @@ func NewLexerPopModeAction() *LexerPopModeAction {
this := new(LexerPopModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypePOP_MODE)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypePopMode)
return this
}
@ -199,7 +199,7 @@ type LexerMoreAction struct {
func NewLexerMoreAction() *LexerModeAction {
this := new(LexerModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMORE)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMore)
return this
}
@ -225,7 +225,7 @@ type LexerModeAction struct {
func NewLexerModeAction(mode int) *LexerModeAction {
this := new(LexerModeAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMODE)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMode)
this.mode = mode
return this
}
@ -278,7 +278,7 @@ type LexerCustomAction struct {
func NewLexerCustomAction(ruleIndex, actionIndex int) *LexerCustomAction {
this := new(LexerCustomAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeCUSTOM)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeCustom)
this.ruleIndex = ruleIndex
this.actionIndex = actionIndex
this.isPositionDependent = true
@ -317,7 +317,7 @@ type LexerChannelAction struct {
func NewLexerChannelAction(channel int) *LexerChannelAction {
this := new(LexerChannelAction)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeCHANNEL)
this.BaseLexerAction = NewBaseLexerAction(LexerActionTypeChannel)
this.channel = channel
return this
}


@ -5,17 +5,17 @@ type Parser interface {
Recognizer
GetInterpreter() *ParserATNSimulator
GetErrorHandler() IErrorStrategy
GetErrorHandler() ErrorStrategy
GetTokenStream() TokenStream
GetTokenFactory() TokenFactory
GetParserRuleContext() ParserRuleContext
Consume() IToken
Consume() Token
GetParseListeners() []ParseTreeListener
GetInputStream() IntStream
getCurrentToken() IToken
getCurrentToken() Token
getExpectedTokens() *IntervalSet
NotifyErrorListeners(msg string, offendingToken IToken, err RecognitionException)
NotifyErrorListeners(msg string, offendingToken Token, err RecognitionException)
isExpectedToken(symbol int) bool
getPrecedence() int
getRuleInvocationStack(ParserRuleContext) []string
@ -28,7 +28,7 @@ type BaseParser struct {
BuildParseTrees bool
_input TokenStream
_errHandler IErrorStrategy
_errHandler ErrorStrategy
_precedenceStack IntStack
_ctx ParserRuleContext
@ -99,7 +99,7 @@ func (p *BaseParser) reset() {
}
}
func (p *BaseParser) GetErrorHandler() IErrorStrategy {
func (p *BaseParser) GetErrorHandler() ErrorStrategy {
return p._errHandler
}
@ -124,7 +124,7 @@ func (p *BaseParser) GetParseListeners() []ParseTreeListener {
// {@code ttype} and the error strategy could not recover from the
// misMatched symbol
func (p *BaseParser) Match(ttype int) IToken {
func (p *BaseParser) Match(ttype int) Token {
if PortDebug {
fmt.Println("get current token")
@ -171,7 +171,7 @@ func (p *BaseParser) Match(ttype int) IToken {
// a wildcard and the error strategy could not recover from the misMatched
// symbol
func (p *BaseParser) MatchWildcard() IToken {
func (p *BaseParser) MatchWildcard() Token {
var t = p.getCurrentToken()
if t.GetTokenType() > 0 {
p._errHandler.ReportMatch(p)
@ -399,11 +399,11 @@ func (p *BaseParser) setTokenStream(input TokenStream) {
// Match needs to return the current input symbol, which gets put
// into the label for the associated token ref e.g., x=ID.
//
func (p *BaseParser) getCurrentToken() IToken {
func (p *BaseParser) getCurrentToken() Token {
return p._input.LT(1)
}
func (p *BaseParser) NotifyErrorListeners(msg string, offendingToken IToken, err RecognitionException) {
func (p *BaseParser) NotifyErrorListeners(msg string, offendingToken Token, err RecognitionException) {
if offendingToken == nil {
offendingToken = p.getCurrentToken()
}
@ -414,7 +414,7 @@ func (p *BaseParser) NotifyErrorListeners(msg string, offendingToken IToken, err
listener.SyntaxError(p, offendingToken, line, column, msg, err)
}
func (p *BaseParser) Consume() IToken {
func (p *BaseParser) Consume() Token {
var o = p.getCurrentToken()
if o.GetTokenType() != TokenEOF {
if PortDebug {
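Per the Match and MatchWildcard comments earlier in this file's hunks, matching either consumes the current token (when it carries the expected type) or hands control to the error strategy, whose inline recovery may conjure up a missing token or fail. A hedged, stand-in sketch of that control flow, not the runtime's Parser API:

// matchToken returns the consumed or recovered token type; failure surfaces
// as a panic here purely to keep the sketch short.
func matchToken(currentType func() int, consume func(), recoverInline func() (int, bool), ttype int) int {
	if currentType() == ttype {
		t := currentType()
		consume()
		return t
	}
	if recovered, ok := recoverInline(); ok {
		return recovered
	}
	panic("mismatched input and inline recovery failed")
}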


@ -722,7 +722,7 @@ func (this *ParserATNSimulator) computeStartState(p ATNState, ctx RuleContext, f
//
func (this *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConfigSet {
var statesFromAlt1 = make(map[int]IPredictionContext)
var statesFromAlt1 = make(map[int]PredictionContext)
var configSet = NewBaseATNConfigSet(configs.FullContext())
for _,config := range configs.GetItems() {
@ -782,8 +782,8 @@ func (this *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs
for i := 1; i < nalts+1; i++ {
var pred = altToPred[i]
if pred == nil {
altToPred[i] = SemanticContextNONE
} else if pred != SemanticContextNONE {
altToPred[i] = SemanticContextNone
} else if pred != SemanticContextNone {
nPredAlts += 1
}
}
@ -806,7 +806,7 @@ func (this *ParserATNSimulator) getPredicatePredictions(ambigAlts *BitSet, altTo
if ambigAlts != nil && ambigAlts.contains(i) {
pairs = append(pairs, NewPredPrediction(pred, i))
}
if pred != SemanticContextNONE {
if pred != SemanticContextNone {
containsPredicate = true
}
}
@ -915,7 +915,7 @@ func (this *ParserATNSimulator) splitAccordingToSemanticValidity(configs ATNConf
var failed = NewBaseATNConfigSet(configs.FullContext())
for _, c := range configs.GetItems() {
if c.GetSemanticContext() != SemanticContextNONE {
if c.GetSemanticContext() != SemanticContextNone {
var predicateEvaluationResult = c.GetSemanticContext().evaluate(this.parser, outerContext)
if predicateEvaluationResult {
succeeded.Add(c, nil)
@ -939,7 +939,7 @@ func (this *ParserATNSimulator) evalSemanticContext(predPredictions []*PredPredi
var predictions = NewBitSet()
for i := 0; i < len(predPredictions); i++ {
var pair = predPredictions[i]
if pair.pred == SemanticContextNONE {
if pair.pred == SemanticContextNone {
predictions.add(pair.alt)
if !complete {
break
@ -985,9 +985,9 @@ func (this *ParserATNSimulator) closureCheckingStopState(config ATNConfig, confi
// run thru all possible stack tops in ctx
if !config.GetContext().isEmpty() {
for i := 0; i < config.GetContext().length(); i++ {
if config.GetContext().getReturnState(i) == PredictionContextEMPTY_RETURN_STATE {
if config.GetContext().getReturnState(i) == BasePredictionContextEMPTY_RETURN_STATE {
if fullCtx {
configs.Add(NewBaseATNConfig1(config, config.GetState(), PredictionContextEMPTY), this.mergeCache)
configs.Add(NewBaseATNConfig1(config, config.GetState(), BasePredictionContextEMPTY), this.mergeCache)
continue
} else {
// we have no context info, just chase follow links (if greedy)
@ -1244,7 +1244,7 @@ func (this *ParserATNSimulator) ruleTransition(config ATNConfig, t *RuleTransiti
fmt.Println("CALL rule " + this.getRuleName(t.getTarget().GetRuleIndex()) + ", ctx=" + config.GetContext().String())
}
var returnState = t.followState
var newContext = SingletonPredictionContextCreate(config.GetContext(), returnState.GetStateNumber())
var newContext = SingletonBasePredictionContextCreate(config.GetContext(), returnState.GetStateNumber())
return NewBaseATNConfig1(config, t.getTarget(), newContext)
}
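The evalSemanticContext loop above reduces to: alternatives guarded by SemanticContextNone are always viable, other alternatives are viable only when their predicate evaluates true, and unless the caller asked for the complete set the scan stops at the first viable alternative. A self-contained restatement with stand-in types:

type predPrediction struct {
	alt      int
	alwaysOn bool        // stands in for pred == SemanticContextNone
	pred     func() bool // stands in for pred.evaluate(parser, outerContext); required when alwaysOn is false
}

func evalPredictions(pairs []predPrediction, complete bool) []int {
	var viable []int
	for _, p := range pairs {
		if p.alwaysOn || p.pred() {
			viable = append(viable, p.alt)
			if !complete {
				break
			}
		}
	}
	return viable
}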


@ -8,16 +8,16 @@ type ParserRuleContext interface {
RuleContext
SetException(RecognitionException)
addTokenNode(token IToken) *TerminalNodeImpl
addErrorNode(badToken IToken) *ErrorNodeImpl
addTokenNode(token Token) *TerminalNodeImpl
addErrorNode(badToken Token) *ErrorNodeImpl
EnterRule(listener ParseTreeListener)
ExitRule(listener ParseTreeListener)
setStart(IToken)
getStart() IToken
setStart(Token)
getStart() Token
setStop(IToken)
getStop() IToken
setStop(Token)
getStop() Token
addChild(child RuleContext) RuleContext
removeLastChild()
@ -27,7 +27,7 @@ type BaseParserRuleContext struct {
*BaseRuleContext
children []ParseTree
start, stop IToken
start, stop Token
exception RecognitionException
}
@ -117,7 +117,7 @@ func (prc *BaseParserRuleContext) removeLastChild() {
}
}
func (prc *BaseParserRuleContext) addTokenNode(token IToken) *TerminalNodeImpl {
func (prc *BaseParserRuleContext) addTokenNode(token Token) *TerminalNodeImpl {
var node = NewTerminalNodeImpl(token)
prc.addTerminalNodeChild(node)
@ -126,7 +126,7 @@ func (prc *BaseParserRuleContext) addTokenNode(token IToken) *TerminalNodeImpl {
}
func (prc *BaseParserRuleContext) addErrorNode(badToken IToken) *ErrorNodeImpl {
func (prc *BaseParserRuleContext) addErrorNode(badToken Token) *ErrorNodeImpl {
var node = NewErrorNodeImpl(badToken)
prc.addTerminalNodeChild(node)
node.parentCtx = prc
@ -159,19 +159,19 @@ func (prc *BaseParserRuleContext) getChildOfType(i int, childType reflect.Type)
}
}
func (prc *BaseParserRuleContext) setStart(t IToken) {
func (prc *BaseParserRuleContext) setStart(t Token) {
prc.start = t
}
func (prc *BaseParserRuleContext) getStart() IToken {
func (prc *BaseParserRuleContext) getStart() Token {
return prc.start
}
func (prc *BaseParserRuleContext) setStop(t IToken) {
func (prc *BaseParserRuleContext) setStop(t Token) {
prc.stop = t
}
func (prc *BaseParserRuleContext) getStop() IToken {
func (prc *BaseParserRuleContext) getStop() Token {
return prc.stop
}
@ -240,7 +240,7 @@ func (prc *BaseParserRuleContext) getChildCount() int {
func (prc *BaseParserRuleContext) GetSourceInterval() *Interval {
if prc.start == nil || prc.stop == nil {
return TreeINVALID_INTERVAL
return TreeInvalidInterval
} else {
return NewInterval(prc.start.GetTokenIndex(), prc.stop.GetTokenIndex())
}


@ -5,24 +5,24 @@ import (
"strconv"
)
type IPredictionContext interface {
type PredictionContext interface {
Hash() string
GetParent(int) IPredictionContext
GetParent(int) PredictionContext
getReturnState(int) int
equals(IPredictionContext) bool
equals(PredictionContext) bool
length() int
isEmpty() bool
hasEmptyPath() bool
String() string
}
type PredictionContext struct {
type BasePredictionContext struct {
cachedHashString string
}
func NewPredictionContext(cachedHashString string) *PredictionContext {
func NewBasePredictionContext(cachedHashString string) *BasePredictionContext {
pc := new(PredictionContext)
pc := new(BasePredictionContext)
pc.cachedHashString = cachedHashString
return pc
@ -32,7 +32,7 @@ func NewPredictionContext(cachedHashString string) *PredictionContext {
// {@code//+x =//}.
// /
const (
PredictionContextEMPTY_RETURN_STATE = 0x7FFFFFFF
BasePredictionContextEMPTY_RETURN_STATE = 0x7FFFFFFF
)
// Represents {@code $} in an array in full context mode, when {@code $}
@ -40,10 +40,10 @@ const (
// {@code $} = {@link //EMPTY_RETURN_STATE}.
// /
var PredictionContextglobalNodeCount = 1
var PredictionContextid = PredictionContextglobalNodeCount
var BasePredictionContextglobalNodeCount = 1
var BasePredictionContextid = BasePredictionContextglobalNodeCount
// Stores the computed hash code of this {@link PredictionContext}. The hash
// Stores the computed hash code of this {@link BasePredictionContext}. The hash
// code is computed in parts to Match the following reference algorithm.
//
// <pre>
@ -68,15 +68,15 @@ var PredictionContextid = PredictionContextglobalNodeCount
// </pre>
//
func (this *PredictionContext) isEmpty() bool {
func (this *BasePredictionContext) isEmpty() bool {
return false
}
func (this *PredictionContext) Hash() string {
func (this *BasePredictionContext) Hash() string {
return this.cachedHashString
}
func calculateHashString(parent IPredictionContext, returnState int) string {
func calculateHashString(parent PredictionContext, returnState int) string {
return parent.String() + strconv.Itoa(returnState)
}
@ -84,17 +84,17 @@ func calculateEmptyHashString() string {
return ""
}
// Used to cache {@link PredictionContext} objects. It's used for the shared
// Used to cache {@link BasePredictionContext} objects. It's used for the shared
// context cache associated with contexts in DFA states. This cache
// can be used for both lexers and parsers.
type PredictionContextCache struct {
cache map[IPredictionContext]IPredictionContext
cache map[PredictionContext]PredictionContext
}
func NewPredictionContextCache() *PredictionContextCache {
t := new(PredictionContextCache)
t.cache = make(map[IPredictionContext]IPredictionContext)
t.cache = make(map[PredictionContext]PredictionContext)
return t
}
@ -102,9 +102,9 @@ func NewPredictionContextCache() *PredictionContextCache {
// return that one instead and do not add a new context to the cache.
// Protect shared cache from unsafe thread access.
//
func (this *PredictionContextCache) add(ctx IPredictionContext) IPredictionContext {
if ctx == PredictionContextEMPTY {
return PredictionContextEMPTY
func (this *PredictionContextCache) add(ctx PredictionContext) PredictionContext {
if ctx == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY
}
var existing = this.cache[ctx]
if existing != nil {
@ -114,7 +114,7 @@ func (this *PredictionContextCache) add(ctx IPredictionContext) IPredictionConte
return ctx
}
func (this *PredictionContextCache) Get(ctx IPredictionContext) IPredictionContext {
func (this *PredictionContextCache) Get(ctx PredictionContext) PredictionContext {
return this.cache[ctx]
}
@ -122,21 +122,21 @@ func (this *PredictionContextCache) length() int {
return len(this.cache)
}
type ISingletonPredictionContext interface {
IPredictionContext
type SingletonPredictionContext interface {
PredictionContext
}
type SingletonPredictionContext struct {
*PredictionContext
type BaseSingletonPredictionContext struct {
*BasePredictionContext
parentCtx IPredictionContext
parentCtx PredictionContext
returnState int
}
func NewSingletonPredictionContext(parent IPredictionContext, returnState int) *SingletonPredictionContext {
func NewBaseSingletonPredictionContext(parent PredictionContext, returnState int) *BaseSingletonPredictionContext {
s := new(SingletonPredictionContext)
s.PredictionContext = NewPredictionContext("")
s := new(BaseSingletonPredictionContext)
s.BasePredictionContext = NewBasePredictionContext("")
if parent != nil {
s.cachedHashString = calculateHashString(parent, returnState)
@ -150,41 +150,41 @@ func NewSingletonPredictionContext(parent IPredictionContext, returnState int) *
return s
}
func SingletonPredictionContextCreate(parent IPredictionContext, returnState int) IPredictionContext {
if returnState == PredictionContextEMPTY_RETURN_STATE && parent == nil {
func SingletonBasePredictionContextCreate(parent PredictionContext, returnState int) PredictionContext {
if returnState == BasePredictionContextEMPTY_RETURN_STATE && parent == nil {
// someone can pass in the bits of an array ctx that mean $
return PredictionContextEMPTY
return BasePredictionContextEMPTY
} else {
return NewSingletonPredictionContext(parent, returnState)
return NewBaseSingletonPredictionContext(parent, returnState)
}
}
func (this *SingletonPredictionContext) length() int {
func (this *BaseSingletonPredictionContext) length() int {
return 1
}
func (this *SingletonPredictionContext) GetParent(index int) IPredictionContext {
func (this *BaseSingletonPredictionContext) GetParent(index int) PredictionContext {
return this.parentCtx
}
func (this *SingletonPredictionContext) getReturnState(index int) int {
func (this *BaseSingletonPredictionContext) getReturnState(index int) int {
return this.returnState
}
func (this *SingletonPredictionContext) hasEmptyPath() bool {
return this.returnState == PredictionContextEMPTY_RETURN_STATE
func (this *BaseSingletonPredictionContext) hasEmptyPath() bool {
return this.returnState == BasePredictionContextEMPTY_RETURN_STATE
}
func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
func (this *BaseSingletonPredictionContext) equals(other PredictionContext) bool {
if this == other {
return true
} else if _, ok := other.(*SingletonPredictionContext); !ok {
} else if _, ok := other.(*BaseSingletonPredictionContext); !ok {
return false
} else if this.Hash() != other.Hash() {
return false // can't be same if hash is different
} else {
otherP := other.(*SingletonPredictionContext)
otherP := other.(*BaseSingletonPredictionContext)
if this.returnState != other.getReturnState(0) {
return false
@ -196,11 +196,11 @@ func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
}
}
func (this *SingletonPredictionContext) Hash() string {
func (this *BaseSingletonPredictionContext) Hash() string {
return this.cachedHashString
}
func (this *SingletonPredictionContext) String() string {
func (this *BaseSingletonPredictionContext) String() string {
var up string
if this.parentCtx == nil {
@ -210,7 +210,7 @@ func (this *SingletonPredictionContext) String() string {
}
if len(up) == 0 {
if this.returnState == PredictionContextEMPTY_RETURN_STATE {
if this.returnState == BasePredictionContextEMPTY_RETURN_STATE {
return "$"
} else {
return strconv.Itoa(this.returnState)
@ -220,17 +220,17 @@ func (this *SingletonPredictionContext) String() string {
}
}
var PredictionContextEMPTY = NewEmptyPredictionContext()
var BasePredictionContextEMPTY = NewEmptyPredictionContext()
type EmptyPredictionContext struct {
*SingletonPredictionContext
*BaseSingletonPredictionContext
}
func NewEmptyPredictionContext() *EmptyPredictionContext {
p := new(EmptyPredictionContext)
p.SingletonPredictionContext = NewSingletonPredictionContext(nil, PredictionContextEMPTY_RETURN_STATE)
p.BaseSingletonPredictionContext = NewBaseSingletonPredictionContext(nil, BasePredictionContextEMPTY_RETURN_STATE)
return p
}
@ -239,7 +239,7 @@ func (this *EmptyPredictionContext) isEmpty() bool {
return true
}
func (this *EmptyPredictionContext) GetParent(index int) IPredictionContext {
func (this *EmptyPredictionContext) GetParent(index int) PredictionContext {
return nil
}
@ -247,7 +247,7 @@ func (this *EmptyPredictionContext) getReturnState(index int) int {
return this.returnState
}
func (this *EmptyPredictionContext) equals(other IPredictionContext) bool {
func (this *EmptyPredictionContext) equals(other PredictionContext) bool {
return this == other
}
@ -256,20 +256,20 @@ func (this *EmptyPredictionContext) String() string {
}
type ArrayPredictionContext struct {
*PredictionContext
*BasePredictionContext
parents []IPredictionContext
parents []PredictionContext
returnStates []int
}
func NewArrayPredictionContext(parents []IPredictionContext, returnStates []int) *ArrayPredictionContext {
func NewArrayPredictionContext(parents []PredictionContext, returnStates []int) *ArrayPredictionContext {
// Parent can be nil only if full ctx mode and we make an array
// from {@link //EMPTY} and non-empty. We merge {@link //EMPTY} by using
// nil parent and
// returnState == {@link //EMPTY_RETURN_STATE}.
c := new(ArrayPredictionContext)
c.PredictionContext = NewPredictionContext("")
c.BasePredictionContext = NewBasePredictionContext("")
for i := range parents {
c.cachedHashString += calculateHashString(parents[i], returnStates[i])
@ -286,20 +286,20 @@ func (c *ArrayPredictionContext) GetReturnStates() []int {
}
func (this *ArrayPredictionContext) hasEmptyPath() bool {
return this.getReturnState(this.length()-1) == PredictionContextEMPTY_RETURN_STATE
return this.getReturnState(this.length()-1) == BasePredictionContextEMPTY_RETURN_STATE
}
func (this *ArrayPredictionContext) isEmpty() bool {
// since EMPTY_RETURN_STATE can only appear in the last position, we
// don't need to verify that size==1
return this.returnStates[0] == PredictionContextEMPTY_RETURN_STATE
return this.returnStates[0] == BasePredictionContextEMPTY_RETURN_STATE
}
func (this *ArrayPredictionContext) length() int {
return len(this.returnStates)
}
func (this *ArrayPredictionContext) GetParent(index int) IPredictionContext {
func (this *ArrayPredictionContext) GetParent(index int) PredictionContext {
return this.parents[index]
}
@ -307,7 +307,7 @@ func (this *ArrayPredictionContext) getReturnState(index int) int {
return this.returnStates[index]
}
func (this *ArrayPredictionContext) equals(other IPredictionContext) bool {
func (this *ArrayPredictionContext) equals(other PredictionContext) bool {
if _, ok := other.(*ArrayPredictionContext); !ok {
return false
} else if this.cachedHashString != other.Hash() {
@ -327,7 +327,7 @@ func (this *ArrayPredictionContext) String() string {
if i > 0 {
s = s + ", "
}
if this.returnStates[i] == PredictionContextEMPTY_RETURN_STATE {
if this.returnStates[i] == BasePredictionContextEMPTY_RETURN_STATE {
s = s + "$"
continue
}
@ -342,27 +342,27 @@ func (this *ArrayPredictionContext) String() string {
}
}
// Convert a {@link RuleContext} tree to a {@link PredictionContext} graph.
// Convert a {@link RuleContext} tree to a {@link BasePredictionContext} graph.
// Return {@link //EMPTY} if {@code outerContext} is empty or nil.
// /
func predictionContextFromRuleContext(a *ATN, outerContext RuleContext) IPredictionContext {
func predictionContextFromRuleContext(a *ATN, outerContext RuleContext) PredictionContext {
if outerContext == nil {
outerContext = RuleContextEMPTY
}
// if we are in RuleContext of start rule, s, then PredictionContext
// if we are in RuleContext of start rule, s, then BasePredictionContext
// is EMPTY. Nobody called us. (if we are empty, return empty)
if outerContext.GetParent() == nil || outerContext == RuleContextEMPTY {
return PredictionContextEMPTY
return BasePredictionContextEMPTY
}
// If we have a parent, convert it to a PredictionContext graph
// If we have a parent, convert it to a BasePredictionContext graph
var parent = predictionContextFromRuleContext(a, outerContext.GetParent().(RuleContext))
var state = a.states[outerContext.getInvokingState()]
var transition = state.GetTransitions()[0]
return SingletonPredictionContextCreate(parent, transition.(*RuleTransition).followState.GetStateNumber())
return SingletonBasePredictionContextCreate(parent, transition.(*RuleTransition).followState.GetStateNumber())
}
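predictionContextFromRuleContext recurses up the invoking RuleContext chain and rebuilds it top-down as a PredictionContext graph, hanging each rule invocation's follow state off the converted parent. Below is a minimal standalone sketch of that recursion, assuming hypothetical stand-in types (ruleCtx, predCtx) instead of the runtime's RuleContext/PredictionContext and skipping the ATN state/transition lookup:

```go
package main

import "fmt"

// ruleCtx and predCtx are hypothetical stand-ins; the real conversion also
// looks up the ATN state and its RuleTransition to find the follow state.
type ruleCtx struct {
	parent      *ruleCtx
	followState int
}

type predCtx struct {
	parent      *predCtx
	returnState int
}

// fromRuleCtx mirrors predictionContextFromRuleContext: convert the parent
// first, then record this invocation's return (follow) state on top of it.
func fromRuleCtx(c *ruleCtx) *predCtx {
	if c == nil || c.parent == nil {
		return nil // plays the role of BasePredictionContextEMPTY
	}
	return &predCtx{parent: fromRuleCtx(c.parent), returnState: c.followState}
}

func main() {
	start := &ruleCtx{} // start rule, no caller
	outer := &ruleCtx{parent: start, followState: 10}
	inner := &ruleCtx{parent: outer, followState: 20}
	for p := fromRuleCtx(inner); p != nil; p = p.parent {
		fmt.Println("return state:", p.returnState) // 20, then 10
	}
}
```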
func calculateListsHashString(parents []PredictionContext, returnStates []int) string {
func calculateListsHashString(parents []BasePredictionContext, returnStates []int) string {
var s = ""
for _, p := range parents {
@@ -376,14 +376,14 @@ func calculateListsHashString(parents []PredictionContext, returnStates []int) s
return s
}
func merge(a, b IPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
func merge(a, b PredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) PredictionContext {
// share same graph if both same
if a == b {
return a
}
ac, ok1 := a.(*SingletonPredictionContext)
bc, ok2 := b.(*SingletonPredictionContext)
ac, ok1 := a.(*BaseSingletonPredictionContext)
bc, ok2 := b.(*BaseSingletonPredictionContext)
if ok1 && ok2 {
return mergeSingletons(ac, bc, rootIsWildcard, mergeCache)
@@ -399,17 +399,17 @@ func merge(a, b IPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict)
}
}
// convert singleton so both are arrays to normalize
if _, ok := a.(*SingletonPredictionContext); ok {
a = NewArrayPredictionContext([]IPredictionContext{a.GetParent(0)}, []int{a.getReturnState(0)})
if _, ok := a.(*BaseSingletonPredictionContext); ok {
a = NewArrayPredictionContext([]PredictionContext{a.GetParent(0)}, []int{a.getReturnState(0)})
}
if _, ok := b.(*SingletonPredictionContext); ok {
b = NewArrayPredictionContext([]IPredictionContext{b.GetParent(0)}, []int{b.getReturnState(0)})
if _, ok := b.(*BaseSingletonPredictionContext); ok {
b = NewArrayPredictionContext([]PredictionContext{b.GetParent(0)}, []int{b.getReturnState(0)})
}
return mergeArrays(a.(*ArrayPredictionContext), b.(*ArrayPredictionContext), rootIsWildcard, mergeCache)
}
//
// Merge two {@link SingletonPredictionContext} instances.
// Merge two {@link SingletonBasePredictionContext} instances.
//
// <p>Stack tops equal, parents merge is same return left graph.<br>
// <embed src="images/SingletonMerge_SameRootSamePar.svg"
@@ -433,21 +433,21 @@ func merge(a, b IPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict)
// <embed src="images/SingletonMerge_DiffRootDiffPar.svg"
// type="image/svg+xml"/></p>
//
// @param a the first {@link SingletonPredictionContext}
// @param b the second {@link SingletonPredictionContext}
// @param a the first {@link SingletonBasePredictionContext}
// @param b the second {@link SingletonBasePredictionContext}
// @param rootIsWildcard {@code true} if this is a local-context merge,
// otherwise false to indicate a full-context merge
// @param mergeCache
// /
func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) PredictionContext {
if mergeCache != nil {
var previous = mergeCache.Get(a.Hash(), b.Hash())
if previous != nil {
return previous.(IPredictionContext)
return previous.(PredictionContext)
}
previous = mergeCache.Get(b.Hash(), a.Hash())
if previous != nil {
return previous.(IPredictionContext)
return previous.(PredictionContext)
}
}
@@ -472,14 +472,14 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
// merge parents x and y, giving array node with x,y then remainders
// of those graphs. dup a, a' points at merged array
// New joined parent, so create a new singleton pointing to it, a'
var spc = SingletonPredictionContextCreate(parent, a.returnState)
var spc = SingletonBasePredictionContextCreate(parent, a.returnState)
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), spc)
}
return spc
} else { // a != b payloads differ
// see if we can collapse parents due to $+x parents if local ctx
var singleParent IPredictionContext = nil
var singleParent PredictionContext = nil
if a == b || (a.parentCtx != nil && a.parentCtx == b.parentCtx) { // ax +
// bx =
// [a,b]x
@@ -492,7 +492,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
payloads[0] = b.returnState
payloads[1] = a.returnState
}
var parents = []IPredictionContext{singleParent, singleParent}
var parents = []PredictionContext{singleParent, singleParent}
var apc = NewArrayPredictionContext(parents, payloads)
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), apc)
@@ -503,11 +503,11 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
// into array can't merge.
// ax + by = [ax,by]
var payloads = []int{a.returnState, b.returnState}
var parents = []IPredictionContext{a.parentCtx, b.parentCtx}
var parents = []PredictionContext{a.parentCtx, b.parentCtx}
if a.returnState > b.returnState { // sort by payload
payloads[0] = b.returnState
payloads[1] = a.returnState
parents = []IPredictionContext{b.parentCtx, a.parentCtx}
parents = []PredictionContext{b.parentCtx, a.parentCtx}
}
var a_ = NewArrayPredictionContext(parents, payloads)
if mergeCache != nil {
@@ -550,29 +550,29 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
//
// <p><embed src="images/FullMerge_SameRoot.svg" type="image/svg+xml"/></p>
//
// @param a the first {@link SingletonPredictionContext}
// @param b the second {@link SingletonPredictionContext}
// @param a the first {@link SingletonBasePredictionContext}
// @param b the second {@link SingletonBasePredictionContext}
// @param rootIsWildcard {@code true} if this is a local-context merge,
// otherwise false to indicate a full-context merge
// /
func mergeRoot(a, b ISingletonPredictionContext, rootIsWildcard bool) IPredictionContext {
func mergeRoot(a, b SingletonPredictionContext, rootIsWildcard bool) PredictionContext {
if rootIsWildcard {
if a == PredictionContextEMPTY {
return PredictionContextEMPTY // // + b =//
if a == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY // // + b =//
}
if b == PredictionContextEMPTY {
return PredictionContextEMPTY // a +// =//
if b == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY // a +// =//
}
} else {
if a == PredictionContextEMPTY && b == PredictionContextEMPTY {
return PredictionContextEMPTY // $ + $ = $
} else if a == PredictionContextEMPTY { // $ + x = [$,x]
var payloads = []int{b.getReturnState(-1), PredictionContextEMPTY_RETURN_STATE}
var parents = []IPredictionContext{b.GetParent(-1), nil}
if a == BasePredictionContextEMPTY && b == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY // $ + $ = $
} else if a == BasePredictionContextEMPTY { // $ + x = [$,x]
var payloads = []int{b.getReturnState(-1), BasePredictionContextEMPTY_RETURN_STATE}
var parents = []PredictionContext{b.GetParent(-1), nil}
return NewArrayPredictionContext(parents, payloads)
} else if b == PredictionContextEMPTY { // x + $ = [$,x] ($ is always first if present)
var payloads = []int{a.getReturnState(-1), PredictionContextEMPTY_RETURN_STATE}
var parents = []IPredictionContext{a.GetParent(-1), nil}
} else if b == BasePredictionContextEMPTY { // x + $ = [$,x] ($ is always first if present)
var payloads = []int{a.getReturnState(-1), BasePredictionContextEMPTY_RETURN_STATE}
var parents = []PredictionContext{a.GetParent(-1), nil}
return NewArrayPredictionContext(parents, payloads)
}
}
@@ -580,7 +580,7 @@ func mergeRoot(a, b ISingletonPredictionContext, rootIsWildcard bool) IPredictio
}
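mergeRoot encodes a small decision table: with rootIsWildcard (local-context/SLL merges) EMPTY absorbs the other operand, while in full-context mode $ only collapses with $ and otherwise both alternatives are kept in an array context. A toy sketch of that table, assuming a simplified ctx type and a sentinel standing in for BasePredictionContextEMPTY_RETURN_STATE:

```go
package main

import "fmt"

// ctx is a hypothetical stand-in for PredictionContext; emptyReturnState
// mirrors the EMPTY_RETURN_STATE sentinel (max int32 in the Java runtime).
const emptyReturnState = 0x7FFFFFFF

type ctx struct{ returnStates []int }

var emptyCtx = &ctx{returnStates: []int{emptyReturnState}}

// mergeRootSketch reproduces mergeRoot's root-merge rules.
func mergeRootSketch(a, b *ctx, rootIsWildcard bool) *ctx {
	if rootIsWildcard {
		// Local-context merge: EMPTY is the wildcard, so it absorbs the other side.
		if a == emptyCtx || b == emptyCtx {
			return emptyCtx
		}
		return nil // not decided at the root
	}
	// Full-context merge.
	if a == emptyCtx && b == emptyCtx {
		return emptyCtx // $ + $ = $
	}
	if a == emptyCtx { // $ + x: keep both; the EMPTY sentinel sorts last
		return &ctx{returnStates: []int{b.returnStates[0], emptyReturnState}}
	}
	if b == emptyCtx { // x + $: symmetric case
		return &ctx{returnStates: []int{a.returnStates[0], emptyReturnState}}
	}
	return nil
}

func main() {
	x := &ctx{returnStates: []int{42}}
	fmt.Println(mergeRootSketch(emptyCtx, x, true).returnStates)  // [2147483647]
	fmt.Println(mergeRootSketch(emptyCtx, x, false).returnStates) // [42 2147483647]
}
```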
//
// Merge two {@link ArrayPredictionContext} instances.
// Merge two {@link ArrayBasePredictionContext} instances.
//
// <p>Different tops, different parents.<br>
// <embed src="images/ArrayMerge_DiffTopDiffPar.svg" type="image/svg+xml"/></p>
@@ -596,18 +596,18 @@ func mergeRoot(a, b ISingletonPredictionContext, rootIsWildcard bool) IPredictio
// type="image/svg+xml"/></p>
//
// <p>Equal tops, merge parents and reduce top to
// {@link SingletonPredictionContext}.<br>
// {@link SingletonBasePredictionContext}.<br>
// <embed src="images/ArrayMerge_EqualTop.svg" type="image/svg+xml"/></p>
// /
func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) PredictionContext {
if mergeCache != nil {
var previous = mergeCache.Get(a.Hash(), b.Hash())
if previous != nil {
return previous.(IPredictionContext)
return previous.(PredictionContext)
}
previous = mergeCache.Get(b.Hash(), a.Hash())
if previous != nil {
return previous.(IPredictionContext)
return previous.(PredictionContext)
}
}
// merge sorted payloads a + b => M
@@ -616,7 +616,7 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
var k = 0 // walks target M array
var mergedReturnStates = make([]int, 0)
var mergedParents = make([]IPredictionContext, 0)
var mergedParents = make([]PredictionContext, 0)
// walk and merge to yield mergedParents, mergedReturnStates
for i < len(a.returnStates) && j < len(b.returnStates) {
var a_parent = a.parents[i]
@@ -625,7 +625,7 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
// same payload (stack tops are equal), must yield merged singleton
var payload = a.returnStates[i]
// $+$ = $
var bothDollars = payload == PredictionContextEMPTY_RETURN_STATE && a_parent == nil && b_parent == nil
var bothDollars = payload == BasePredictionContextEMPTY_RETURN_STATE && a_parent == nil && b_parent == nil
var ax_ax = (a_parent != nil && b_parent != nil && a_parent == b_parent) // ax+ax
// ->
// ax
@@ -667,7 +667,7 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
// trim merged if we combined a few that had same stack tops
if k < len(mergedParents) { // write index < last position trim
if k == 1 { // for just one merged element, return singleton top
var a_ = SingletonPredictionContextCreate(mergedParents[0], mergedReturnStates[0])
var a_ = SingletonBasePredictionContextCreate(mergedParents[0], mergedReturnStates[0])
if mergeCache != nil {
mergeCache.set(a.Hash(), b.Hash(), a_)
}
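The body of mergeArrays is a two-cursor walk over the two sorted returnStates arrays: equal stack tops collapse into a single merged entry, otherwise the smaller payload is emitted and its cursor advanced, and any leftovers are copied at the end. A reduced sketch of that walk over plain int slices (mergeSortedStates is a hypothetical helper; parent merging and the final trim are omitted):

```go
package main

import "fmt"

// mergeSortedStates merges two sorted return-state slices, collapsing
// equal stack tops, in the same spirit as the walk inside mergeArrays.
func mergeSortedStates(a, b []int) []int {
	merged := make([]int, 0, len(a)+len(b))
	i, j := 0, 0
	for i < len(a) && j < len(b) {
		switch {
		case a[i] == b[j]:
			// Equal stack tops collapse into one merged entry.
			merged = append(merged, a[i])
			i++
			j++
		case a[i] < b[j]:
			merged = append(merged, a[i])
			i++
		default:
			merged = append(merged, b[j])
			j++
		}
	}
	// Copy over whatever remains on either side.
	merged = append(merged, a[i:]...)
	merged = append(merged, b[j:]...)
	return merged
}

func main() {
	fmt.Println(mergeSortedStates([]int{3, 7, 9}, []int{3, 8, 9})) // [3 7 8 9]
}
```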
@@ -705,8 +705,8 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
// Make pass over all <em>M</em> {@code parents} merge any {@code equals()}
// ones.
// /
func combineCommonParents(parents []IPredictionContext) {
var uniqueParents = make(map[IPredictionContext]IPredictionContext)
func combineCommonParents(parents []PredictionContext) {
var uniqueParents = make(map[PredictionContext]PredictionContext)
for p := 0; p < len(parents); p++ {
var parent = parents[p]
@@ -719,7 +719,7 @@ func combineCommonParents(parents []IPredictionContext) {
}
}
func getCachedPredictionContext(context IPredictionContext, contextCache *PredictionContextCache, visited map[IPredictionContext]IPredictionContext) IPredictionContext {
func getCachedBasePredictionContext(context PredictionContext, contextCache *PredictionContextCache, visited map[PredictionContext]PredictionContext) PredictionContext {
if (context.isEmpty()) {
return context
@@ -734,12 +734,12 @@ func getCachedPredictionContext(context IPredictionContext, contextCache *Predic
return existing
}
var changed = false
var parents = make([]IPredictionContext, context.length())
var parents = make([]PredictionContext, context.length())
for i := 0; i < len(parents); i++ {
var parent = getCachedPredictionContext(context.GetParent(i), contextCache, visited)
var parent = getCachedBasePredictionContext(context.GetParent(i), contextCache, visited)
if (changed || parent != context.GetParent(i)) {
if (!changed) {
parents = make([]IPredictionContext, context.length())
parents = make([]PredictionContext, context.length())
for j := 0; j < context.length(); j++ {
parents[j] = context.GetParent(j)
}
@@ -753,11 +753,11 @@ func getCachedPredictionContext(context IPredictionContext, contextCache *Predic
visited[context] = context
return context
}
var updated IPredictionContext = nil
var updated PredictionContext = nil
if (len(parents) == 0) {
updated = PredictionContextEMPTY
updated = BasePredictionContextEMPTY
} else if (len(parents) == 1) {
updated = SingletonPredictionContextCreate(parents[0], context.getReturnState(0))
updated = SingletonBasePredictionContextCreate(parents[0], context.getReturnState(0))
} else {
updated = NewArrayPredictionContext(parents, context.(*ArrayPredictionContext).GetReturnStates())
}
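getCachedBasePredictionContext canonicalizes a context graph against the cache: it memoizes every node it has already processed in visited and only copies the parents slice once a recursively canonicalized parent actually differs from the original. A reduced sketch of that copy-on-write recursion over a hypothetical node type (no contextCache, just the visited map):

```go
package main

import "fmt"

// node and canonicalize are hypothetical stand-ins for PredictionContext
// and getCachedBasePredictionContext.
type node struct {
	parents []*node
	label   int
}

// canonicalize returns a (possibly shared) replacement for n and only
// copies the parents slice once a replacement parent actually differs.
func canonicalize(n *node, visited map[*node]*node) *node {
	if existing, ok := visited[n]; ok {
		return existing
	}
	changed := false
	parents := n.parents
	for i, p := range n.parents {
		q := canonicalize(p, visited)
		if q != p && !changed {
			parents = append([]*node(nil), n.parents...) // first difference: copy
			changed = true
		}
		if changed {
			parents[i] = q
		}
	}
	if !changed {
		visited[n] = n
		return n
	}
	updated := &node{parents: parents, label: n.label}
	visited[n] = updated
	visited[updated] = updated
	return updated
}

func main() {
	leaf := &node{label: 1}
	root := &node{parents: []*node{leaf, leaf}, label: 2}
	fmt.Println(canonicalize(root, map[*node]*node{}) == root) // true: nothing changed
}
```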

View File

@@ -184,7 +184,7 @@ func PredictionModehasSLLConflictTerminatingPrediction(mode int, configs ATNConf
for _, c := range configs.GetItems() {
// NewBaseATNConfig({semanticContext:}, c)
c = NewBaseATNConfig2(c, SemanticContextNONE)
c = NewBaseATNConfig2(c, SemanticContextNone)
dup.Add(c, nil)
}
configs = dup

View File

@@ -19,12 +19,12 @@ type Recognizer interface {
SetState(int)
Action(_localctx RuleContext, ruleIndex, actionIndex int)
GetATN() *ATN
getErrorListenerDispatch() IErrorListener
getErrorListenerDispatch() ErrorListener
}
type BaseRecognizer struct {
_listeners []IErrorListener
_listeners []ErrorListener
state int
RuleNames []string
@@ -35,7 +35,7 @@ type BaseRecognizer struct {
func NewBaseRecognizer() *BaseRecognizer {
rec := new(BaseRecognizer)
rec._listeners = []IErrorListener{ConsoleErrorListenerINSTANCE}
rec._listeners = []ErrorListener{ConsoleErrorListenerINSTANCE}
rec.state = -1
return rec
}
@@ -54,12 +54,12 @@ func (this *BaseRecognizer) Action(context RuleContext, ruleIndex, actionIndex i
panic("action not implemented on Recognizer!")
}
func (this *BaseRecognizer) addErrorListener(listener IErrorListener) {
func (this *BaseRecognizer) addErrorListener(listener ErrorListener) {
this._listeners = append(this._listeners, listener)
}
func (this *BaseRecognizer) removeErrorListeners() {
this._listeners = make([]IErrorListener, 0)
this._listeners = make([]ErrorListener, 0)
}
func (this *BaseRecognizer) GetRuleNames() []string {
@@ -181,7 +181,7 @@ func (this *BaseRecognizer) getErrorHeader(e RecognitionException) string {
// feature when necessary. For example, see
// {@link DefaultErrorStrategy//GetTokenErrorDisplay}.
//
func (this *BaseRecognizer) GetTokenErrorDisplay(t IToken) string {
func (this *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
if t == nil {
return "<no token>"
}
@@ -200,7 +200,7 @@ func (this *BaseRecognizer) GetTokenErrorDisplay(t IToken) string {
return "'" + s + "'"
}
func (this *BaseRecognizer) getErrorListenerDispatch() IErrorListener {
func (this *BaseRecognizer) getErrorListenerDispatch() ErrorListener {
return NewProxyErrorListener(this._listeners)
}
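getErrorListenerDispatch hands back a ProxyErrorListener wrapping _listeners, so error-reporting code calls a single listener and the proxy fans the call out to every registered one. A minimal sketch of that proxy pattern with simplified stand-in types (reporter/proxy are hypothetical, not the runtime's ErrorListener API):

```go
package main

import "fmt"

// reporter stands in for ErrorListener; proxy stands in for ProxyErrorListener.
type reporter interface {
	SyntaxError(msg string)
}

type consoleReporter struct{ name string }

func (c *consoleReporter) SyntaxError(msg string) {
	fmt.Printf("%s: %s\n", c.name, msg)
}

// proxy implements reporter by delegating to every registered reporter.
type proxy struct{ delegates []reporter }

func (p *proxy) SyntaxError(msg string) {
	for _, d := range p.delegates {
		d.SyntaxError(msg)
	}
}

func main() {
	dispatch := &proxy{delegates: []reporter{
		&consoleReporter{name: "console"},
		&consoleReporter{name: "log"},
	}}
	dispatch.SyntaxError("mismatched input") // reported once, delivered to both listeners
}
```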

View File

@@ -107,7 +107,7 @@ func (this *BaseRuleContext) isEmpty() bool {
// satisfy the ParseTree / SyntaxTree interface
func (this *BaseRuleContext) GetSourceInterval() *Interval {
return TreeINVALID_INTERVAL
return TreeInvalidInterval
}
func (this *BaseRuleContext) getRuleContext() RuleContext {

View File

@@ -21,10 +21,10 @@ type SemanticContext interface {
}
func SemanticContextandContext(a, b SemanticContext) SemanticContext {
if a == nil || a == SemanticContextNONE {
if a == nil || a == SemanticContextNone {
return b
}
if b == nil || b == SemanticContextNONE {
if b == nil || b == SemanticContextNone {
return a
}
var result = NewAND(a, b)
@@ -42,8 +42,8 @@ func SemanticContextorContext(a, b SemanticContext) SemanticContext {
if b == nil {
return a
}
if a == SemanticContextNONE || b == SemanticContextNONE {
return SemanticContextNONE
if a == SemanticContextNone || b == SemanticContextNone {
return SemanticContextNone
}
var result = NewOR(a, b)
if len(result.opnds) == 1 {
@@ -71,7 +71,7 @@ func NewPredicate(ruleIndex, predIndex int, isCtxDependent bool) *Predicate {
//The default {@link SemanticContext}, which is semantically equivalent to
//a predicate of the form {@code {true}?}.
var SemanticContextNONE SemanticContext = NewPredicate(-1, -1, false)
var SemanticContextNone SemanticContext = NewPredicate(-1, -1, false)
func (this *Predicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
return this
@@ -126,7 +126,7 @@ func (this *PrecedencePredicate) evaluate(parser Recognizer, outerContext RuleCo
func (this *PrecedencePredicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
if parser.Precpred(outerContext, this.precedence) {
return SemanticContextNONE
return SemanticContextNone
} else {
return nil
}
@@ -263,7 +263,7 @@ func (this *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) Sem
if evaluated == nil {
// The AND context is false if any element is false
return nil
} else if evaluated != SemanticContextNONE {
} else if evaluated != SemanticContextNone {
// Reduce the result by skipping true elements
operands = append(operands, evaluated)
}
@@ -274,7 +274,7 @@ func (this *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) Sem
if len(operands) == 0 {
// all elements were true, so the AND context is true
return SemanticContextNONE
return SemanticContextNone
}
var result SemanticContext = nil
@@ -395,9 +395,9 @@ func (this *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) Sema
var context = this.opnds[i]
var evaluated = context.evalPrecedence(parser, outerContext)
differs = differs || (evaluated != context)
if evaluated == SemanticContextNONE {
if evaluated == SemanticContextNone {
// The OR context is true if any element is true
return SemanticContextNONE
return SemanticContextNone
} else if evaluated != nil {
// Reduce the result by skipping false elements
operands = append(operands, evaluated)

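The SemanticContextandContext/SemanticContextorContext helpers treat SemanticContextNone (the always-true predicate) as the identity for AND and as an absorbing element for OR, which is also why the AND/OR evalPrecedence loops above skip or short-circuit on it. A standalone toy model of that algebra, with a hypothetical pred type standing in for SemanticContext:

```go
package main

import "fmt"

// pred stands in for SemanticContext; none stands in for SemanticContextNone,
// the always-true predicate.
type pred struct{ name string }

var none = &pred{name: "none"}

func andCtx(a, b *pred) *pred {
	if a == nil || a == none {
		return b // true AND b = b
	}
	if b == nil || b == none {
		return a // a AND true = a
	}
	return &pred{name: "(" + a.name + " && " + b.name + ")"}
}

func orCtx(a, b *pred) *pred {
	if a == nil {
		return b
	}
	if b == nil {
		return a
	}
	if a == none || b == none {
		return none // anything OR true is true
	}
	return &pred{name: "(" + a.name + " || " + b.name + ")"}
}

func main() {
	p := &pred{name: "p"}
	q := &pred{name: "q"}
	fmt.Println(andCtx(none, p).name) // p
	fmt.Println(orCtx(p, none).name)  // none
	fmt.Println(andCtx(p, q).name)    // (p && q)
}
```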
View File

@@ -15,7 +15,7 @@ type TokenSourceCharStreamPair struct {
// we obtained this token.
type IToken interface {
type Token interface {
GetSource() *TokenSourceCharStreamPair
GetTokenType() int
GetChannel() int
@@ -34,7 +34,7 @@ type IToken interface {
GetInputStream() CharStream
}
type Token struct {
type BaseToken struct {
source *TokenSourceCharStreamPair
tokenType int // token type of the token
channel int // The parser ignores everything not on DEFAULT_CHANNEL
@@ -70,59 +70,59 @@ const (
TokenHiddenChannel = 1
)
func (this *Token) GetChannel() int {
func (this *BaseToken) GetChannel() int {
return this.channel
}
func (this *Token) GetStart() int {
func (this *BaseToken) GetStart() int {
return this.start
}
func (this *Token) GetStop() int {
func (this *BaseToken) GetStop() int {
return this.stop
}
func (this *Token) GetLine() int {
func (this *BaseToken) GetLine() int {
return this.line
}
func (this *Token) GetColumn() int {
func (this *BaseToken) GetColumn() int {
return this.column
}
func (this *Token) GetTokenType() int {
func (this *BaseToken) GetTokenType() int {
return this.tokenType
}
func (this *Token) GetSource() *TokenSourceCharStreamPair{
func (this *BaseToken) GetSource() *TokenSourceCharStreamPair{
return this.source
}
func (this *Token) GetTokenIndex() int {
func (this *BaseToken) GetTokenIndex() int {
return this.tokenIndex
}
func (this *Token) SetTokenIndex(v int) {
func (this *BaseToken) SetTokenIndex(v int) {
this.tokenIndex = v
}
func (this *Token) GetTokenSource() TokenSource {
func (this *BaseToken) GetTokenSource() TokenSource {
return this.source.tokenSource
}
func (this *Token) GetInputStream() CharStream {
func (this *BaseToken) GetInputStream() CharStream {
return this.source.charStream
}
type CommonToken struct {
*Token
*BaseToken
}
func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start, stop int) *CommonToken {
t := new(CommonToken)
t.Token = new(Token)
t.BaseToken = new(BaseToken)
t.source = source
t.tokenType = tokenType

View File

@@ -1,7 +1,7 @@
package antlr4
type TokenSource interface {
nextToken() IToken
nextToken() Token
skip()
more()
getLine() int

View File

@@ -3,14 +3,14 @@ package antlr4
type TokenStream interface {
IntStream
LT(k int) IToken
LT(k int) Token
Get(index int) IToken
Get(index int) Token
GetTokenSource() TokenSource
SetTokenSource(TokenSource)
GetAllText() string
GetTextFromInterval(*Interval) string
GetTextFromRuleContext(RuleContext) string
GetTextFromTokens(IToken, IToken) string
GetTextFromTokens(Token, Token) string
}

View File

@@ -4,7 +4,7 @@ package antlr4
// It is the most abstract interface for all the trees used by ANTLR.
///
var TreeINVALID_INTERVAL = NewInterval(-1, -2)
var TreeInvalidInterval = NewInterval(-1, -2)
type Tree interface {
GetParent() Tree
@@ -41,7 +41,7 @@ type RuleNode interface {
type TerminalNode interface {
ParseTree
getSymbol() IToken
getSymbol() Token
}
type ErrorNode interface {
@@ -87,10 +87,10 @@ type ParseTreeListener interface {
type TerminalNodeImpl struct {
parentCtx RuleContext
symbol IToken
symbol Token
}
func NewTerminalNodeImpl(symbol IToken) *TerminalNodeImpl {
func NewTerminalNodeImpl(symbol Token) *TerminalNodeImpl {
tn := new(TerminalNodeImpl)
tn.parentCtx = nil
@@ -112,7 +112,7 @@ func (this *TerminalNodeImpl) setChildren(t []Tree) {
panic("Cannot set children on terminal node")
}
func (this *TerminalNodeImpl) getSymbol() IToken {
func (this *TerminalNodeImpl) getSymbol() Token {
return this.symbol
}
@@ -130,7 +130,7 @@ func (this *TerminalNodeImpl) getPayload() interface{} {
func (this *TerminalNodeImpl) GetSourceInterval() *Interval {
if this.symbol == nil {
return TreeINVALID_INTERVAL
return TreeInvalidInterval
}
var tokenIndex = this.symbol.GetTokenIndex()
return NewInterval(tokenIndex, tokenIndex)
@@ -166,7 +166,7 @@ type ErrorNodeImpl struct {
*TerminalNodeImpl
}
func NewErrorNodeImpl(token IToken) *ErrorNodeImpl {
func NewErrorNodeImpl(token Token) *ErrorNodeImpl {
en := new(ErrorNodeImpl)
en.TerminalNodeImpl = NewTerminalNodeImpl(token)
return en

View File

@@ -53,7 +53,7 @@ func TreesgetNodeText(t Tree, ruleNames []string, recog *BaseParser) string {
// no recog for rule names
var payload = t.getPayload()
if p2, ok := payload.(IToken); ok {
if p2, ok := payload.(Token); ok {
return p2.GetText()
}

View File

@@ -64,12 +64,8 @@ func (l *<file.grammarName>Listener) Exit<lname; format="cap">(ctx antlr4.Parser
}; separator="\n">
>>
VisitorFile(file, header) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
package parser // <file.grammarName>
@@ -413,7 +413,7 @@ StarBlock(choice, alts, Sync, iteration) ::= <<
p.SetState(<choice.stateNumber>)
p.GetErrorHandler().Sync(p)
_alt := p.GetInterpreter().AdaptivePredict(p.GetTokenStream(),<choice.decision>,p.GetParserRuleContext())
for _alt!=<choice.exitAlt> && _alt!= antlr4.ATNINVALID_ALT_NUMBER {
for _alt!=<choice.exitAlt> && _alt!= antlr4.ATNInvalidAltNumber {
if(_alt==1<if(!choice.ast.greedy)>+1<endif>) {
<iteration>
<alts> <! should only be one !>
@@ -429,7 +429,7 @@ PlusBlock(choice, alts, error) ::= <<
p.SetState(<choice.blockStartStateNumber>) <! alt block decision !>
p.GetErrorHandler().Sync(p)
_alt := 1<if(!choice.ast.greedy)>+1<endif>
for ok := true; ok; ok = _alt!=<choice.exitAlt> && _alt!= antlr4.ATNINVALID_ALT_NUMBER {
for ok := true; ok; ok = _alt!=<choice.exitAlt> && _alt!= antlr4.ATNInvalidAltNumber {
switch _alt {
<alts:{alt|
case <i><if(!choice.ast.greedy)>+1<endif>: