forked from jasder/antlr
fix numerous compiler errors
This commit is contained in:
parent 7c812fb320
commit 4441b6f673
@@ -1,19 +1,19 @@
package antlr4
type ATN struct {
grammarType *ATNType
grammarType int
maxTokenType int
states []*ATNState
decisionToState []DecisionState
ruleToStartState []RuleStartState
ruleToStopState []RuleStopState
modeNameToStartState map[string]TokensStartState
states []IATNState
decisionToState []*DecisionState
ruleToStartState []*RuleStartState
ruleToStopState []*RuleStopState
modeNameToStartState map[string]*TokensStartState
modeToStartState []*TokensStartState
ruleToTokenType []int
lexerActions []LexerAction
modeToStartState []TokensStartState
lexerActions []ILexerAction
}
func NewATN(grammarType *ATNType, maxTokenType int) *ATN {
func NewATN(grammarType int, maxTokenType int) *ATN {
atn := new(ATN)
@@ -22,16 +22,16 @@ func NewATN(grammarType *ATNType, maxTokenType int) *ATN {
atn.grammarType = grammarType
// The maximum value for any symbol recognized by a transition in the ATN.
atn.maxTokenType = maxTokenType
atn.states = make([]*ATNState)
atn.states = make([]IATNState,0)
// Each subrule/rule is a decision point and we must track them so we
// can go back later and build DFA predictors for them. This includes
// all the rules, subrules, optional blocks, ()+, ()* etc...
atn.decisionToState = make([]*DecisionState)
atn.decisionToState = make([]*DecisionState, 0)
// Maps from rule index to starting state number.
atn.ruleToStartState = []RuleStartState
atn.ruleToStartState = make([]*RuleStartState, 0)
// Maps from rule index to stop state number.
atn.ruleToStopState = nil
atn.modeNameToStartState = make( map[string]TokensStartState )
atn.modeNameToStartState = make( map[string]*TokensStartState )
// For lexer ATNs, atn.maps the rule index to the resulting token type.
// For parser ATNs, atn.maps the rule index to the generated bypass token
// type if the
@@ -41,7 +41,7 @@ func NewATN(grammarType *ATNType, maxTokenType int) *ATN {
// For lexer ATNs, atn.is an array of {@link LexerAction} objects which may
// be referenced by action transitions in the ATN.
atn.lexerActions = nil
atn.modeToStartState = make([]TokensStartState)
atn.modeToStartState = make([]*TokensStartState, 0)
return atn
@@ -51,7 +51,7 @@ func NewATN(grammarType *ATNType, maxTokenType int) *ATN {
// If {@code ctx} is nil, the set of tokens will not include what can follow
// the rule surrounding {@code s}. In other words, the set will be
// restricted to tokens reachable staying within {@code s}'s rule.
func (this *ATN) nextTokensInContext(s *ATNState, ctx *RuleContext) *IntervalSet {
func (this *ATN) nextTokensInContext(s IATNState, ctx *RuleContext) *IntervalSet {
var anal = NewLL1Analyzer(this)
return anal.LOOK(s, nil, ctx)
}
@@ -59,16 +59,16 @@ func (this *ATN) nextTokensInContext(s *ATNState, ctx *RuleContext) *IntervalSet
// Compute the set of valid tokens that can occur starting in {@code s} and
// staying in same rule. {@link Token//EPSILON} is in set if we reach end of
// rule.
func (this *ATN) nextTokensNoContext(s *ATNState) *IntervalSet {
if (s.nextTokenWithinRule != nil ) {
return s.nextTokenWithinRule
func (this *ATN) nextTokensNoContext(s IATNState) *IntervalSet {
if (s.getNextTokenWithinRule() != nil ) {
return s.getNextTokenWithinRule()
}
s.nextTokenWithinRule = this.nextTokensInContext(s, nil)
s.nextTokenWithinRule.readOnly = true
return s.nextTokenWithinRule
s.setNextTokenWithinRule( this.nextTokensInContext(s, nil) )
s.getNextTokenWithinRule().readOnly = true
return s.getNextTokenWithinRule()
}
func (this *ATN) nextTokens(s *ATNState, ctx *RuleContext) *IntervalSet {
func (this *ATN) nextTokens(s IATNState, ctx *RuleContext) *IntervalSet {
if ( ctx==nil ) {
return this.nextTokensNoContext(s)
} else {
@@ -76,25 +76,25 @@ func (this *ATN) nextTokens(s IATNState, ctx *RuleContext) *IntervalSet {
}
}
func (this *ATN) addState( state *ATNState ) {
func (this *ATN) addState( state IATNState ) {
if ( state != nil ) {
state.atn = this
state.stateNumber = len(this.states)
state.setATN(this)
state.setStateNumber(len(this.states))
}
this.states = append(this.states, state)
}
func (this *ATN) removeState( state *ATNState ) {
this.states[state.stateNumber] = nil // just free mem, don't shift states in list
func (this *ATN) removeState( state IATNState ) {
this.states[state.getStateNumber()] = nil // just free mem, don't shift states in list
}
func (this *ATN) defineDecisionState( s DecisionState) DecisionState {
append( this.decisionToState, s)
func (this *ATN) defineDecisionState( s *DecisionState ) int {
this.decisionToState = append( this.decisionToState, s)
s.decision = len(this.decisionToState)-1
return s.decision
}
func (this *ATN) getDecisionState( decision int) DecisionState {
func (this *ATN) getDecisionState( decision int) *DecisionState {
if (len(this.decisionToState)==0) {
return nil
} else {
@@ -122,7 +122,7 @@ func (this *ATN) getDecisionState( decision int) DecisionState {
//var Token = require('./../Token').Token
func (this *ATN) getExpectedTokens( stateNumber int, ctx *RuleContext ) *IntervalSet {
func (this *ATN) getExpectedTokens( stateNumber int, ctx IRuleContext ) *IntervalSet {
if ( stateNumber < 0 || stateNumber >= len(this.states) ) {
panic("Invalid state number.")
}
@@ -134,13 +134,13 @@ func (this *ATN) getExpectedTokens( stateNumber int, ctx *RuleContext ) *Interva
var expected = NewIntervalSet()
expected.addSet(following)
expected.removeOne(TokenEpsilon)
for (ctx != nil && ctx.invokingState >= 0 && following.contains(TokenEpsilon)) {
var invokingState = this.states[ctx.invokingState]
var rt = invokingState.transitions[0]
following = this.nextTokens(rt.(RuleTransition).followState, nil)
for (ctx != nil && ctx.getInvokingState() >= 0 && following.contains(TokenEpsilon)) {
var invokingState = this.states[ctx.getInvokingState()]
var rt = invokingState.getTransitions()[0]
following = this.nextTokens(rt.(*RuleTransition).followState, nil)
expected.addSet(following)
expected.removeOne(TokenEpsilon)
ctx = ctx.parentCtx
ctx = ctx.getParent().(IRuleContext)
}
if (following.contains(TokenEpsilon)) {
expected.addOne(TokenEOF)
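The recurring change in this file is mechanical: concrete struct pointers such as *ATNState become interface types (IATNState), and direct field access (state.stateNumber) becomes getter/setter calls (state.getStateNumber()), so the many state subtypes can share one slice and one API. The sketch below is illustrative only, with hypothetical names rather than the runtime's real types:

package main

import "fmt"

// INode stands in for an interface like IATNState: callers depend on
// behaviour, not on one concrete struct type.
type INode interface {
	getStateNumber() int
	setStateNumber(int)
}

// node plays the role of the embedded base struct (ATNState).
type node struct {
	stateNumber int
}

func (n *node) getStateNumber() int  { return n.stateNumber }
func (n *node) setStateNumber(v int) { n.stateNumber = v }

// addState mirrors ATN.addState above: it takes the interface, so any
// variant embedding *node can live in a single []INode slice.
func addState(states []INode, s INode) []INode {
	s.setStateNumber(len(states))
	return append(states, s)
}

func main() {
	states := make([]INode, 0) // make needs a length; make([]INode) alone would not compile
	states = addState(states, &node{})
	fmt.Println(states[0].getStateNumber()) // prints 0
}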
@@ -3,6 +3,7 @@ package antlr4
import (
"reflect"
"fmt"
"strconv"
)
// A tuple: (ATN state, predicted alt, syntactic, semantic context).
@@ -13,12 +14,27 @@ import (
// an ATN state.
//
type IATNConfig interface {
getPrecedenceFilterSuppressed() bool
setPrecedenceFilterSuppressed(bool)
getState() IATNState
getAlt() int
getSemanticContext() SemanticContext
getContext() IPredictionContext
setContext(IPredictionContext)
getReachesIntoOuterContext() int
setReachesIntoOuterContext(int)
}
type ATNConfig struct {
precedenceFilterSuppressed int
state *ATNState
precedenceFilterSuppressed bool
state IATNState
alt int
context *PredictionContext
semanticContext SemanticContext
context IPredictionContext
semanticContext SemanticContext
reachesIntoOuterContext int
}
@@ -32,49 +48,93 @@ func NewATNConfig7(old *ATNConfig) *ATNConfig { // dup
return a
}
func NewATNConfig6(state *ATNState, alt int, context *PredictionContext) *ATNConfig {
return NewATNConfig(state, alt, context, SemanticContextNONE);
func NewATNConfig6(state IATNState, alt int, context IPredictionContext) *ATNConfig {
return NewATNConfig5(state, alt, context, SemanticContextNONE);
}
func NewATNConfig5(state *ATNState, alt int, context *PredictionContext, semanticContext SemanticContext) *ATNConfig {
func NewATNConfig5(state IATNState,
alt int, context IPredictionContext, semanticContext SemanticContext) *ATNConfig {
a := new(ATNConfig)
a.state = state;
a.alt = alt;
a.context = context;
a.semanticContext = semanticContext;
a.InitATNConfig2(state, alt, context, semanticContext)
return a
}
func NewATNConfig4(c *ATNConfig, state *ATNState) *ATNConfig {
func NewATNConfig4(c *ATNConfig, state IATNState) *ATNConfig {
return NewATNConfig(c, state, c.context, c.semanticContext);
}
func NewATNConfig3(c *ATNConfig, state *ATNState, semanticContext *SemanticContext) *ATNConfig {
func NewATNConfig3(c *ATNConfig, state IATNState, semanticContext SemanticContext) *ATNConfig {
return NewATNConfig(c, state, c.context, semanticContext);
}
func NewATNConfig2(c *ATNConfig, semanticContext *SemanticContext) *ATNConfig {
func NewATNConfig2(c *ATNConfig, semanticContext SemanticContext) *ATNConfig {
return NewATNConfig(c, c.state, c.context, semanticContext);
}
func NewATNConfig1(c *ATNConfig, state *ATNState, context *PredictionContext) *ATNConfig {
func NewATNConfig1(c *ATNConfig, state IATNState, context IPredictionContext) *ATNConfig {
return NewATNConfig(c, state, context, c.semanticContext);
}
func NewATNConfig(c *ATNConfig, state *ATNState, context *PredictionContext, semanticContext *SemanticContext) *ATNConfig {
func NewATNConfig(c *ATNConfig, state IATNState, context IPredictionContext, semanticContext SemanticContext) *ATNConfig {
a := new(ATNConfig)
a.InitATNConfig(c, state, context, semanticContext)
return a
}
func (a *ATNConfig) InitATNConfig(c *ATNConfig, state *ATNState, context *PredictionContext, semanticContext *SemanticContext) {
func (this *ATNConfig) getPrecedenceFilterSuppressed() bool {
return this.precedenceFilterSuppressed
}
func (this *ATNConfig) setPrecedenceFilterSuppressed(v bool) {
this.precedenceFilterSuppressed = v
}
func (this *ATNConfig) getState() IATNState {
return this.state
}
func (this *ATNConfig) getAlt() int {
return this.alt
}
func (this *ATNConfig) setContext(v IPredictionContext) {
this.context = v
}
func (this *ATNConfig) getContext() IPredictionContext {
return this.context
}
func (this *ATNConfig) getSemanticContext() SemanticContext {
return this.semanticContext
}
func (this *ATNConfig) getReachesIntoOuterContext() int {
return this.reachesIntoOuterContext
}
func (this *ATNConfig) setReachesIntoOuterContext(v int) {
this.reachesIntoOuterContext = v
}
func (a *ATNConfig) InitATNConfig(c IATNConfig, state IATNState, context IPredictionContext, semanticContext SemanticContext) {
a.state = state;
a.alt = c.alt;
a.alt = c.getAlt();
a.context = context;
a.semanticContext = semanticContext;
a.reachesIntoOuterContext = c.getReachesIntoOuterContext();
}
func (a *ATNConfig) InitATNConfig2(state IATNState, alt int, context IPredictionContext, semanticContext SemanticContext) {
a.state = state;
a.alt = alt;
a.context = context;
a.semanticContext = semanticContext;
a.reachesIntoOuterContext = c.reachesIntoOuterContext;
}
@@ -92,11 +152,11 @@ func (this *ATNConfig) equals(other interface{}) bool {
}
}
func (this *ATNConfig) shortHashString() {
return "" + this.state.stateNumber + "/" + this.alt + "/" + this.semanticContext
func (this *ATNConfig) shortHashString() string {
return "" + strconv.Itoa(this.state.getStateNumber()) + "/" + strconv.Itoa(this.alt) + "/" + this.semanticContext.toString()
}
func (this *ATNConfig) hashString() {
func (this *ATNConfig) hashString() string {
var c string
if (this.context == nil){
@@ -105,7 +165,7 @@ func (this *ATNConfig) hashString() {
c = this.context.hashString()
}
return "" + this.state.stateNumber + "/" + this.alt + "/" + c + "/" + fmt.Sprint(this.semanticContext)
return "" + strconv.Itoa(this.state.getStateNumber()) + "/" + strconv.Itoa(this.alt) + "/" + c + "/" + this.semanticContext.toString()
}
func (this *ATNConfig) toString() string {
@@ -122,10 +182,10 @@ func (this *ATNConfig) toString() string {
var c string
if (this.reachesIntoOuterContext > 0){
c = ",up=" + this.reachesIntoOuterContext
c = ",up=" + fmt.Sprint(this.reachesIntoOuterContext)
}
return "(" + this.state + "," + this.alt + a + b + c + ")"
return "(" + fmt.Sprint(this.state) + "," + strconv.Itoa(this.alt) + a + b + c + ")"
}
@@ -137,27 +197,28 @@ type LexerATNConfig struct {
passedThroughNonGreedyDecision bool
}
func NewLexerATNConfig6(state *ATNState, alt int, context *PredictionContext) *LexerATNConfig {
func NewLexerATNConfig6(state IATNState, alt int, context IPredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
this.InitATNConfig(state, alt, context, SemanticContextNONE)
this.InitATNConfig2(state, alt, context, SemanticContextNONE)
this.passedThroughNonGreedyDecision = false
this.lexerActionExecutor = nil
return this
}
func NewLexerATNConfig5(state *ATNState, alt int, context *PredictionContext, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
func NewLexerATNConfig5(state IATNState, alt int, context IPredictionContext, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
this := new(LexerATNConfig)
this.InitATNConfig(state, alt, context, SemanticContextNONE)
this.InitATNConfig2(state, alt, context, SemanticContextNONE)
this.lexerActionExecutor = lexerActionExecutor
this.passedThroughNonGreedyDecision = false
return this
}
func NewLexerATNConfig4(c *LexerATNConfig, state *ATNState) *LexerATNConfig {
func NewLexerATNConfig4(c *LexerATNConfig, state IATNState) *LexerATNConfig {
this := new(LexerATNConfig)
@@ -167,7 +228,7 @@ func NewLexerATNConfig4(c *LexerATNConfig, state *ATNState) *LexerATNConfig {
return this
}
func NewLexerATNConfig3(c *LexerATNConfig, state *ATNState, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
func NewLexerATNConfig3(c *LexerATNConfig, state IATNState, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
this := new(LexerATNConfig)
@@ -177,7 +238,7 @@ func NewLexerATNConfig3(c *LexerATNConfig, state *ATNState, lexerActionExecutor
return this
}
func NewLexerATNConfig2(c *LexerATNConfig, state *ATNState, context *PredictionContext) *LexerATNConfig {
func NewLexerATNConfig2(c *LexerATNConfig, state IATNState, context IPredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
@@ -188,12 +249,12 @@ func NewLexerATNConfig2(c *LexerATNConfig, state *ATNState, context *Prediction
}
func NewLexerATNConfig1( state *ATNState, alt int, context *PredictionContext) *LexerATNConfig {
func NewLexerATNConfig1( state IATNState, alt int, context IPredictionContext) *LexerATNConfig {
this := new(LexerATNConfig)
// c *ATNConfig, state *ATNState, context *PredictionContext, semanticContext *SemanticContext
this.InitATNConfig(state, alt, context, SemanticContextNONE)
// c *ATNConfig, state IATNState, context IPredictionContext, semanticContext SemanticContext
this.InitATNConfig2(state, alt, context, SemanticContextNONE)
this.lexerActionExecutor = nil
this.passedThroughNonGreedyDecision = false
@@ -202,7 +263,7 @@ func NewLexerATNConfig1( state *ATNState, alt int, context *PredictionContext) *
}
func (this *LexerATNConfig) hashString() {
func (this *LexerATNConfig) hashString() string {
var f string
if this.passedThroughNonGreedyDecision {
@@ -211,11 +272,11 @@ func (this *LexerATNConfig) hashString() {
f = "0"
}
return "" + this.state.stateNumber + this.alt + this.context +
this.semanticContext + f + this.lexerActionExecutor
return "" + strconv.Itoa(this.state.getStateNumber()) + strconv.Itoa(this.alt) + fmt.Sprint(this.context) +
fmt.Sprint(this.semanticContext) + f + fmt.Sprint(this.lexerActionExecutor)
}
func (this *LexerATNConfig) equals(other *ATNConfig) bool {
func (this *LexerATNConfig) equals(other interface{}) bool {
othert, ok := other.(*LexerATNConfig)
@@ -231,7 +292,7 @@ func (this *LexerATNConfig) equals(other *ATNConfig) bool {
if (this.lexerActionExecutor != nil){
b = !this.lexerActionExecutor.equals(othert.lexerActionExecutor)
} else {
b = !othert.lexerActionExecutor
b = othert.lexerActionExecutor != nil
}
if (b) {
@@ -242,7 +303,7 @@ func (this *LexerATNConfig) equals(other *ATNConfig) bool {
}
}
func checkNonGreedyDecision(source *LexerATNConfig, target *ATNState) bool {
func checkNonGreedyDecision(source *LexerATNConfig, target IATNState) bool {
ds, ok := target.(*DecisionState)
return source.passedThroughNonGreedyDecision || (ok && ds.nonGreedy)
}
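Several methods above (equals on ATNConfig and LexerATNConfig) now take interface{} and recover the concrete type with a comma-ok type assertion instead of a typed parameter. A minimal, self-contained illustration of that idiom, using a placeholder type rather than the runtime's:

package main

import "fmt"

type config struct{ alt int }

// equals accepts any value; the comma-ok assertion fails gracefully
// instead of panicking when the dynamic type is not *config.
func (c *config) equals(other interface{}) bool {
	o, ok := other.(*config)
	if !ok {
		return false
	}
	return c.alt == o.alt
}

func main() {
	a, b := &config{alt: 1}, &config{alt: 1}
	fmt.Println(a.equals(b))  // true
	fmt.Println(a.equals(42)) // false, no panic
}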
@ -1,39 +1,39 @@
|
|||
package antlr4
|
||||
import (
|
||||
"math"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
|
||||
//
|
||||
// Specialized {@link Set}{@code <}{@link ATNConfig}{@code >} that can track
|
||||
// info about the set, with support for combining similar configurations using a
|
||||
// graph-structured stack.
|
||||
///
|
||||
|
||||
func hashATNConfig(c *ATNConfig) string {
|
||||
return c.shortHashString()
|
||||
func hashATNConfig(c interface{}) string {
|
||||
return c.(*ATNConfig).shortHashString()
|
||||
}
|
||||
|
||||
func equalATNConfigs(a, b *ATNConfig) bool {
|
||||
func equalATNConfigs(a, b interface{}) bool {
|
||||
if ( a==b ) {
|
||||
return true
|
||||
}
|
||||
if ( a==nil || b==nil ) {
|
||||
return false
|
||||
}
|
||||
return a.state.stateNumber==b.state.stateNumber && a.alt==b.alt && a.semanticContext.equals(b.semanticContext)
|
||||
return a.(*ATNConfig).state.getStateNumber()==b.(*ATNConfig).state.getStateNumber() &&
|
||||
a.(*ATNConfig).alt==b.(*ATNConfig).alt &&
|
||||
a.(*ATNConfig).semanticContext.equals(b.(*ATNConfig).semanticContext)
|
||||
}
|
||||
|
||||
type ATNConfigSet struct {
|
||||
readOnly bool
|
||||
fullCtx bool
|
||||
configLookup Set
|
||||
conflictingAlts BitSet
|
||||
configLookup *Set
|
||||
conflictingAlts *BitSet
|
||||
cachedHashString string
|
||||
hasSemanticContext bool
|
||||
dipsIntoOuterContext bool
|
||||
configs []*ATNConfig
|
||||
configs []IATNConfig
|
||||
uniqueAlt int
|
||||
}
|
||||
|
||||
|
@ -70,7 +70,7 @@ func (a *ATNConfigSet) InitATNConfigSet(fullCtx bool) {
|
|||
// we've made a.readonly.
|
||||
a.readOnly = false
|
||||
// Track the elements as they are added to the set supports get(i)///
|
||||
a.configs = make([]*ATNConfig)
|
||||
a.configs = make([]IATNConfig, 0)
|
||||
|
||||
// TODO: these fields make me pretty uncomfortable but nice to pack up info
|
||||
// together, saves recomputation
|
||||
|
@ -96,50 +96,50 @@ func (a *ATNConfigSet) InitATNConfigSet(fullCtx bool) {
|
|||
// <p>This method updates {@link //dipsIntoOuterContext} and
|
||||
// {@link //hasSemanticContext} when necessary.</p>
|
||||
// /
|
||||
func (this *ATNConfigSet) add(config *ATNConfig, mergeCache DoubleDict) bool {
|
||||
func (this *ATNConfigSet) add(config IATNConfig, mergeCache *DoubleDict) bool {
|
||||
|
||||
if (this.readOnly) {
|
||||
panic("This set is readonly")
|
||||
}
|
||||
if (config.semanticContext != SemanticContextNONE) {
|
||||
if (config.getSemanticContext() != SemanticContextNONE) {
|
||||
this.hasSemanticContext = true
|
||||
}
|
||||
if (config.reachesIntoOuterContext > 0) {
|
||||
if (config.getReachesIntoOuterContext() > 0) {
|
||||
this.dipsIntoOuterContext = true
|
||||
}
|
||||
var existing *ATNConfig = this.configLookup.add(config).(*ATNConfig)
|
||||
var existing = this.configLookup.add(config).(IATNConfig)
|
||||
if (existing == config) {
|
||||
this.cachedHashString = "-1"
|
||||
this.configs = append(this.configs, config )// track order here
|
||||
this.configs = append( this.configs, config )// track order here
|
||||
return true
|
||||
}
|
||||
// a previous (s,i,pi,_), merge with it and save result
|
||||
var rootIsWildcard = !this.fullCtx
|
||||
var merged = merge(existing.context, config.context, rootIsWildcard, mergeCache)
|
||||
var merged = merge(existing.getContext(), config.getContext(), rootIsWildcard, mergeCache)
|
||||
// no need to check for existing.context, config.context in cache
|
||||
// since only way to create Newgraphs is "call rule" and here. We
|
||||
// cache at both places.
|
||||
existing.reachesIntoOuterContext = math.Max( existing.reachesIntoOuterContext, config.reachesIntoOuterContext)
|
||||
existing.setReachesIntoOuterContext( intMax( existing.getReachesIntoOuterContext(), config.getReachesIntoOuterContext()) )
|
||||
// make sure to preserve the precedence filter suppression during the merge
|
||||
if (config.precedenceFilterSuppressed) {
|
||||
existing.precedenceFilterSuppressed = true
|
||||
if (config.getPrecedenceFilterSuppressed()) {
|
||||
existing.setPrecedenceFilterSuppressed( true )
|
||||
}
|
||||
existing.context = merged // replace context no need to alt mapping
|
||||
existing.setContext( merged )// replace context no need to alt mapping
|
||||
return true
|
||||
}
|
||||
|
||||
func (this *ATNConfigSet) getStates() {
|
||||
func (this *ATNConfigSet) getStates() *Set {
|
||||
var states = NewSet(nil,nil)
|
||||
for i := 0; i < len(this.configs); i++ {
|
||||
states.add(this.configs[i].state)
|
||||
states.add(this.configs[i].getState())
|
||||
}
|
||||
return states
|
||||
}
|
||||
|
||||
func (this *ATNConfigSet) getPredicates() []SemanticContext {
|
||||
var preds = make([]SemanticContext)
|
||||
var preds = make([]SemanticContext,0)
|
||||
for i := 0; i < len(this.configs); i++ {
|
||||
c := this.configs[i].semanticContext
|
||||
c := this.configs[i].getSemanticContext()
|
||||
if (c != SemanticContextNONE) {
|
||||
preds = append(preds, c)
|
||||
}
|
||||
|
@ -147,7 +147,7 @@ func (this *ATNConfigSet) getPredicates() []SemanticContext {
|
|||
return preds
|
||||
}
|
||||
|
||||
func (this *ATNConfigSet) getItems() []*ATNConfig {
|
||||
func (this *ATNConfigSet) getItems() []IATNConfig {
|
||||
return this.configs
|
||||
}
|
||||
|
||||
|
@ -155,12 +155,12 @@ func (this *ATNConfigSet) optimizeConfigs(interpreter *ATNSimulator) {
|
|||
if (this.readOnly) {
|
||||
panic("This set is readonly")
|
||||
}
|
||||
if (this.configLookup.length == 0) {
|
||||
if (this.configLookup.length() == 0) {
|
||||
return
|
||||
}
|
||||
for i := 0; i < len(this.configs); i++ {
|
||||
var config = this.configs[i]
|
||||
config.context = interpreter.getCachedContext(config.context)
|
||||
config.setContext(interpreter.getCachedContext(config.getContext()))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -200,7 +200,7 @@ func (this *ATNConfigSet) hashString() string {
|
|||
}
|
||||
}
|
||||
|
||||
func (this *ATNConfigSet) hashConfigs() {
|
||||
func (this *ATNConfigSet) hashConfigs() string {
|
||||
var s = ""
|
||||
for _, c := range this.configs {
|
||||
s += fmt.Sprint(c)
|
||||
|
@ -234,7 +234,7 @@ func (this *ATNConfigSet) clear() {
|
|||
if (this.readOnly) {
|
||||
panic("This set is readonly")
|
||||
}
|
||||
this.configs = make([]*ATNConfig)
|
||||
this.configs = make([]IATNConfig, 0)
|
||||
this.cachedHashString = "-1"
|
||||
this.configLookup = NewSet(hashATNConfig, equalATNConfigs)
|
||||
}
@@ -18,6 +18,6 @@ func NewATNDeserializationOptions(copyFrom *ATNDeserializationOptions) *ATNDeser
return o
}
var ATNDeserializationOptionsdefaultOptions = &ATNDeserializationOptions{true}
var ATNDeserializationOptionsdefaultOptions = &ATNDeserializationOptions{true,false,false}
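The one-line fix above reflects a basic Go rule: a positional composite literal must supply a value for every field, so once ATNDeserializationOptions gained extra fields, &ATNDeserializationOptions{true} no longer compiled. A small sketch of the rule; the field names here are assumptions for illustration, not the runtime's actual ones:

package main

type deserializationOptions struct {
	verifyATN                     bool
	readOnly                      bool
	generateRuleBypassTransitions bool
}

// Positional form: all three fields must appear, in declaration order.
var defaultOptions = &deserializationOptions{true, false, false}

// Named form: omitted fields take their zero value, which is usually the
// safer style when a struct may grow new fields later.
var namedOptions = &deserializationOptions{verifyATN: true}

func main() {
	_ = defaultOptions
	_ = namedOptions
}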
@ -3,6 +3,7 @@ import (
|
|||
"strings"
|
||||
"fmt"
|
||||
"encoding/hex"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// This is the earliest supported serialized UUID.
|
||||
|
@ -11,15 +12,15 @@ var BASE_SERIALIZED_UUID = "AADB8D7E-AEEF-4415-AD2B-8204D6CF042E"
|
|||
|
||||
// This list contains all of the currently supported UUIDs, ordered by when
|
||||
// the feature first appeared in this branch.
|
||||
var SUPPORTED_UUIDS = [...]string{ BASE_SERIALIZED_UUID }
|
||||
var SUPPORTED_UUIDS = []string{ BASE_SERIALIZED_UUID }
|
||||
|
||||
var SERIALIZED_VERSION = 3
|
||||
|
||||
// This is the current serialized UUID.
|
||||
var SERIALIZED_UUID = BASE_SERIALIZED_UUID
|
||||
|
||||
func InitArray( length int, value interface{}) {
|
||||
var tmp = make([]interface{}, length)
|
||||
func initIntArray( length, value int) []int {
|
||||
var tmp = make([]int, length)
|
||||
|
||||
for i := range tmp {
|
||||
tmp[i] = value
|
||||
|
@ -30,16 +31,16 @@ func InitArray( length int, value interface{}) {
|
|||
|
||||
type ATNDeserializer struct {
|
||||
|
||||
deserializationOptions ATNDeserializationOptions
|
||||
deserializationOptions *ATNDeserializationOptions
|
||||
data []rune
|
||||
pos int
|
||||
uuid string
|
||||
|
||||
}
|
||||
|
||||
func NewATNDeserializer (options ATNDeserializationOptions) *ATNDeserializer {
|
||||
func NewATNDeserializer (options *ATNDeserializationOptions) *ATNDeserializer {
|
||||
|
||||
if ( options== nil ) {
|
||||
if ( options == nil ) {
|
||||
options = ATNDeserializationOptionsdefaultOptions
|
||||
}
|
||||
|
||||
|
@ -62,18 +63,18 @@ func NewATNDeserializer (options ATNDeserializationOptions) *ATNDeserializer {
|
|||
// serialized ATN at or after the feature identified by {@code feature} was
|
||||
// introduced otherwise, {@code false}.
|
||||
|
||||
func stringInSlice(a string, list []string) bool {
|
||||
for _, b := range list {
|
||||
func stringInSlice(a string, list []string) int {
|
||||
for i, b := range list {
|
||||
if b == a {
|
||||
return true
|
||||
return i
|
||||
}
|
||||
}
|
||||
return false
|
||||
return -1
|
||||
}
|
||||
|
||||
func (this *ATNDeserializer) isFeatureSupported(feature, actualUuid string) bool {
|
||||
var idx1 = stringInSlice( feature, SUPPORTED_UUIDS )
|
||||
if (idx1<0) {
|
||||
if (idx1 < 0) {
|
||||
return false
|
||||
}
|
||||
var idx2 = stringInSlice( actualUuid, SUPPORTED_UUIDS )
|
||||
|
@ -136,13 +137,13 @@ func (this *ATNDeserializer) reset(data []rune) {
|
|||
func (this *ATNDeserializer) checkVersion() {
|
||||
var version = this.readInt()
|
||||
if ( version != SERIALIZED_VERSION ) {
|
||||
panic ("Could not deserialize ATN with version " + version + " (expected " + SERIALIZED_VERSION + ").")
|
||||
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(SERIALIZED_VERSION) + ").")
|
||||
}
|
||||
}
|
||||
|
||||
func (this *ATNDeserializer) checkUUID() {
|
||||
var uuid = this.readUUID()
|
||||
if ( strings.Index(uuid, SUPPORTED_UUIDS )<0) {
|
||||
if ( stringInSlice(uuid, SUPPORTED_UUIDS ) <0 ) {
|
||||
panic("Could not deserialize ATN with UUID: " + uuid + " (expected " + SERIALIZED_UUID + " or a legacy UUID).")
|
||||
}
|
||||
this.uuid = uuid
|
||||
|
@ -166,8 +167,8 @@ type BlockStartStateIntPair struct {
|
|||
|
||||
func (this *ATNDeserializer) readStates(atn *ATN) {
|
||||
|
||||
var loopBackStateNumbers = make([]LoopEndStateIntPair)
|
||||
var endStateNumbers = make([]BlockStartStateIntPair)
|
||||
var loopBackStateNumbers = make([]LoopEndStateIntPair,0)
|
||||
var endStateNumbers = make([]BlockStartStateIntPair,0)
|
||||
|
||||
var nstates = this.readInt()
|
||||
for i :=0; i<nstates; i++ {
|
||||
|
@ -184,10 +185,10 @@ func (this *ATNDeserializer) readStates(atn *ATN) {
|
|||
var s = this.stateFactory(stype, ruleIndex)
|
||||
if (stype == ATNStateLOOP_END) { // special case
|
||||
var loopBackStateNumber = this.readInt()
|
||||
loopBackStateNumbers = append( loopBackStateNumbers, LoopEndStateIntPair{s, loopBackStateNumber})
|
||||
loopBackStateNumbers = append( loopBackStateNumbers, LoopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
|
||||
} else if _, ok := s.(*BlockStartState); ok {
|
||||
var endStateNumber = this.readInt()
|
||||
endStateNumbers = append( endStateNumbers, BlockStartStateIntPair{s, endStateNumber})
|
||||
endStateNumbers = append( endStateNumbers, BlockStartStateIntPair{s.(*BlockStartState), endStateNumber})
|
||||
}
|
||||
atn.addState(s)
|
||||
}
|
||||
|
@ -195,12 +196,12 @@ func (this *ATNDeserializer) readStates(atn *ATN) {
|
|||
// state instances have been initialized
|
||||
for j:=0; j<len(loopBackStateNumbers); j++ {
|
||||
pair := loopBackStateNumbers[j]
|
||||
pair.item0.loopBackState = atn.states[pair[1]]
|
||||
pair.item0.loopBackState = atn.states[pair.item1]
|
||||
}
|
||||
|
||||
for j:=0; j<len(endStateNumbers); j++ {
|
||||
pair := endStateNumbers[j]
|
||||
pair.item0.endState = atn.states[pair[1]]
|
||||
pair.item0.endState = atn.states[pair.item1].(*BlockEndState)
|
||||
}
|
||||
|
||||
var numNonGreedyStates = this.readInt()
|
||||
|
@ -220,12 +221,12 @@ func (this *ATNDeserializer) readRules(atn *ATN) {
|
|||
|
||||
var nrules = this.readInt()
|
||||
if (atn.grammarType == ATNTypeLexer ) {
|
||||
atn.ruleToTokenType = InitArray(nrules, 0)
|
||||
atn.ruleToTokenType = make([]int, nrules) // initIntArray(nrules, 0)
|
||||
}
|
||||
atn.ruleToStartState = InitArray(nrules, 0)
|
||||
atn.ruleToStartState = make([]*RuleStartState, nrules) // initIntArray(nrules, 0)
|
||||
for i:=0; i<nrules; i++ {
|
||||
var s = this.readInt()
|
||||
var startState = atn.states[s]
|
||||
var startState = atn.states[s].(*RuleStartState)
|
||||
atn.ruleToStartState[i] = startState
|
||||
if ( atn.grammarType == ATNTypeLexer ) {
|
||||
var tokenType = this.readInt()
|
||||
|
@ -235,14 +236,13 @@ func (this *ATNDeserializer) readRules(atn *ATN) {
|
|||
atn.ruleToTokenType[i] = tokenType
|
||||
}
|
||||
}
|
||||
atn.ruleToStopState = InitArray(nrules, 0)
|
||||
atn.ruleToStopState = make([]*RuleStopState, nrules) //initIntArray(nrules, 0)
|
||||
for i:=0; i<len(atn.states); i++ {
|
||||
var state = atn.states[i]
|
||||
if _, ok := state.(*RuleStopState); !ok {
|
||||
continue
|
||||
if s2, ok := state.(*RuleStopState); ok {
|
||||
atn.ruleToStopState[s2.ruleIndex] = s2
|
||||
atn.ruleToStartState[s2.ruleIndex].stopState = s2
|
||||
}
|
||||
atn.ruleToStopState[state.ruleIndex] = state
|
||||
atn.ruleToStartState[state.ruleIndex].stopState = state
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -250,12 +250,12 @@ func (this *ATNDeserializer) readModes(atn *ATN) {
|
|||
var nmodes = this.readInt()
|
||||
for i:=0; i<nmodes; i++ {
|
||||
var s = this.readInt()
|
||||
atn.modeToStartState = append(atn.modeToStartState, atn.states[s])
|
||||
atn.modeToStartState = append(atn.modeToStartState, atn.states[s].(*TokensStartState))
|
||||
}
|
||||
}
|
||||
|
||||
func (this *ATNDeserializer) readSets(atn *ATN) []*IntervalSet {
|
||||
var sets = make([]*IntervalSet)
|
||||
var sets = make([]*IntervalSet,0)
|
||||
var m = this.readInt()
|
||||
for i:=0; i<m; i++ {
|
||||
var iset = NewIntervalSet()
|
||||
|
@ -291,20 +291,20 @@ func (this *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
|
|||
// edges for rule stop states can be derived, so they aren't serialized
|
||||
for i:=0; i<len(atn.states); i++ {
|
||||
state := atn.states[i]
|
||||
for j:=0; j<len(state.transitions); j++ {
|
||||
var t,ok = state.transitions[j].(*RuleTransition)
|
||||
for j:=0; j<len(state.getTransitions()); j++ {
|
||||
var t,ok = state.getTransitions()[j].(*RuleTransition)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
var outermostPrecedenceReturn = -1
|
||||
if (atn.ruleToStartState[t.target.ruleIndex].isPrecedenceRule) {
|
||||
if (atn.ruleToStartState[t.getTarget().getRuleIndex()].isPrecedenceRule) {
|
||||
if (t.precedence == 0) {
|
||||
outermostPrecedenceReturn = t.target.ruleIndex
|
||||
outermostPrecedenceReturn = t.getTarget().getRuleIndex()
|
||||
}
|
||||
}
|
||||
|
||||
trans := NewEpsilonTransition(t.followState, outermostPrecedenceReturn)
|
||||
atn.ruleToStopState[t.target.ruleIndex].addTransition(trans)
|
||||
atn.ruleToStopState[t.getTarget().getRuleIndex()].addTransition(trans, -1)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -322,16 +322,16 @@ func (this *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
|
|||
}
|
||||
s2.endState.startState = state
|
||||
}
|
||||
if _, ok := state.(*PlusLoopbackState); ok {
|
||||
for j:=0; j<len(state.transitions); j++ {
|
||||
target := state.transitions[j].target
|
||||
if s2, ok := state.(*PlusLoopbackState); ok {
|
||||
for j:=0; j<len(s2.getTransitions()); j++ {
|
||||
target := s2.getTransitions()[j].getTarget()
|
||||
if t2, ok := target.(*PlusBlockStartState); ok {
|
||||
t2.loopBackState = state
|
||||
}
|
||||
}
|
||||
} else if _, ok := state.(*StarLoopbackState); ok {
|
||||
for j:=0; j<len(state.transitions); j++ {
|
||||
target := state.transitions[j].target
|
||||
} else if s2, ok := state.(*StarLoopbackState); ok {
|
||||
for j:=0; j<len(s2.getTransitions()); j++ {
|
||||
target := s2.getTransitions()[j].getTarget()
|
||||
if t2, ok := target.(*StarLoopEntryState); ok {
|
||||
t2.loopBackState = state
|
||||
}
|
||||
|
@ -353,7 +353,7 @@ func (this *ATNDeserializer) readDecisions(atn *ATN) {
|
|||
func (this *ATNDeserializer) readLexerActions(atn *ATN) {
|
||||
if (atn.grammarType == ATNTypeLexer) {
|
||||
var count = this.readInt()
|
||||
atn.lexerActions = InitArray(count, nil)
|
||||
atn.lexerActions = make([]ILexerAction, count) // initIntArray(count, nil)
|
||||
for i :=0; i<count; i++ {
|
||||
var actionType = this.readInt()
|
||||
var data1 = this.readInt()
|
||||
|
@ -391,21 +391,22 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
|
|||
atn.addState(bypassStop)
|
||||
|
||||
bypassStart.endState = bypassStop
|
||||
atn.defineDecisionState(bypassStart)
|
||||
var bs = bypassStart
|
||||
atn.defineDecisionState(bypassStart.DecisionState)
|
||||
|
||||
bypassStop.startState = bypassStart
|
||||
|
||||
var excludeTransition *ATNState = nil
|
||||
var endState *Transition = nil
|
||||
var excludeTransition ITransition = nil
|
||||
var endState IATNState = nil
|
||||
|
||||
if (atn.ruleToStartState[idx].isPrecedenceRule) {
|
||||
// wrap from the beginning of the rule to the StarLoopEntryState
|
||||
endState = nil
|
||||
for i:=0; i<len(atn.states); i++ {
|
||||
state := atn.states[i]
|
||||
if (this.stateIsEndStateFor(state, idx)) {
|
||||
if (this.stateIsEndStateFor(state, idx) != nil) {
|
||||
endState = state
|
||||
excludeTransition = state.(*StarLoopEntryState).loopBackState.transitions[0]
|
||||
excludeTransition = state.(*StarLoopEntryState).loopBackState.getTransitions()[0]
|
||||
break
|
||||
}
|
||||
}
|
||||
|
@ -420,13 +421,13 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
|
|||
// target blockEnd instead
|
||||
for i:=0; i< len(atn.states); i++ {
|
||||
state := atn.states[i]
|
||||
for j :=0; j<len(state.transitions); j++ {
|
||||
var transition = state.transitions[j]
|
||||
for j :=0; j<len(state.getTransitions()); j++ {
|
||||
var transition = state.getTransitions()[j]
|
||||
if (transition == excludeTransition) {
|
||||
continue
|
||||
}
|
||||
if (transition.target == endState) {
|
||||
transition.target = bypassStop
|
||||
if (transition.getTarget() == endState) {
|
||||
transition.setTarget( bypassStop )
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -434,13 +435,13 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
|
|||
// all transitions leaving the rule start state need to leave blockStart
|
||||
// instead
|
||||
var ruleToStartState = atn.ruleToStartState[idx]
|
||||
var count = len(ruleToStartState.transitions)
|
||||
var count = len(ruleToStartState.getTransitions())
|
||||
for ( count > 0) {
|
||||
bypassStart.addTransition(ruleToStartState.transitions[count-1],-1)
|
||||
ruleToStartState.transitions = []*Transition{ ruleToStartState.transitions[len(ruleToStartState.transitions) - 1] }
|
||||
bypassStart.addTransition(ruleToStartState.getTransitions()[count-1],-1)
|
||||
ruleToStartState.setTransitions( []ITransition{ ruleToStartState.getTransitions()[len(ruleToStartState.getTransitions()) - 1] })
|
||||
}
|
||||
// link the new states
|
||||
atn.ruleToStartState[idx].addTransition(NewEpsilonTransition(bypassStart,-1))
|
||||
atn.ruleToStartState[idx].addTransition(NewEpsilonTransition(bypassStart,-1), -1)
|
||||
bypassStop.addTransition(NewEpsilonTransition(endState, -1), -1)
|
||||
|
||||
var matchState = NewBasicState()
|
||||
|
@ -449,21 +450,21 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
|
|||
bypassStart.addTransition(NewEpsilonTransition(matchState, -1), -1)
|
||||
}
|
||||
|
||||
func (this *ATNDeserializer) stateIsEndStateFor(state *ATNState, idx int) {
|
||||
if ( state.ruleIndex != idx) {
|
||||
func (this *ATNDeserializer) stateIsEndStateFor(state IATNState, idx int) IATNState {
|
||||
if ( state.getRuleIndex() != idx) {
|
||||
return nil
|
||||
}
|
||||
if _,ok := state.(*StarLoopEntryState); !ok {
|
||||
return nil
|
||||
}
|
||||
var maybeLoopEndState = state.transitions[len(state.transitions) - 1].target
|
||||
var maybeLoopEndState = state.getTransitions()[len(state.getTransitions()) - 1].getTarget()
|
||||
if _,ok := maybeLoopEndState.(*LoopEndState); !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
_,ok := maybeLoopEndState.transitions[0].target.(*RuleStopState)
|
||||
_,ok := maybeLoopEndState.getTransitions()[0].getTarget().(*RuleStopState)
|
||||
|
||||
if (maybeLoopEndState.epsilonOnlyTransitions && ok) {
|
||||
if (maybeLoopEndState.(*LoopEndState).epsilonOnlyTransitions && ok) {
|
||||
return state
|
||||
} else {
|
||||
return nil
|
||||
|
@ -487,12 +488,13 @@ func (this *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
|
|||
// decision for the closure block that determines whether a
|
||||
// precedence rule should continue or complete.
|
||||
//
|
||||
if ( atn.ruleToStartState[state.ruleIndex].isPrecedenceRule) {
|
||||
var maybeLoopEndState = state.transitions[len(state.transitions) - 1].target
|
||||
if _, ok := maybeLoopEndState.(*LoopEndState); ok {
|
||||
s2,ok2 := maybeLoopEndState.transitions[0].target.(*RuleStopState)
|
||||
if ( maybeLoopEndState.epsilonOnlyTransitions && ok2) {
|
||||
s2.(*StarLoopEntryState).precedenceRuleDecision = true
|
||||
if ( atn.ruleToStartState[state.getRuleIndex()].isPrecedenceRule) {
|
||||
var maybeLoopEndState = state.getTransitions()[len(state.getTransitions()) - 1].getTarget()
|
||||
if s3, ok := maybeLoopEndState.(*LoopEndState); ok {
|
||||
s := maybeLoopEndState.getTransitions()[0].getTarget()
|
||||
_,ok2 := s.(*RuleStopState)
|
||||
if ( s3.epsilonOnlyTransitions && ok2) {
|
||||
s.(*StarLoopEntryState).precedenceRuleDecision = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -510,54 +512,54 @@ func (this *ATNDeserializer) verifyATN(atn *ATN) {
|
|||
if (state == nil) {
|
||||
continue
|
||||
}
|
||||
this.checkCondition(state.epsilonOnlyTransitions || len(state.transitions) <= 1, nil)
|
||||
this.checkCondition(state.getEpsilonOnlyTransitions() || len(state.getTransitions()) <= 1, "")
|
||||
|
||||
switch s2:= state.(type) {
|
||||
|
||||
case *PlusBlockStartState:
|
||||
this.checkCondition(s2.loopBackState != nil,nil)
|
||||
this.checkCondition(s2.loopBackState != nil,"")
|
||||
case *StarLoopEntryState:
|
||||
|
||||
this.checkCondition(s2.loopBackState != nil,nil)
|
||||
this.checkCondition(len(s2.transitions) == 2,nil)
|
||||
this.checkCondition(s2.loopBackState != nil,"")
|
||||
this.checkCondition(len(s2.getTransitions()) == 2,"")
|
||||
|
||||
switch _ := s2.(type) {
|
||||
switch s2 := state.(type) {
|
||||
case *StarBlockStartState:
|
||||
_,ok2 := s2.transitions[1].target.(*LoopEndState)
|
||||
this.checkCondition(ok2, nil)
|
||||
this.checkCondition(!s2.nonGreedy, nil)
|
||||
_,ok2 := s2.getTransitions()[1].getTarget().(*LoopEndState)
|
||||
this.checkCondition(ok2, "")
|
||||
this.checkCondition(!s2.nonGreedy, "")
|
||||
case *LoopEndState:
|
||||
s3,ok2 := s2.transitions[1].target.(*StarBlockStartState)
|
||||
this.checkCondition(ok2, nil)
|
||||
this.checkCondition(s3.nonGreedy, nil)
|
||||
s3,ok2 := s2.getTransitions()[1].getTarget().(*StarBlockStartState)
|
||||
this.checkCondition(ok2, "")
|
||||
this.checkCondition(s3.nonGreedy, "")
|
||||
default:
|
||||
panic("IllegalState")
|
||||
}
|
||||
|
||||
case *StarLoopbackState:
|
||||
this.checkCondition(len(state.transitions) == 1, nil)
|
||||
_,ok2 := state.transitions[0].target.(*StarLoopEntryState)
|
||||
this.checkCondition(ok2, nil)
|
||||
this.checkCondition(len(state.getTransitions()) == 1, "")
|
||||
_,ok2 := state.getTransitions()[0].getTarget().(*StarLoopEntryState)
|
||||
this.checkCondition(ok2, "")
|
||||
case *LoopEndState:
|
||||
this.checkCondition(s2.loopBackState != nil, nil)
|
||||
this.checkCondition(s2.loopBackState != nil, "")
|
||||
case *RuleStartState:
|
||||
this.checkCondition(s2.stopState != nil, nil)
|
||||
this.checkCondition(s2.stopState != nil, "")
|
||||
case *BlockStartState:
|
||||
this.checkCondition(s2.endState != nil, nil)
|
||||
this.checkCondition(s2.endState != nil, "")
|
||||
case *BlockEndState:
|
||||
this.checkCondition(s2.startState != nil, nil)
|
||||
this.checkCondition(s2.startState != nil, "")
|
||||
case *DecisionState:
|
||||
this.checkCondition(len(s2.transitions) <= 1 || s2.decision >= 0, nil)
|
||||
this.checkCondition(len(s2.getTransitions()) <= 1 || s2.decision >= 0, "")
|
||||
default:
|
||||
_, ok := s2.(*RuleStopState)
|
||||
this.checkCondition(len(s2.transitions) <= 1 || ok, nil)
|
||||
this.checkCondition(len(s2.getTransitions()) <= 1 || ok, "")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (this *ATNDeserializer) checkCondition(condition bool, message string) {
|
||||
if (!condition) {
|
||||
if (message==nil) {
|
||||
if (message=="") {
|
||||
message = "IllegalState"
|
||||
}
|
||||
panic(message)
|
||||
|
@ -567,7 +569,7 @@ func (this *ATNDeserializer) checkCondition(condition bool, message string) {
|
|||
func (this *ATNDeserializer) readInt() int {
|
||||
v := this.data[this.pos]
|
||||
this.pos += 1
|
||||
return v
|
||||
return int(v)
|
||||
}
|
||||
|
||||
func (this *ATNDeserializer) readInt32() int {
|
||||
|
@ -576,12 +578,12 @@ func (this *ATNDeserializer) readInt32() int {
|
|||
return low | (high << 16)
|
||||
}
|
||||
|
||||
func (this *ATNDeserializer) readLong() int64 {
|
||||
var low = this.readInt32()
|
||||
var high = this.readInt32()
|
||||
return (low & 0x00000000FFFFFFFF) | (high << 32)
|
||||
}
|
||||
|
||||
//func (this *ATNDeserializer) readLong() int64 {
|
||||
// panic("Not implemented")
|
||||
// var low = this.readInt32()
|
||||
// var high = this.readInt32()
|
||||
// return (low & 0x00000000FFFFFFFF) | (high << int32)
|
||||
//}
|
||||
|
||||
func createByteToHex() []string {
|
||||
var bth = make([]string, 256)
|
||||
|
@ -601,17 +603,17 @@ func (this *ATNDeserializer) readUUID() string {
|
|||
bb[2*i] = (integer >> 8) & 0xFF
|
||||
}
|
||||
return byteToHex[bb[0]] + byteToHex[bb[1]] +
|
||||
byteToHex[bb[2]] + byteToHex[bb[3]] + '-' +
|
||||
byteToHex[bb[4]] + byteToHex[bb[5]] + '-' +
|
||||
byteToHex[bb[6]] + byteToHex[bb[7]] + '-' +
|
||||
byteToHex[bb[8]] + byteToHex[bb[9]] + '-' +
|
||||
byteToHex[bb[2]] + byteToHex[bb[3]] + "-" +
|
||||
byteToHex[bb[4]] + byteToHex[bb[5]] + "-" +
|
||||
byteToHex[bb[6]] + byteToHex[bb[7]] + "-" +
|
||||
byteToHex[bb[8]] + byteToHex[bb[9]] + "-" +
|
||||
byteToHex[bb[10]] + byteToHex[bb[11]] +
|
||||
byteToHex[bb[12]] + byteToHex[bb[13]] +
|
||||
byteToHex[bb[14]] + byteToHex[bb[15]]
|
||||
}
|
||||
|
||||
|
||||
func (this *ATNDeserializer) edgeFactory(atn *ATN, typeIndex int, src, trg *ATNState, arg1, arg2, arg3 int, sets []*IntervalSet) *Transition {
|
||||
func (this *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2, arg3 int, sets []*IntervalSet) ITransition {
|
||||
|
||||
var target = atn.states[trg]
|
||||
|
||||
|
@ -625,7 +627,7 @@ func (this *ATNDeserializer) edgeFactory(atn *ATN, typeIndex int, src, trg *ATNS
|
|||
return NewRangeTransition(target, arg1, arg2)
|
||||
}
|
||||
case TransitionRULE :
|
||||
return NewRuleTransition(atn.states[arg1].(*RuleStartState), arg2, arg3, target)
|
||||
return NewRuleTransition(atn.states[arg1], arg2, arg3, target)
|
||||
case TransitionPREDICATE :
|
||||
return NewPredicateTransition(target, arg1, arg2, arg3 != 0)
|
||||
case TransitionPRECEDENCE:
|
||||
|
@ -649,8 +651,8 @@ func (this *ATNDeserializer) edgeFactory(atn *ATN, typeIndex int, src, trg *ATNS
|
|||
panic("The specified transition type is not valid.")
|
||||
}
|
||||
|
||||
func (this *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) *ATNState {
|
||||
var s *ATNState
|
||||
func (this *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) IATNState {
|
||||
var s IATNState
|
||||
switch (typeIndex) {
|
||||
case ATNStateInvalidType:
|
||||
return nil;
|
||||
|
@ -683,11 +685,11 @@ func (this *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) *ATNState {
|
|||
panic(message)
|
||||
}
|
||||
|
||||
s.ruleIndex = ruleIndex;
|
||||
s.setRuleIndex(ruleIndex)
|
||||
return s;
|
||||
}
|
||||
|
||||
func (this *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) *LexerAction {
|
||||
func (this *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) ILexerAction {
|
||||
switch (typeIndex) {
|
||||
case LexerActionTypeCHANNEL:
|
||||
return NewLexerChannelAction(data1)
|
@@ -42,11 +42,11 @@ func (this *ATNSimulator) InitATNSimulator(atn *ATN, sharedContextCache *Predict
// Must distinguish between missing edge and edge we know leads nowhere///
var ATNSimulatorERROR = NewDFAState(0x7FFFFFFF, NewATNConfigSet(false))
func (this *ATNSimulator) getCachedContext(context *PredictionContext) *PredictionContext {
func (this *ATNSimulator) getCachedContext(context IPredictionContext) IPredictionContext {
if (this.sharedContextCache == nil) {
return context
}
var visited = make(map[*PredictionContext]*PredictionContext)
var visited = make(map[IPredictionContext]IPredictionContext)
return getCachedPredictionContext(context, this.sharedContextCache, visited)
}
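The visited map above switches from concrete pointer keys to interface keys (map[IPredictionContext]IPredictionContext). That is legal in Go as long as every dynamic type stored as a key is comparable; a tiny sketch with placeholder types:

package main

import "fmt"

type IContext interface{ id() int }

type ctx struct{ n int }

func (c ctx) id() int { return c.n }

func main() {
	// Interface-typed keys compare by dynamic type and value.
	visited := make(map[IContext]IContext)
	visited[ctx{1}] = ctx{2}
	fmt.Println(visited[ctx{1}].id()) // prints 2
}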
@ -1,67 +1,31 @@
|
|||
package antlr4
|
||||
|
||||
// The following images show the relation of states and
|
||||
// {@link ATNState//transitions} for various grammar constructs.
|
||||
//
|
||||
// <ul>
|
||||
//
|
||||
// <li>Solid edges marked with an &//0949 indicate a required
|
||||
// {@link EpsilonTransition}.</li>
|
||||
//
|
||||
// <li>Dashed edges indicate locations where any transition derived from
|
||||
// {@link Transition} might appear.</li>
|
||||
//
|
||||
// <li>Dashed nodes are place holders for either a sequence of linked
|
||||
// {@link BasicState} states or the inclusion of a block representing a nested
|
||||
// construct in one of the forms below.</li>
|
||||
//
|
||||
// <li>Nodes showing multiple outgoing alternatives with a {@code ...} support
|
||||
// any number of alternatives (one or more). Nodes without the {@code ...} only
|
||||
// support the exact number of alternatives shown in the diagram.</li>
|
||||
//
|
||||
// </ul>
|
||||
//
|
||||
// <h2>Basic Blocks</h2>
|
||||
//
|
||||
// <h3>Rule</h3>
|
||||
//
|
||||
// <embed src="images/Rule.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h3>Block of 1 or more alternatives</h3>
|
||||
//
|
||||
// <embed src="images/Block.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h2>Greedy Loops</h2>
|
||||
//
|
||||
// <h3>Greedy Closure: {@code (...)*}</h3>
|
||||
//
|
||||
// <embed src="images/ClosureGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h3>Greedy Positive Closure: {@code (...)+}</h3>
|
||||
//
|
||||
// <embed src="images/PositiveClosureGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h3>Greedy Optional: {@code (...)?}</h3>
|
||||
//
|
||||
// <embed src="images/OptionalGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h2>Non-Greedy Loops</h2>
|
||||
//
|
||||
// <h3>Non-Greedy Closure: {@code (...)*?}</h3>
|
||||
//
|
||||
// <embed src="images/ClosureNonGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h3>Non-Greedy Positive Closure: {@code (...)+?}</h3>
|
||||
//
|
||||
// <embed src="images/PositiveClosureNonGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
// <h3>Non-Greedy Optional: {@code (...)??}</h3>
|
||||
//
|
||||
// <embed src="images/OptionalNonGreedy.svg" type="image/svg+xml"/>
|
||||
//
|
||||
import "strconv"
|
||||
|
||||
var INITIAL_NUM_TRANSITIONS = 4
|
||||
|
||||
type IATNState interface {
|
||||
|
||||
getEpsilonOnlyTransitions() bool
|
||||
|
||||
getRuleIndex() int
|
||||
setRuleIndex(int)
|
||||
|
||||
getNextTokenWithinRule() *IntervalSet
|
||||
setNextTokenWithinRule(*IntervalSet)
|
||||
|
||||
getATN() *ATN
|
||||
setATN(*ATN)
|
||||
|
||||
getStateType() int
|
||||
|
||||
getStateNumber() int
|
||||
setStateNumber(int)
|
||||
|
||||
getTransitions() []ITransition
|
||||
setTransitions( []ITransition )
|
||||
addTransition(ITransition, int)
|
||||
}
|
||||
|
||||
type ATNState struct {
|
||||
// Which ATN are we in?
|
||||
atn *ATN
|
||||
|
@ -70,9 +34,9 @@ type ATNState struct {
|
|||
ruleIndex int
|
||||
epsilonOnlyTransitions bool
|
||||
// Track the transitions emanating from this ATN state.
|
||||
transitions []*Transition
|
||||
transitions []ITransition
|
||||
// Used to cache lookahead during parsing, not used during construction
|
||||
nextTokenWithinRule *Token
|
||||
nextTokenWithinRule *IntervalSet
|
||||
}
|
||||
|
||||
func NewATNState() *ATNState {
|
||||
|
@ -80,7 +44,7 @@ func NewATNState() *ATNState {
|
|||
as := new(ATNState)
|
||||
as.InitATNState()
|
||||
|
||||
return as
|
||||
return as
|
||||
}
|
||||
|
||||
func (as *ATNState) InitATNState(){
|
||||
|
@ -88,18 +52,65 @@ func (as *ATNState) InitATNState(){
|
|||
// Which ATN are we in?
|
||||
as.atn = nil
|
||||
as.stateNumber = ATNStateINVALID_STATE_NUMBER
|
||||
as.stateType = nil
|
||||
as.stateType = ATNStateInvalidType
|
||||
as.ruleIndex = 0 // at runtime, we don't have Rule objects
|
||||
as.epsilonOnlyTransitions = false
|
||||
// Track the transitions emanating from this ATN state.
|
||||
as.transitions = make([]Transition, 0)
|
||||
as.transitions = make([]ITransition, 0)
|
||||
// Used to cache lookahead during parsing, not used during construction
|
||||
as.nextTokenWithinRule = nil
|
||||
|
||||
}
|
||||
|
||||
func (as *ATNState) getRuleIndex() int {
|
||||
return as.ruleIndex
|
||||
}
|
||||
|
||||
func (as *ATNState) setRuleIndex(v int) {
|
||||
as.ruleIndex = v
|
||||
}
|
||||
func (as *ATNState) getEpsilonOnlyTransitions() bool {
|
||||
return as.epsilonOnlyTransitions
|
||||
}
|
||||
|
||||
func (as *ATNState) getATN() *ATN {
|
||||
return as.atn
|
||||
}
|
||||
|
||||
func (as *ATNState) setATN(atn *ATN) {
|
||||
as.atn = atn
|
||||
}
|
||||
|
||||
func (as *ATNState) getTransitions() []ITransition {
|
||||
return as.transitions
|
||||
}
|
||||
|
||||
func (as *ATNState) setTransitions(t []ITransition) {
|
||||
as.transitions = t
|
||||
}
|
||||
|
||||
func (as *ATNState) getStateType() int {
|
||||
return as.stateType
|
||||
}
|
||||
|
||||
func (as *ATNState) getStateNumber() int {
|
||||
return as.stateNumber
|
||||
}
|
||||
|
||||
func (as *ATNState) setStateNumber(stateNumber int) {
|
||||
as.stateNumber = stateNumber
|
||||
}
|
||||
|
||||
func (as *ATNState) getNextTokenWithinRule() *IntervalSet {
|
||||
return as.nextTokenWithinRule
|
||||
}
|
||||
|
||||
func (as *ATNState) setNextTokenWithinRule(v *IntervalSet) {
|
||||
as.nextTokenWithinRule = v
|
||||
}
|
||||
|
||||
const (
|
||||
// constants for serialization
|
||||
// constants for serialization
|
||||
ATNStateInvalidType = 0
|
||||
ATNStateBASIC = 1
|
||||
ATNStateRULE_START = 2
|
||||
|
@ -133,49 +144,49 @@ const (
|
|||
// "LOOP_END" ]
|
||||
|
||||
func (this *ATNState) toString() string {
|
||||
return this.stateNumber
|
||||
return strconv.Itoa(this.stateNumber)
|
||||
}
|
||||
|
||||
func (this *ATNState) equals(other *ATNState) bool {
|
||||
if ok := other.(ATNState); ok {
|
||||
return this.stateNumber == other.stateNumber
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
func (this *ATNState) equals(other interface{}) bool {
|
||||
if ot, ok := other.(IATNState); ok {
|
||||
return this.stateNumber == ot.getStateNumber()
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func (this *ATNState) isNonGreedyExitState() {
|
||||
return false
|
||||
func (this *ATNState) isNonGreedyExitState() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (this *ATNState) addTransition(trans *Transition, index int) {
|
||||
if ( len(this.transitions) == 0 ) {
|
||||
this.epsilonOnlyTransitions = trans.isEpsilon
|
||||
} else if(this.epsilonOnlyTransitions != trans.isEpsilon) {
|
||||
this.epsilonOnlyTransitions = false
|
||||
}
|
||||
if (index==-1) {
|
||||
func (this *ATNState) addTransition(trans ITransition, index int) {
|
||||
if ( len(this.transitions) == 0 ) {
|
||||
this.epsilonOnlyTransitions = trans.getIsEpsilon()
|
||||
} else if(this.epsilonOnlyTransitions != trans.getIsEpsilon()) {
|
||||
this.epsilonOnlyTransitions = false
|
||||
}
|
||||
if (index==-1) {
|
||||
this.transitions = append(this.transitions, trans)
|
||||
} else {
|
||||
this.transitions = append(this.transitions[:index], append([]*Transition{ trans }, this.transitions[index:]...)...)
|
||||
// this.transitions.splice(index, 1, trans)
|
||||
}
|
||||
} else {
|
||||
this.transitions = append(this.transitions[:index], append([]ITransition{ trans }, this.transitions[index:]...)...)
|
||||
// this.transitions.splice(index, 1, trans)
|
||||
}
|
||||
}
|
||||
|
||||
type BasicState struct {
|
||||
ATNState
|
||||
*ATNState
|
||||
}
|
||||
|
||||
func NewBasicState() *BasicState {
|
||||
this := new(BasicState)
|
||||
this.InitATNState()
|
||||
|
||||
this.stateType = ATNStateBASIC
|
||||
return this
|
||||
this.stateType = ATNStateBASIC
|
||||
return this
|
||||
}
|
||||
|
||||
type DecisionState struct {
|
||||
ATNState
|
||||
*ATNState
|
||||
|
||||
decision int
|
||||
nonGreedy bool
|
||||
|
@ -188,7 +199,7 @@ func NewDecisionState() *DecisionState {
|
|||
this.InitATNState()
|
||||
this.InitDecisionState()
|
||||
|
||||
return this
|
||||
return this
|
||||
}
|
||||
|
||||
func (this *DecisionState) InitDecisionState() {
|
||||
|
@ -200,7 +211,7 @@ func (this *DecisionState) InitDecisionState() {
|
|||
|
||||
// The start of a regular {@code (...)} block.
|
||||
type BlockStartState struct {
|
||||
DecisionState
|
||||
*DecisionState
|
||||
|
||||
endState *BlockEndState
|
||||
}
|
||||
|
@ -222,7 +233,7 @@ func (this *BlockStartState) InitBlockStartState() {
|
|||
}
|
||||
|
||||
type BasicBlockStartState struct {
|
||||
BlockStartState
|
||||
*BlockStartState
|
||||
}
|
||||
|
||||
func NewBasicBlockStartState() *BasicBlockStartState {
|
||||
|
@ -237,11 +248,12 @@ func NewBasicBlockStartState() *BasicBlockStartState {
|
|||
return this
|
||||
}
|
||||
|
||||
|
||||
// Terminal node of a simple {@code (a|b|c)} block.
|
||||
type BlockEndState struct {
|
||||
ATNState
|
||||
|
||||
startState *ATNState
|
||||
startState IATNState
|
||||
}
|
||||
|
||||
func NewBlockEndState() *BlockEndState {
|
||||
|
@ -250,9 +262,9 @@ func NewBlockEndState() *BlockEndState {
|
|||
|
||||
this.InitATNState()
|
||||
this.stateType = ATNStateBLOCK_END
|
||||
this.startState = nil
|
||||
this.startState = nil
|
||||
|
||||
return this
|
||||
return this
|
||||
}
|
||||
|
||||
// The last node in the ATN for a rule, unless that rule is the start symbol.
|
||||
|
@ -268,14 +280,14 @@ func NewRuleStopState() *RuleStopState {
|
|||
this := new(RuleStopState)
|
||||
|
||||
this.InitATNState()
|
||||
this.stateType = ATNStateRULE_STOP
|
||||
return this
|
||||
this.stateType = ATNStateRULE_STOP
|
||||
return this
|
||||
}
|
||||
|
||||
type RuleStartState struct {
|
||||
ATNState
|
||||
|
||||
stopState *ATNState
|
||||
stopState IATNState
|
||||
isPrecedenceRule bool
|
||||
}
|
||||
|
||||
|
@ -295,7 +307,7 @@ func NewRuleStartState() *RuleStartState {
|
|||
// one to the loop back to start of the block and one to exit.
|
||||
//
|
||||
type PlusLoopbackState struct {
|
||||
BlockStartState
|
||||
*BlockStartState
|
||||
}
|
||||
|
||||
func NewPlusLoopbackState() *PlusLoopbackState {
|
||||
|
@ -316,9 +328,9 @@ func NewPlusLoopbackState() *PlusLoopbackState {
|
|||
// real decision-making note for {@code A+}.
|
||||
//
|
||||
type PlusBlockStartState struct {
|
||||
BlockStartState
|
||||
*BlockStartState
|
||||
|
||||
loopBackState *ATNState
|
||||
loopBackState IATNState
|
||||
}
|
||||
|
||||
func NewPlusBlockStartState() *PlusBlockStartState {
|
||||
|
@ -330,14 +342,14 @@ func NewPlusBlockStartState() *PlusBlockStartState {
|
|||
this.InitBlockStartState()
|
||||
|
||||
this.stateType = ATNStatePLUS_BLOCK_START
|
||||
this.loopBackState = nil
|
||||
this.loopBackState = nil
|
||||
|
||||
return this
|
||||
return this
|
||||
}
|
||||
|
||||
// The block that begins a closure loop.
|
||||
type StarBlockStartState struct {
|
||||
BlockStartState
|
||||
*BlockStartState
|
||||
}
|
||||
|
||||
func NewStarBlockStartState() *StarBlockStartState {
|
||||
|
@ -355,7 +367,7 @@ func NewStarBlockStartState() *StarBlockStartState {
|
|||
|
||||
|
||||
type StarLoopbackState struct {
|
||||
ATNState
|
||||
*ATNState
|
||||
}
|
||||
|
||||
func NewStarLoopbackState() *StarLoopbackState {
|
||||
|
@ -370,9 +382,9 @@ func NewStarLoopbackState() *StarLoopbackState {
|
|||
|
||||
|
||||
type StarLoopEntryState struct {
|
||||
DecisionState
|
||||
*DecisionState
|
||||
|
||||
loopBackState *ATNState
|
||||
loopBackState IATNState
|
||||
precedenceRuleDecision bool
|
||||
}
|
||||
|
||||
|
@ -384,19 +396,19 @@ func NewStarLoopEntryState() *StarLoopEntryState {
|
|||
this.InitDecisionState()
|
||||
|
||||
this.stateType = ATNStateSTAR_LOOP_ENTRY
|
||||
this.loopBackState = nil
|
||||
this.loopBackState = nil
|
||||
|
||||
// Indicates whether this state can benefit from a precedence DFA during SLL decision making.
|
||||
this.precedenceRuleDecision = false
|
||||
// Indicates whether this state can benefit from a precedence DFA during SLL decision making.
|
||||
this.precedenceRuleDecision = false
|
||||
|
||||
return this
|
||||
return this
|
||||
}
|
||||
|
||||
|
||||
// Mark the end of a * or + loop.
|
||||
type LoopEndState struct {
|
||||
ATNState
|
||||
loopBackState *ATNState
|
||||
*ATNState
|
||||
loopBackState IATNState
|
||||
}
|
||||
|
||||
func NewLoopEndState() *LoopEndState {
|
||||
|
@ -413,7 +425,7 @@ func NewLoopEndState() *LoopEndState {
|
|||
|
||||
// The Tokens rule start state linking to each lexer rule start state */
|
||||
type TokensStartState struct {
|
||||
DecisionState
|
||||
*DecisionState
|
||||
}
|
||||
|
||||
func NewTokensStartState() *TokensStartState {
|
||||
|
|
|
@ -2,10 +2,6 @@ package antlr4
|
|||
|
||||
// Represents the type of recognizer an ATN applies to.
|
||||
|
||||
type ATNType struct {
|
||||
|
||||
}
|
||||
|
||||
const (
|
||||
ATNTypeLexer = 0
|
||||
ATNTypeParser = 1
|
||||
|
|
|
@ -11,6 +11,7 @@
|
|||
// {@link CommonTokenStream}.</p>
|
||||
|
||||
package antlr4
|
||||
import "strconv"
|
||||
|
||||
type IntStream interface {
|
||||
consume()
|
||||
|
@ -28,8 +29,8 @@ type TokenStream interface {
|
|||
|
||||
LT(k int) *Token
|
||||
get(index int) *Token
|
||||
getTokenSource() *TokenSource
|
||||
setTokenSource(*TokenSource)
|
||||
getTokenSource() TokenSource
|
||||
setTokenSource(TokenSource)
|
||||
getText() string
|
||||
getTextFromInterval(*Interval) string
|
||||
getTextFromRuleContext(*RuleContext) string
|
||||
|
@ -38,16 +39,22 @@ type TokenStream interface {
|
|||
|
||||
// bt is just to keep meaningful parameter types to Parser
|
||||
type BufferedTokenStream struct {
|
||||
tokenSource *TokenSource
|
||||
tokenSource TokenSource
|
||||
|
||||
tokens []*Token
|
||||
index int
|
||||
fetchedEOF bool
|
||||
channel int
|
||||
}
|
||||
|
||||
func NewBufferedTokenStream(tokenSource *TokenSource) *BufferedTokenStream {
|
||||
func NewBufferedTokenStream(tokenSource TokenSource) *BufferedTokenStream {
|
||||
|
||||
ts := new(BufferedTokenStream)
|
||||
ts.InitBufferedTokenStream(tokenSource)
|
||||
return ts
|
||||
}
|
||||
|
||||
func (ts *BufferedTokenStream) InitBufferedTokenStream(tokenSource TokenSource){
|
||||
|
||||
// The {@link TokenSource} from which tokens for bt stream are fetched.
|
||||
ts.tokenSource = tokenSource
|
||||
|
@ -55,7 +62,7 @@ func NewBufferedTokenStream(tokenSource *TokenSource) *BufferedTokenStream {
|
|||
// A collection of all tokens fetched from the token source. The list is
|
||||
// considered a complete view of the input once {@link //fetchedEOF} is set
|
||||
// to {@code true}.
|
||||
ts.tokens = make([]Token, 0)
|
||||
ts.tokens = make([]*Token, 0)
|
||||
|
||||
// The index into {@link //tokens} of the current token (next token to
|
||||
// {@link //consume}). {@link //tokens}{@code [}{@link //p}{@code ]} should
|
||||
|
@ -85,7 +92,6 @@ func NewBufferedTokenStream(tokenSource *TokenSource) *BufferedTokenStream {
|
|||
// <ul>
|
||||
ts.fetchedEOF = false
|
||||
|
||||
return ts
|
||||
}
|
||||
|
||||
func (bt *BufferedTokenStream) mark() int {
|
||||
|
@ -139,7 +145,7 @@ func (bt *BufferedTokenStream) consume() {
|
|||
// {@code false}.
|
||||
// @see //get(int i)
|
||||
// /
|
||||
func (bt *BufferedTokenStream) sync(i int) {
|
||||
func (bt *BufferedTokenStream) sync(i int) bool {
|
||||
var n = i - len(bt.tokens) + 1 // how many more elements we need?
|
||||
if (n > 0) {
|
||||
var fetched = bt.fetch(n)
|
||||
|
@ -158,7 +164,7 @@ func (bt *BufferedTokenStream) fetch(n int) int {
|
|||
}
|
||||
|
||||
for i := 0; i < n; i++ {
|
||||
var t *Token = (*bt.tokenSource).nextToken()
|
||||
var t *Token = bt.tokenSource.nextToken()
|
||||
t.tokenIndex = len(bt.tokens)
|
||||
bt.tokens = append(bt.tokens, t)
|
||||
if (t.tokenType == TokenEOF) {
|
||||
|
@ -176,7 +182,7 @@ func (bt *BufferedTokenStream) getTokens(start int, stop int, types *IntervalSet
|
|||
return nil
|
||||
}
|
||||
bt.lazyInit()
|
||||
var subset = make([]*Token)
|
||||
var subset = make([]*Token, 0)
|
||||
if (stop >= len(bt.tokens)) {
|
||||
stop = len(bt.tokens) - 1
|
||||
}
|
||||
|
@ -196,14 +202,14 @@ func (bt *BufferedTokenStream) LA(i int) int {
|
|||
return bt.LT(i).tokenType
|
||||
}
|
||||
|
||||
func (bt *BufferedTokenStream) LB(k int) Token {
|
||||
func (bt *BufferedTokenStream) LB(k int) *Token {
|
||||
if (bt.index - k < 0) {
|
||||
return nil
|
||||
}
|
||||
return bt.tokens[bt.index - k]
|
||||
}
|
||||
|
||||
func (bt *BufferedTokenStream) LT(k int) Token {
|
||||
func (bt *BufferedTokenStream) LT(k int) *Token {
|
||||
bt.lazyInit()
|
||||
if (k == 0) {
|
||||
return nil
|
||||
|
@ -248,10 +254,14 @@ func (bt *BufferedTokenStream) setup() {
|
|||
bt.index = bt.adjustSeekIndex(0)
|
||||
}
|
||||
|
||||
func (bt *BufferedTokenStream) getTokenSource() TokenSource {
|
||||
return bt.tokenSource
|
||||
}
|
||||
|
||||
// Reset bt token stream by setting its token source.///
|
||||
func (bt *BufferedTokenStream) setTokenSource(tokenSource *TokenSource) {
|
||||
func (bt *BufferedTokenStream) setTokenSource(tokenSource TokenSource) {
|
||||
bt.tokenSource = tokenSource
|
||||
bt.tokens = make([]Token, 0)
|
||||
bt.tokens = make([]*Token, 0)
|
||||
bt.index = -1
|
||||
}
|
||||
|
||||
|
@ -289,10 +299,10 @@ func (bt *BufferedTokenStream) previousTokenOnChannel(i, channel int) int {
|
|||
// Collect all tokens on specified channel to the right of
|
||||
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
|
||||
// EOF. If channel is -1, find any non default channel token.
|
||||
func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) {
|
||||
func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []*Token {
|
||||
bt.lazyInit()
|
||||
if (tokenIndex < 0 || tokenIndex >= len(bt.tokens)) {
|
||||
panic( "" + tokenIndex + " not in 0.." + len(bt.tokens) - 1 )
|
||||
panic( strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens) - 1) )
|
||||
}
|
||||
var nextOnChannel = bt.nextTokenOnChannel(tokenIndex + 1, LexerDefaultTokenChannel)
|
||||
var from_ = tokenIndex + 1
|
||||
|
@ -309,10 +319,10 @@ func (bt *BufferedTokenStream) getHiddenTokensToRight(tokenIndex, channel int) {
|
|||
// Collect all tokens on specified channel to the left of
|
||||
// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
|
||||
// If channel is -1, find any non default channel token.
|
||||
func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) {
|
||||
func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []*Token {
|
||||
bt.lazyInit()
|
||||
if (tokenIndex < 0 || tokenIndex >= len(bt.tokens)) {
|
||||
panic( "" + tokenIndex + " not in 0.." + len(bt.tokens) - 1 )
|
||||
panic( strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(bt.tokens) - 1) )
|
||||
}
|
||||
var prevOnChannel = bt.previousTokenOnChannel(tokenIndex - 1, LexerDefaultTokenChannel)
|
||||
if (prevOnChannel == tokenIndex - 1) {
|
||||
|
@ -325,7 +335,7 @@ func (bt *BufferedTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) {
|
|||
}
|
||||
|
||||
func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) []*Token {
|
||||
var hidden = make([]*Token)
|
||||
var hidden = make([]*Token,0)
|
||||
for i := left; i < right + 1; i++ {
|
||||
var t = bt.tokens[i]
|
||||
if (channel == -1) {
|
||||
|
@ -343,7 +353,7 @@ func (bt *BufferedTokenStream) filterForChannel(left, right, channel int) []*Tok
|
|||
}
|
||||
|
||||
func (bt *BufferedTokenStream) getSourceName() string {
|
||||
return (*bt.tokenSource).getSourceName()
|
||||
return bt.tokenSource.getSourceName()
|
||||
}
|
||||
|
||||
// Get the text of all tokens in bt buffer.///
|
||||
|
@ -354,14 +364,14 @@ func (bt *BufferedTokenStream) getText(interval *Interval) string {
|
|||
interval = NewInterval(0, len(bt.tokens) - 1)
|
||||
}
|
||||
var start = interval.start
|
||||
if s2, ok := start.(*Token); ok {
|
||||
start = s2.tokenIndex
|
||||
}
|
||||
// if s2, ok := start.(*Token); ok {
|
||||
// start = s2.tokenIndex
|
||||
// }
|
||||
var stop = interval.stop
|
||||
if s2, ok := stop.(*Token); ok {
|
||||
stop = s2.tokenIndex
|
||||
}
|
||||
if (start == nil || stop == nil || start < 0 || stop < 0) {
|
||||
// if s2, ok := stop.(*Token); ok {
|
||||
// stop = s2.tokenIndex
|
||||
// }
|
||||
if (start < 0 || stop < 0) {
|
||||
return ""
|
||||
}
|
||||
if (stop >= len(bt.tokens)) {
|
||||
|
@ -373,7 +383,7 @@ func (bt *BufferedTokenStream) getText(interval *Interval) string {
|
|||
if (t.tokenType == TokenEOF) {
|
||||
break
|
||||
}
|
||||
s = s + t.text
|
||||
s += t.text()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
package antlr4
|
||||
|
||||
type TokenFactory interface {
|
||||
create(source *TokenFactorySourcePair, ttype, text, channel, start, stop, line, column int) *Token
|
||||
create(source *TokenSourceInputStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token
|
||||
}
|
||||
|
||||
type CommonTokenFactory struct {
|
||||
|
@ -45,22 +45,22 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
|
|||
//
|
||||
var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
|
||||
|
||||
func (this *CommonTokenFactory) create(source *TokenFactorySourcePair, ttype, text, channel, start, stop, line, column int) *Token {
|
||||
func (this *CommonTokenFactory) create(source *TokenSourceInputStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token {
|
||||
var t = NewCommonToken(source, ttype, channel, start, stop)
|
||||
t.line = line
|
||||
t.column = column
|
||||
if (text !=nil) {
|
||||
t.text = text
|
||||
} else if (this.copyText && source[1] !=nil) {
|
||||
t.text = source.inputStream.getText(start,stop)
|
||||
if (text != "") {
|
||||
t.setText( text )
|
||||
} else if (this.copyText && source.inputStream != nil) {
|
||||
t.setText( source.inputStream.getText(start,stop) )
|
||||
}
|
||||
return t
|
||||
return t.Token
|
||||
}
|
||||
|
||||
func (this *CommonTokenFactory) createThin(ttype int, text string) *CommonToken {
|
||||
func (this *CommonTokenFactory) createThin(ttype int, text string) *Token {
|
||||
var t = NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
|
||||
t.text = text
|
||||
return t
|
||||
t.setText( text )
|
||||
return t.Token
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@@ -26,23 +26,24 @@
package antlr4

type CommonTokenStream struct {
	BufferedTokenStream
	*BufferedTokenStream
}

func NewCommonTokenStream(lexer *Lexer, channel int) {
func NewCommonTokenStream(lexer ILexer, channel int) *CommonTokenStream {

	ts := &CommonTokenStream(BufferedTokenStream{lexer})
	ts := new(CommonTokenStream)
	ts.InitBufferedTokenStream(lexer)

	ts.channel = channel
	return ts

	return ts
}

func (ts *CommonTokenStream) adjustSeekIndex(i int) int {
	return ts.nextTokenOnChannel(i, ts.channel)
}

func (ts *CommonTokenStream) LB(k int) Token {
func (ts *CommonTokenStream) LB(k int) *Token {
	if (k==0 || ts.index-k<0) {
		return nil
	}
@@ -60,7 +61,7 @@ func (ts *CommonTokenStream) LB(k int) Token {
	return ts.tokens[i]
}

func (ts *CommonTokenStream) LT(k int) Token {
func (ts *CommonTokenStream) LT(k int) *Token {
	ts.lazyInit()
	if (k == 0) {
		return nil
|
|
|
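Note: with this hunk NewCommonTokenStream accepts any ILexer and returns *CommonTokenStream, and LB/LT now hand back *Token. A minimal usage sketch, assuming a generated lexer type and a NewInputStream constructor in this package (both names are illustrative, not part of this commit):

	input := NewInputStream("a = b")  // assumed InputStream constructor
	lexer := NewMyGrammarLexer(input) // hypothetical generated lexer satisfying ILexer
	tokens := NewCommonTokenStream(lexer, TokenDefaultChannel)
	first := tokens.LT(1)             // *Token after this change
	fmt.Println(first.tokenType)
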
@ -18,16 +18,12 @@ type DFA struct {
|
|||
atnStartState *DecisionState
|
||||
decision int
|
||||
_states *DFAStatesSet
|
||||
s0 DFAState
|
||||
s0 *DFAState
|
||||
precedenceDfa bool
|
||||
}
|
||||
|
||||
func NewDFA(atnStartState *DecisionState, decision int) *DFA {
|
||||
|
||||
if (decision == nil) {
|
||||
decision = 0
|
||||
}
|
||||
|
||||
this := new(DFA)
|
||||
|
||||
// From which ATN state did we create this DFA?
|
||||
|
@ -110,7 +106,7 @@ func (this *DFA) setPrecedenceDfa(precedenceDfa bool) {
|
|||
this._states = NewDFAStatesSet()
|
||||
if (precedenceDfa) {
|
||||
var precedenceState = NewDFAState(-1, NewATNConfigSet(false))
|
||||
precedenceState.edges = make([]*DFAState)
|
||||
precedenceState.edges = make([]*DFAState,0)
|
||||
precedenceState.isAcceptState = false
|
||||
precedenceState.requiresFullContext = false
|
||||
this.s0 = precedenceState
|
||||
|
@ -150,7 +146,7 @@ func (this *DFA) toString(literalNames []string, symbolicNames []string) string
|
|||
return serializer.toString()
|
||||
}
|
||||
|
||||
func (this *DFA) toLexerString() {
|
||||
func (this *DFA) toLexerString() string {
|
||||
if (this.s0 == nil) {
|
||||
return ""
|
||||
}
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
package antlr4
|
||||
import "fmt"
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// A DFA walker that knows how to dump them to serialized strings.#/
|
||||
|
||||
|
@ -12,11 +15,11 @@ type DFASerializer struct {
|
|||
func NewDFASerializer(dfa *DFA, literalNames, symbolicNames []string) *DFASerializer {
|
||||
|
||||
if (literalNames == nil){
|
||||
literalNames = make([]string)
|
||||
literalNames = make([]string, 0)
|
||||
}
|
||||
|
||||
if (symbolicNames == nil){
|
||||
symbolicNames = make([]string)
|
||||
symbolicNames = make([]string, 0)
|
||||
}
|
||||
|
||||
this := new(DFASerializer)
|
||||
|
@ -35,8 +38,9 @@ func (this *DFASerializer) InitDFASerializer(dfa *DFA, literalNames, symbolicNam
|
|||
func (this *DFASerializer) toString() string {
|
||||
|
||||
if(this.dfa.s0 == nil) {
|
||||
return nil
|
||||
return ""
|
||||
}
|
||||
|
||||
var buf = ""
|
||||
var states = this.dfa.sortedStates()
|
||||
for i := 0; i<len(states); i++ {
|
||||
|
@ -51,13 +55,13 @@ func (this *DFASerializer) toString() string {
|
|||
buf += this.getEdgeLabel(j)
|
||||
buf += "->"
|
||||
buf += this.getStateString(t)
|
||||
buf += '\n'
|
||||
buf += "\n"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(buf) == 0 {
|
||||
return nil
|
||||
return ""
|
||||
}
|
||||
|
||||
return buf
|
||||
|
@ -67,7 +71,11 @@ func (this *DFASerializer) getEdgeLabel(i int) string {
|
|||
if (i==0) {
|
||||
return "EOF"
|
||||
} else if(this.literalNames !=nil || this.symbolicNames!=nil) {
|
||||
return this.literalNames[i-1] || this.symbolicNames[i-1]
|
||||
if (this.literalNames[i-1] == ""){
|
||||
return this.literalNames[i-1]
|
||||
} else {
|
||||
return this.symbolicNames[i-1]
|
||||
}
|
||||
} else {
|
||||
return string(i-1)
|
||||
}
|
||||
|
@ -85,7 +93,7 @@ func (this *DFASerializer) getStateString(s *DFAState) string {
|
|||
b = "^"
|
||||
}
|
||||
|
||||
var baseStateStr = a + "s" + s.stateNumber + b
|
||||
var baseStateStr = a + "s" + strconv.Itoa(s.stateNumber) + b
|
||||
if(s.isAcceptState) {
|
||||
if (s.predicates != nil) {
|
||||
return baseStateStr + "=>" + fmt.Sprint(s.predicates)
|
||||
|
@ -103,14 +111,14 @@ type LexerDFASerializer struct {
|
|||
|
||||
func NewLexerDFASerializer(dfa *DFA) *LexerDFASerializer {
|
||||
|
||||
this := new(DFASerializer)
|
||||
this := new(LexerDFASerializer)
|
||||
|
||||
this.InitDFASerializer(dfa, nil, nil)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
func (this *LexerDFASerializer) getEdgeLabel(i int) {
|
||||
func (this *LexerDFASerializer) getEdgeLabel(i int) string {
|
||||
return "'" + string(i) + "'"
|
||||
}
|
||||
|
||||
|
|
|
@ -2,6 +2,7 @@ package antlr4
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// Map a predicate to a predicted alternative.///
|
||||
|
@ -60,7 +61,7 @@ type DFAState struct {
|
|||
predicates []PredPrediction
|
||||
}
|
||||
|
||||
func NewDFAState(stateNumber int, configs *NewATNConfigSet) *DFAState {
|
||||
func NewDFAState(stateNumber int, configs *ATNConfigSet) *DFAState {
|
||||
|
||||
if (configs == nil) {
|
||||
configs = NewATNConfigSet(false)
|
||||
|
@ -105,15 +106,15 @@ func NewDFAState(stateNumber int, configs *NewATNConfigSet) *DFAState {
|
|||
|
||||
// Get the set of all alts mentioned by all ATN configurations in this
|
||||
// DFA state.
|
||||
func (this *DFAState) getAltSet() {
|
||||
func (this *DFAState) getAltSet() *Set {
|
||||
var alts = NewSet(nil,nil)
|
||||
if (this.configs != nil) {
|
||||
for i := 0; i < len(this.configs.configs); i++ {
|
||||
var c = this.configs.configs[i]
|
||||
alts.add(c.alt)
|
||||
alts.add(c.getAlt())
|
||||
}
|
||||
}
|
||||
if (alts.length == 0) {
|
||||
if (alts.length() == 0) {
|
||||
return nil
|
||||
} else {
|
||||
return alts
|
||||
|
@ -143,7 +144,7 @@ func (this *DFAState) equals(other interface{}) bool {
|
|||
}
|
||||
|
||||
func (this *DFAState) toString() string {
|
||||
return "" + this.stateNumber + ":" + this.hashString()
|
||||
return strconv.Itoa(this.stateNumber) + ":" + this.hashString()
|
||||
}
|
||||
|
||||
func (this *DFAState) hashString() string {
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
package antlr4
|
||||
import "strings"
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
//
|
||||
// This implementation of {@link ANTLRErrorListener} can be used to identify
|
||||
|
@ -21,7 +23,7 @@ import "strings"
|
|||
// </ul>
|
||||
|
||||
type DiagnosticErrorListener struct {
|
||||
ErrorListener
|
||||
*DefaultErrorListener
|
||||
|
||||
exactOnly bool
|
||||
}
|
||||
|
@ -42,7 +44,7 @@ func (this *DiagnosticErrorListener) reportAmbiguity(recognizer *Parser, dfa *DF
|
|||
var msg = "reportAmbiguity d=" +
|
||||
this.getDecisionDescription(recognizer, dfa) +
|
||||
": ambigAlts=" +
|
||||
this.getConflictingAlts(ambigAlts, configs) +
|
||||
this.getConflictingAlts(ambigAlts, configs).toString() +
|
||||
", input='" +
|
||||
recognizer.getTokenStream().getTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
|
||||
recognizer.notifyErrorListeners(msg, nil, nil)
|
||||
|
@ -65,19 +67,19 @@ func (this *DiagnosticErrorListener) reportContextSensitivity(recognizer *Parser
|
|||
recognizer.notifyErrorListeners(msg, nil, nil)
|
||||
}
|
||||
|
||||
func (this *DiagnosticErrorListener) getDecisionDescription(recognizer *Parser, dfa *DFA) {
|
||||
func (this *DiagnosticErrorListener) getDecisionDescription(recognizer *Parser, dfa *DFA) string {
|
||||
var decision = dfa.decision
|
||||
var ruleIndex = dfa.atnStartState.ruleIndex
|
||||
|
||||
var ruleNames = recognizer.getRuleNames()
|
||||
if (ruleIndex < 0 || ruleIndex >= len(ruleNames)) {
|
||||
return "" + decision
|
||||
return strconv.Itoa(decision)
|
||||
}
|
||||
var ruleName = ruleNames[ruleIndex] || nil
|
||||
if (ruleName == nil || len(ruleName) == 0) {
|
||||
return "" + decision
|
||||
var ruleName = ruleNames[ruleIndex]
|
||||
if (ruleName == "") {
|
||||
return strconv.Itoa(decision)
|
||||
}
|
||||
return "" + decision + " (" + ruleName + ")"
|
||||
return strconv.Itoa(decision) + " (" + ruleName + ")"
|
||||
}
|
||||
|
||||
//
|
||||
|
@ -97,7 +99,15 @@ func (this *DiagnosticErrorListener) getConflictingAlts(reportedAlts *BitSet, se
|
|||
}
|
||||
var result = NewBitSet()
|
||||
for i := 0; i < len(set.configs); i++ {
|
||||
result.add(set.configs[i].alt)
|
||||
result.add(set.configs[i].getAlt())
|
||||
}
|
||||
return "{" + strings.Join(result.values(), ", ") + "}"
|
||||
|
||||
return result
|
||||
|
||||
// valuestrings := make([]string, len(result.values()))
|
||||
// for i,v := range result.values() {
|
||||
// valuestrings[i] = strconv.Itoa(v)
|
||||
// }
|
||||
//
|
||||
// return "{" + strings.Join(valuestrings, ", ") + "}"
|
||||
}
|
|
@@ -2,34 +2,43 @@ package antlr4

import (
	"fmt"
	"strconv"
)

// Provides an empty default implementation of {@link ANTLRErrorListener}. The
// default implementation of each method does nothing, but can be overridden as
// necessary.

type ErrorListener struct {

type IErrorListener interface {
	syntaxError(recognizer *Parser, offendingSymbol interface{}, line, column int, msg string, e *RecognitionException)
	reportAmbiguity(recognizer *Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet)
	reportAttemptingFullContext(recognizer *Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet)
	reportContextSensitivity(recognizer *Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet)
}

type DefaultErrorListener struct {

}

func NewErrorListener() *ErrorListener {
	return new(ErrorListener)
func NewErrorListener() *DefaultErrorListener {
	return new(DefaultErrorListener)
}

func (this *ErrorListener) syntaxError(recognizer *Parser, offendingSymbol interface{}, line, column int, msg string, e *RecognitionException) {
func (this *DefaultErrorListener) syntaxError(recognizer *Parser, offendingSymbol interface{}, line, column int, msg string, e *RecognitionException) {
}

func (this *ErrorListener) reportAmbiguity(recognizer *Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
func (this *DefaultErrorListener) reportAmbiguity(recognizer *Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
}

func (this *ErrorListener) reportAttemptingFullContext(recognizer *Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet) {
func (this *DefaultErrorListener) reportAttemptingFullContext(recognizer *Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet) {
}

func (this *ErrorListener) reportContextSensitivity(recognizer *Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet) {
func (this *DefaultErrorListener) reportContextSensitivity(recognizer *Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet) {
}
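Note: after this hunk custom listeners embed *DefaultErrorListener and override only what they need against the IErrorListener shape above. A hedged in-package sketch (how such a listener is registered on a recognizer is not shown in this hunk):

	type CountingErrorListener struct {
		*DefaultErrorListener
		count int
	}

	func (l *CountingErrorListener) syntaxError(recognizer *Parser, offendingSymbol interface{}, line, column int, msg string, e *RecognitionException) {
		l.count++
		fmt.Println("line " + strconv.Itoa(line) + ":" + strconv.Itoa(column) + " " + msg)
	}
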
|
||||
type ConsoleErrorListener struct {
|
||||
ErrorListener
|
||||
*DefaultErrorListener
|
||||
}
|
||||
|
||||
func NewConsoleErrorListener() *ConsoleErrorListener {
|
||||
|
@ -54,15 +63,15 @@ var ConsoleErrorListenerINSTANCE = NewConsoleErrorListener()
|
|||
// </pre>
|
||||
//
|
||||
func (this *ConsoleErrorListener) syntaxError(recognizer *Parser, offendingSymbol interface{}, line, column int, msg string, e *RecognitionException) {
|
||||
fmt.Errorf("line " + line + ":" + column + " " + msg)
|
||||
fmt.Errorf("line " + strconv.Itoa(line) + ":" + strconv.Itoa(column) + " " + msg)
|
||||
}
|
||||
|
||||
type ProxyErrorListener struct {
|
||||
ErrorListener
|
||||
delegates []ErrorListener
|
||||
*DefaultErrorListener
|
||||
delegates []IErrorListener
|
||||
}
|
||||
|
||||
func NewProxyErrorListener(delegates []ErrorListener) *ConsoleErrorListener {
|
||||
func NewProxyErrorListener(delegates []IErrorListener) *ProxyErrorListener {
|
||||
if (delegates==nil) {
|
||||
panic("delegates is not provided")
|
||||
}
|
||||
|
|
|
@ -4,8 +4,19 @@ import (
|
|||
"fmt"
|
||||
"strings"
|
||||
"reflect"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type IErrorStrategy interface {
|
||||
reset(recognizer *Parser)
|
||||
recoverInline(recognizer *Parser)
|
||||
recover(recognizer *Parser, e IRecognitionException)
|
||||
sync(recognizer *Parser)
|
||||
inErrorRecoveryMode(recognizer *Parser)
|
||||
reportError(recognizer *Parser)
|
||||
reportMatch(recognizer *Parser)
|
||||
}
|
||||
|
||||
type ErrorStrategy struct {
|
||||
}
|
||||
|
||||
|
@ -15,7 +26,7 @@ func (this *ErrorStrategy) reset(recognizer *Parser){
|
|||
func (this *ErrorStrategy) recoverInline(recognizer *Parser){
|
||||
}
|
||||
|
||||
func (this *ErrorStrategy) recover(recognizer *Parser, e *RecognitionException){
|
||||
func (this *ErrorStrategy) recover(recognizer *Parser, e IRecognitionException){
|
||||
}
|
||||
|
||||
func (this *ErrorStrategy) sync(recognizer *Parser){
|
||||
|
@ -86,7 +97,7 @@ func (this *DefaultErrorStrategy) beginErrorCondition(recognizer *Parser) {
|
|||
this.errorRecoveryMode = true
|
||||
}
|
||||
|
||||
func (this *DefaultErrorStrategy) inErrorRecoveryMode(recognizer *Parser) {
|
||||
func (this *DefaultErrorStrategy) inErrorRecoveryMode(recognizer *Parser) bool {
|
||||
return this.errorRecoveryMode
|
||||
}
|
||||
|
||||
|
@ -130,7 +141,7 @@ func (this *DefaultErrorStrategy) reportMatch(recognizer *Parser) {
|
|||
// the exception</li>
|
||||
// </ul>
|
||||
//
|
||||
func (this *DefaultErrorStrategy) reportError(recognizer *Parser, e *RecognitionException) {
|
||||
func (this *DefaultErrorStrategy) reportError(recognizer *Parser, e IRecognitionException) {
|
||||
// if we've already reported an error and have not matched a token
|
||||
// yet successfully, don't report any errors.
|
||||
if(this.inErrorRecoveryMode(recognizer)) {
|
||||
|
@ -138,17 +149,17 @@ func (this *DefaultErrorStrategy) reportError(recognizer *Parser, e *Recognition
|
|||
}
|
||||
this.beginErrorCondition(recognizer)
|
||||
|
||||
switch e.(type) {
|
||||
switch t := e.(type) {
|
||||
default:
|
||||
fmt.Println("unknown recognition error type: " + reflect.TypeOf(e).Name())
|
||||
// fmt.Println(e.stack)
|
||||
recognizer.notifyErrorListeners(e.offendingToken, e.message, e)
|
||||
case NoViableAltException:
|
||||
this.reportNoViableAlternative(recognizer, e)
|
||||
case InputMismatchException:
|
||||
this.reportInputMismatch(recognizer, e)
|
||||
case FailedPredicateException:
|
||||
this.reportFailedPredicate(recognizer, e)
|
||||
recognizer.notifyErrorListeners(e.getMessage(), e.getOffendingToken(), e)
|
||||
case *NoViableAltException:
|
||||
this.reportNoViableAlternative(recognizer, t)
|
||||
case *InputMismatchException:
|
||||
this.reportInputMismatch(recognizer, t)
|
||||
case *FailedPredicateException:
|
||||
this.reportFailedPredicate(recognizer, t)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -159,9 +170,9 @@ func (this *DefaultErrorStrategy) reportError(recognizer *Parser, e *Recognition
|
|||
// until we find one in the resynchronization set--loosely the set of tokens
|
||||
// that can follow the current rule.</p>
|
||||
//
|
||||
func (this *DefaultErrorStrategy) recover(recognizer *Parser, e *RecognitionException) {
|
||||
func (this *DefaultErrorStrategy) recover(recognizer *Parser, e IRecognitionException) {
|
||||
|
||||
if (this.lastErrorIndex==recognizer.getInputStream().index() &&
|
||||
if (this.lastErrorIndex==recognizer.getInputStream().index &&
|
||||
this.lastErrorStates != nil && this.lastErrorStates.contains(recognizer.state)) {
|
||||
// uh oh, another error at same token index and previously-visited
|
||||
// state in ATN must be a case where LT(1) is in the recovery
|
||||
|
@ -169,7 +180,7 @@ func (this *DefaultErrorStrategy) recover(recognizer *Parser, e *RecognitionExce
|
|||
// at least to prevent an infinite loop this is a failsafe.
|
||||
recognizer.consume()
|
||||
}
|
||||
this.lastErrorIndex = recognizer.getInputStream().index()
|
||||
this.lastErrorIndex = recognizer.getInputStream().index
|
||||
if (this.lastErrorStates == nil) {
|
||||
this.lastErrorStates = NewIntervalSet()
|
||||
}
|
||||
|
@ -231,14 +242,14 @@ func (this *DefaultErrorStrategy) sync(recognizer *Parser) {
|
|||
var s = recognizer._interp.atn.states[recognizer.state]
|
||||
var la = recognizer.getTokenStream().LA(1)
|
||||
// try cheaper subset first might get lucky. seems to shave a wee bit off
|
||||
if (la==TokenEOF || recognizer.getATN().nextTokens(s).contains(la)) {
|
||||
if (la==TokenEOF || recognizer.getATN().nextTokens(s,nil).contains(la)) {
|
||||
return
|
||||
}
|
||||
// Return but don't end recovery. only do that upon valid token match
|
||||
if(recognizer.isExpectedToken(la)) {
|
||||
return
|
||||
}
|
||||
switch (s.stateType *RecognitionException) {
|
||||
switch (s.getStateType()) {
|
||||
case ATNStateBLOCK_START:
|
||||
case ATNStateSTAR_BLOCK_START:
|
||||
case ATNStatePLUS_BLOCK_START:
|
||||
|
@ -278,7 +289,7 @@ func (this *DefaultErrorStrategy) reportNoViableAlternative(recognizer *Parser,
|
|||
if (e.startToken.tokenType==TokenEOF) {
|
||||
input = "<EOF>"
|
||||
} else {
|
||||
input = tokens.getTextFromInterval(NewInterval(e.startToken, e.offendingToken))
|
||||
input = tokens.getTextFromTokens(e.startToken, e.offendingToken)
|
||||
}
|
||||
} else {
|
||||
input = "<unknown input>"
|
||||
|
@ -296,7 +307,7 @@ func (this *DefaultErrorStrategy) reportNoViableAlternative(recognizer *Parser,
|
|||
// @param recognizer the parser instance
|
||||
// @param e the recognition exception
|
||||
//
|
||||
func (this *DefaultErrorStrategy) reportInputMismatch(recognizer *Parser, e *RecognitionException) {
|
||||
func (this *DefaultErrorStrategy) reportInputMismatch(recognizer *Parser, e *InputMismatchException) {
|
||||
var msg = "mismatched input " + this.getTokenErrorDisplay(e.offendingToken) +
|
||||
" expecting " + e.getExpectedTokens().toStringVerbose(recognizer.literalNames, recognizer.symbolicNames, false)
|
||||
recognizer.notifyErrorListeners(msg, e.offendingToken, e)
|
||||
|
@ -311,7 +322,7 @@ func (this *DefaultErrorStrategy) reportInputMismatch(recognizer *Parser, e *Rec
|
|||
// @param recognizer the parser instance
|
||||
// @param e the recognition exception
|
||||
//
|
||||
func (this *DefaultErrorStrategy) reportFailedPredicate(recognizer *Parser, e *RecognitionException) {
|
||||
func (this *DefaultErrorStrategy) reportFailedPredicate(recognizer *Parser, e *FailedPredicateException) {
|
||||
var ruleName = recognizer.getRuleNames()[recognizer._ctx.ruleIndex]
|
||||
var msg = "rule " + ruleName + " " + e.message
|
||||
recognizer.notifyErrorListeners(msg, e.offendingToken, e)
|
||||
|
@ -423,7 +434,7 @@ func (this *DefaultErrorStrategy) reportMissingToken(recognizer *Parser) {
|
|||
// is in the set of tokens that can follow the {@code ')'} token reference
|
||||
// in rule {@code atom}. It can assume that you forgot the {@code ')'}.
|
||||
//
|
||||
func (this *DefaultErrorStrategy) recoverInline(recognizer *Parser) {
|
||||
func (this *DefaultErrorStrategy) recoverInline(recognizer *Parser) *Token {
|
||||
// SINGLE TOKEN DELETION
|
||||
var matchedSymbol = this.singleTokenDeletion(recognizer)
|
||||
if (matchedSymbol != nil) {
|
||||
|
@ -457,15 +468,15 @@ func (this *DefaultErrorStrategy) recoverInline(recognizer *Parser) {
|
|||
// @return {@code true} if single-token insertion is a viable recovery
|
||||
// strategy for the current mismatched input, otherwise {@code false}
|
||||
//
|
||||
func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer *Parser) {
|
||||
func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer *Parser) bool {
|
||||
var currentSymbolType = recognizer.getTokenStream().LA(1)
|
||||
// if current token is consistent with what could come after current
|
||||
// ATN state, then we know we're missing a token error recovery
|
||||
// is free to conjure up and insert the missing token
|
||||
var atn = recognizer._interp.atn
|
||||
var currentState = atn.states[recognizer.state]
|
||||
var next = currentState.transitions[0].target
|
||||
var expectingAtLL2 = atn.nextTokens(next, recognizer._ctx)
|
||||
var next = currentState.getTransitions()[0].getTarget()
|
||||
var expectingAtLL2 = atn.nextTokens(next, recognizer._ctx.RuleContext)
|
||||
if (expectingAtLL2.contains(currentSymbolType) ){
|
||||
this.reportMissingToken(recognizer)
|
||||
return true
|
||||
|
@ -492,7 +503,7 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer *Parser) {
|
|||
// deletion successfully recovers from the mismatched input, otherwise
|
||||
// {@code nil}
|
||||
//
|
||||
func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer *Parser) Token {
|
||||
func (this *DefaultErrorStrategy) singleTokenDeletion(recognizer *Parser) *Token {
|
||||
var nextTokenType = recognizer.getTokenStream().LA(2)
|
||||
var expecting = this.getExpectedTokens(recognizer)
|
||||
if (expecting.contains(nextTokenType)) {
|
||||
|
@ -566,12 +577,12 @@ func (this *DefaultErrorStrategy) getTokenErrorDisplay(t *Token) string {
|
|||
if (t == nil) {
|
||||
return "<no token>"
|
||||
}
|
||||
var s = t.text
|
||||
if (s == nil) {
|
||||
var s = t.text()
|
||||
if (s == "") {
|
||||
if (t.tokenType==TokenEOF) {
|
||||
s = "<EOF>"
|
||||
} else {
|
||||
s = "<" + t.tokenType + ">"
|
||||
s = "<" + strconv.Itoa(t.tokenType) + ">"
|
||||
}
|
||||
}
|
||||
return this.escapeWSAndQuote(s)
|
||||
|
@ -683,7 +694,7 @@ func (this *DefaultErrorStrategy) getErrorRecoverySet(recognizer *Parser) *Inter
|
|||
for (ctx != nil && ctx.invokingState>=0) {
|
||||
// compute what follows who invoked us
|
||||
var invokingState = atn.states[ctx.invokingState]
|
||||
var rt = invokingState.transitions[0]
|
||||
var rt = invokingState.getTransitions()[0]
|
||||
var follow = atn.nextTokens(rt.(*RuleTransition).followState, nil)
|
||||
recoverSet.addSet(follow)
|
||||
ctx = ctx.parentCtx
|
||||
|
@ -746,7 +757,7 @@ func NewBailErrorStrategy() *BailErrorStrategy {
|
|||
// rule func catches. Use {@link Exception//getCause()} to get the
|
||||
// original {@link RecognitionException}.
|
||||
//
|
||||
func (this *BailErrorStrategy) recover(recognizer *Parser, e *RecognitionException) {
|
||||
func (this *BailErrorStrategy) recover(recognizer *Parser, e IRecognitionException) {
|
||||
var context = recognizer._ctx
|
||||
for (context != nil) {
|
||||
context.exception = e
|
||||
|
|
|
@ -9,18 +9,24 @@ import (
|
|||
// in the input, where it is in the ATN, the rule invocation stack,
|
||||
// and what kind of problem occurred.
|
||||
|
||||
|
||||
type IRecognitionException interface {
|
||||
getOffendingToken() *Token
|
||||
getMessage() string
|
||||
}
|
||||
|
||||
type RecognitionException struct {
|
||||
|
||||
message string
|
||||
recognizer *Recognizer
|
||||
recognizer IRecognizer
|
||||
offendingToken *Token
|
||||
offendingState int
|
||||
ctx *RuleContext
|
||||
ctx IRuleContext
|
||||
input *InputStream
|
||||
|
||||
}
|
||||
|
||||
func NewRecognitionException(message string, recognizer *Recognizer, input *InputStream, ctx *RuleContext) *RecognitionException {
|
||||
func NewRecognitionException(message string, recognizer IRecognizer, input *InputStream, ctx IRuleContext) *RecognitionException {
|
||||
|
||||
// todo
|
||||
// Error.call(this)
|
||||
|
@ -38,7 +44,7 @@ func NewRecognitionException(message string, recognizer *Recognizer, input *Inpu
|
|||
return t
|
||||
}
|
||||
|
||||
func (t *RecognitionException) InitRecognitionException(message string, recognizer *Recognizer, input *InputStream, ctx *RuleContext){
|
||||
func (t *RecognitionException) InitRecognitionException(message string, recognizer IRecognizer, input *InputStream, ctx IRuleContext){
|
||||
|
||||
t.message = message
|
||||
t.recognizer = recognizer
|
||||
|
@ -55,9 +61,16 @@ func (t *RecognitionException) InitRecognitionException(message string, recogniz
|
|||
// edge we couldn't match.
|
||||
t.offendingState = -1
|
||||
if (t.recognizer!=nil) {
|
||||
t.offendingState = t.recognizer.state
|
||||
t.offendingState = t.recognizer.getState()
|
||||
}
|
||||
}
|
||||
|
||||
func (this *RecognitionException) getMessage() string {
|
||||
return this.message
|
||||
}
|
||||
|
||||
func (this *RecognitionException) getOffendingToken() *Token {
|
||||
return this.offendingToken
|
||||
}
|
||||
|
||||
// <p>If the state number is not known, this method returns -1.</p>
|
||||
|
@ -132,8 +145,7 @@ type NoViableAltException struct {
|
|||
// of the offending input and also knows where the parser was
|
||||
// in the various paths when the error. Reported by reportNoViableAlternative()
|
||||
//
|
||||
func NewNoViableAltException(recognizer *Parser, input *InputStream, startToken *Token,
|
||||
offendingToken *Token, deadEndConfigs *ATNConfigSet, ctx *ParserRuleContext) *NoViableAltException {
|
||||
func NewNoViableAltException(recognizer *Parser, input *InputStream, startToken *Token, offendingToken *Token, deadEndConfigs *ATNConfigSet, ctx *ParserRuleContext) *NoViableAltException {
|
||||
|
||||
if (ctx == nil){
|
||||
ctx = recognizer._ctx
|
||||
|
@ -168,9 +180,7 @@ func NewNoViableAltException(recognizer *Parser, input *InputStream, startToken
|
|||
}
|
||||
|
||||
type InputMismatchException struct {
|
||||
|
||||
RecognitionException
|
||||
|
||||
}
|
||||
|
||||
// This signifies any kind of mismatched input exceptions such as
|
||||
|
@ -209,8 +219,8 @@ func NewFailedPredicateException(recognizer *Parser, predicate string, message s
|
|||
this.InitRecognitionException(this.formatMessage(predicate, message), recognizer, recognizer.getInputStream(), recognizer._ctx)
|
||||
|
||||
var s = recognizer._interp.atn.states[recognizer.state]
|
||||
var trans = s.transitions[0]
|
||||
if trans2, ok := trans.(PredicateTransition); ok {
|
||||
var trans = s.getTransitions()[0]
|
||||
if trans2, ok := trans.(*PredicateTransition); ok {
|
||||
this.ruleIndex = trans2.ruleIndex
|
||||
this.predicateIndex = trans2.predIndex
|
||||
} else {
|
||||
|
@ -224,7 +234,7 @@ func NewFailedPredicateException(recognizer *Parser, predicate string, message s
|
|||
}
|
||||
|
||||
func (this *FailedPredicateException) formatMessage(predicate, message string) string {
|
||||
if (message !=nil) {
|
||||
if (message != "") {
|
||||
return message
|
||||
} else {
|
||||
return "failed predicate: {" + predicate + "}?"
|
||||
|
|
|
@ -1,9 +1,5 @@
|
|||
package antlr4
|
||||
|
||||
import (
|
||||
"math"
|
||||
)
|
||||
|
||||
// Vacuums all input from a string and then treat it like a buffer.
|
||||
|
||||
type InputStream struct {
|
||||
|
@ -52,7 +48,7 @@ func (is *InputStream) LA(offset int) int {
|
|||
if pos < 0 || pos >= is.size { // invalid
|
||||
return TokenEOF
|
||||
}
|
||||
return is.data[pos]
|
||||
return int(is.data[pos])
|
||||
}
|
||||
|
||||
func (is *InputStream) LT(offset int) int {
|
||||
|
@ -76,7 +72,7 @@ func (is *InputStream) seek(index int) {
|
|||
return
|
||||
}
|
||||
// seek forward
|
||||
is.index = math.Min(index, is.size)
|
||||
is.index = intMin(index, is.size)
|
||||
}
|
||||
|
||||
func (is *InputStream) getText(start int, stop int) string {
|
||||
|
@ -86,10 +82,10 @@ func (is *InputStream) getText(start int, stop int) string {
|
|||
if start >= is.size {
|
||||
return ""
|
||||
} else {
|
||||
return is.data[start:stop+1]
|
||||
return string(is.data[start:stop+1])
|
||||
}
|
||||
}
|
||||
|
||||
func (is *InputStream) toString() string {
|
||||
return is.data
|
||||
return string(is.data)
|
||||
}
|
||||
|
|
|
@ -3,7 +3,6 @@ package antlr4
|
|||
import (
|
||||
"strings"
|
||||
"strconv"
|
||||
"math"
|
||||
)
|
||||
|
||||
type Interval struct {
|
||||
|
@ -12,7 +11,7 @@ type Interval struct {
|
|||
}
|
||||
|
||||
/* stop is not included! */
|
||||
func NewInterval(start, stop int) Interval{
|
||||
func NewInterval(start, stop int) *Interval{
|
||||
i := new(Interval)
|
||||
|
||||
i.start = start
|
||||
|
@ -20,7 +19,7 @@ func NewInterval(start, stop int) Interval{
|
|||
return i
|
||||
}
|
||||
|
||||
func (i *Interval) contains(item int) {
|
||||
func (i *Interval) contains(item int) bool {
|
||||
return item >= i.start && item < i.stop
|
||||
}
|
||||
|
||||
|
@ -37,7 +36,7 @@ func (i *Interval) length() int {
|
|||
}
|
||||
|
||||
type IntervalSet struct {
|
||||
intervals []Interval
|
||||
intervals []*Interval
|
||||
readOnly bool
|
||||
}
|
||||
|
||||
|
@ -69,7 +68,7 @@ func (i *IntervalSet) addRange(l, h int) {
|
|||
|
||||
func (is *IntervalSet) addInterval(v *Interval) {
|
||||
if (is.intervals == nil) {
|
||||
is.intervals = make([]Interval, 0)
|
||||
is.intervals = make([]*Interval, 0)
|
||||
is.intervals = append( is.intervals, v )
|
||||
} else {
|
||||
// find insert pos
|
||||
|
@ -84,7 +83,7 @@ func (is *IntervalSet) addInterval(v *Interval) {
|
|||
is.intervals[k].start = v.start
|
||||
return
|
||||
} else if (v.start <= i.stop) {
|
||||
is.intervals[k] = NewInterval(math.Min(i.start, v.start), math.Max(i.stop, v.stop))
|
||||
is.intervals[k] = NewInterval(intMin(i.start, v.start), intMax(i.stop, v.stop))
|
||||
is.reduce(k)
|
||||
return
|
||||
}
|
||||
|
@ -94,7 +93,7 @@ func (is *IntervalSet) addInterval(v *Interval) {
|
|||
}
|
||||
}
|
||||
|
||||
func (i *IntervalSet) addSet(other IntervalSet) *IntervalSet {
|
||||
func (i *IntervalSet) addSet(other *IntervalSet) *IntervalSet {
|
||||
if (other.intervals != nil) {
|
||||
for k := 0; k < len(other.intervals); k++ {
|
||||
var i2 = other.intervals[k]
|
||||
|
@ -129,7 +128,7 @@ func (is *IntervalSet) complement(start int, stop int) *IntervalSet {
|
|||
return result
|
||||
}
|
||||
|
||||
func (i *IntervalSet) contains(item Interval) bool {
|
||||
func (i *IntervalSet) contains(item int) bool {
|
||||
if (i.intervals == nil) {
|
||||
return false
|
||||
} else {
|
||||
|
@ -170,7 +169,7 @@ func (is *IntervalSet) removeRange(v *Interval) {
|
|||
return
|
||||
} else if(v.start<=i.start && v.stop>=i.stop) {
|
||||
// is.intervals.splice(k, 1)
|
||||
is.intervals = append(is.intervals[0:k], is.intervals[k+1]...)
|
||||
is.intervals = append(is.intervals[0:k], is.intervals[k+1:]...)
|
||||
k = k - 1 // need another pass
|
||||
} else if(v.start<i.stop) {
|
||||
is.intervals[k] = NewInterval(i.start, v.start)
|
||||
|
@ -182,34 +181,30 @@ func (is *IntervalSet) removeRange(v *Interval) {
|
|||
}
|
||||
}
|
||||
|
||||
// TODO this looks like a dupe of removeRange...
func (is *IntervalSet) removeOne(v *Interval) {
	if(v.start==v.stop-1) {
		is.removeOne(v.start)
	} else if (is.intervals!=nil) {
		var k = 0
		for n := 0; n < len(is.intervals); n++ {
			i := is.intervals[k]
			// intervals are ordered
			if v.stop <= i.start {
				return
			} else if v.start>i.start && v.stop<i.stop {
				// check for including range, split it
				is.intervals[k] = NewInterval(i.start, v.start)
				var x = NewInterval(v.stop, i.stop)
				// is.intervals.splice(k, 0, x)
func (is *IntervalSet) removeOne(v int) {
	if (is.intervals != nil) {
		for k := 0; k < len(is.intervals); k++ {
			var i = is.intervals[k];
			// intervals is ordered
			if (v < i.start) {
				return;
			} else if (v == i.start && v == i.stop - 1) {
				// is.intervals.splice(k, 1);
				is.intervals = append(is.intervals[0:k], is.intervals[k+1:]...)
				return;
			} else if (v == i.start) {
				is.intervals[k] = NewInterval(i.start + 1, i.stop);
				return;
			} else if (v == i.stop - 1) {
				is.intervals[k] = NewInterval(i.start, i.stop - 1);
				return;
			} else if (v < i.stop - 1) {
				var x = NewInterval(i.start, v);
				i.start = v + 1;
				// is.intervals.splice(k, 0, x);
				is.intervals = append(is.intervals[0:k], append([]*Interval{x}, is.intervals[k:]...)...)
				return
			} else if(v.start<=i.start && v.stop>=i.stop) {
				// is.intervals.splice(k, 1)
				is.intervals = append(is.intervals[0:k], is.intervals[k+1]...)
				k = k - 1; // need another pass
			} else if(v.start<i.stop) {
				is.intervals[k] = NewInterval(i.start, v.start)
			} else if(v.stop<i.stop) {
				is.intervals[k] = NewInterval(v.stop, i.stop)
				return;
			}
			k += 1
		}
	}
}
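Note: a short worked example of what the rewritten removeOne(v int) is intended to do, using NewIntervalSet and NewInterval as defined in this file (values are illustrative; stop is exclusive per the Interval comment):

	s := NewIntervalSet()
	s.addInterval(NewInterval(10, 20)) // set is {10..19}
	s.removeOne(15)                    // splits into {10..14} and {16..19}
	s.removeOne(10)                    // first interval becomes {11..14}
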
|
@ -231,19 +226,19 @@ func (i *IntervalSet) toStringVerbose(literalNames []string, symbolicNames []str
|
|||
}
|
||||
}
|
||||
|
||||
func (is *IntervalSet) toCharString() {
|
||||
func (is *IntervalSet) toCharString() string {
|
||||
var names = make([]string, len(is.intervals))
|
||||
|
||||
for i := 0; i < len( is.intervals ); i++ {
|
||||
var v = is.intervals[i]
|
||||
if(v.stop==v.start+1) {
|
||||
if ( v.start== TokenEOF ) {
|
||||
append(names, "<EOF>")
|
||||
names = append(names, "<EOF>")
|
||||
} else {
|
||||
append(names, ("'" + string(v.start) + "'"))
|
||||
names = append(names, ("'" + string(v.start) + "'"))
|
||||
}
|
||||
} else {
|
||||
append(names, "'" + string(v.start) + "'..'" + string(v.stop-1) + "'")
|
||||
names = append(names, "'" + string(v.start) + "'..'" + string(v.stop-1) + "'")
|
||||
}
|
||||
}
|
||||
if (len(names) > 1) {
|
||||
|
@ -254,7 +249,7 @@ func (is *IntervalSet) toCharString() {
|
|||
}
|
||||
|
||||
|
||||
func (is *IntervalSet) toIndexString() {
|
||||
func (is *IntervalSet) toIndexString() string {
|
||||
var names = make([]string, 0)
|
||||
for i := 0; i < len( is.intervals ); i++ {
|
||||
var v = is.intervals[i]
|
||||
|
@ -297,7 +292,11 @@ func (i *IntervalSet) elementName(literalNames []string, symbolicNames []string,
|
|||
} else if (a == TokenEpsilon) {
|
||||
return "<EPSILON>"
|
||||
} else {
|
||||
return literalNames[a] || symbolicNames[a]
|
||||
if (literalNames[a] != ""){
|
||||
return literalNames[a]
|
||||
} else {
|
||||
return symbolicNames[a]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -29,17 +29,17 @@ const (
|
|||
//
|
||||
// @param s the ATN state
|
||||
// @return the expected symbols for each outgoing transition of {@code s}.
|
||||
func (la *LL1Analyzer) getDecisionLookahead(s *ATNState) []*IntervalSet {
|
||||
func (la *LL1Analyzer) getDecisionLookahead(s IATNState) []*IntervalSet {
|
||||
if (s == nil) {
|
||||
return nil
|
||||
}
|
||||
var count = len(s.transitions)
|
||||
var look = make([]*IntervalSet)
|
||||
var count = len(s.getTransitions())
|
||||
var look = make([]*IntervalSet, count)
|
||||
for alt := 0; alt < count; alt++ {
|
||||
look[alt] = NewIntervalSet()
|
||||
var lookBusy = NewSet(nil,nil)
|
||||
var seeThruPreds = false // fail to get lookahead upon pred
|
||||
la._LOOK(s.transitions[alt].target, nil, PredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
|
||||
la._LOOK(s.getTransitions()[alt].getTarget(), nil, PredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
|
||||
// Wipe out lookahead for la alternative if we found nothing
|
||||
// or we had a predicate when we !seeThruPreds
|
||||
if (look[alt].length==0 || look[alt].contains(LL1AnalyzerHIT_PRED)) {
|
||||
|
@ -67,12 +67,12 @@ func (la *LL1Analyzer) getDecisionLookahead(s *ATNState) []*IntervalSet {
|
|||
// @return The set of tokens that can follow {@code s} in the ATN in the
|
||||
// specified {@code ctx}.
|
||||
///
|
||||
func (la *LL1Analyzer) LOOK(s *ATNState, stopState int, ctx *RuleContext) *IntervalSet {
|
||||
func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx *RuleContext) *IntervalSet {
|
||||
var r = NewIntervalSet()
|
||||
var seeThruPreds = true // ignore preds get all lookahead
|
||||
var lookContext *RuleContext
|
||||
if (ctx != nil){
|
||||
predictionContextFromRuleContext(s.atn, ctx)
|
||||
predictionContextFromRuleContext(s.getATN(), ctx)
|
||||
}
|
||||
la._LOOK(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
|
||||
return r
|
||||
|
@ -109,7 +109,7 @@ func (la *LL1Analyzer) LOOK(s *ATNState, stopState int, ctx *RuleContext) *Inter
|
|||
// is {@code nil}.
|
||||
|
||||
|
||||
func (la *LL1Analyzer) _LOOK(s, stopState *ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
|
||||
func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
|
||||
|
||||
c := NewATNConfig6(s, 0, ctx)
|
||||
|
||||
|
@ -146,16 +146,16 @@ func (la *LL1Analyzer) _LOOK(s, stopState *ATNState, ctx *PredictionContext, loo
|
|||
returnState := la.atn.states[ctx.getReturnState(i)]
|
||||
// System.out.println("popping back to "+retState)
|
||||
|
||||
removed := calledRuleStack[returnState.ruleIndex]
|
||||
removed := calledRuleStack[returnState.getRuleIndex()]
|
||||
|
||||
// TODO this is incorrect
|
||||
defer func(){
|
||||
if (removed) {
|
||||
calledRuleStack.add(returnState.ruleIndex)
|
||||
calledRuleStack.add(returnState.getRuleIndex())
|
||||
}
|
||||
}()
|
||||
|
||||
calledRuleStack.clear(returnState.ruleIndex)
|
||||
calledRuleStack.clear(returnState.getRuleIndex())
|
||||
la._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||
|
||||
}
|
||||
|
@ -163,37 +163,37 @@ func (la *LL1Analyzer) _LOOK(s, stopState *ATNState, ctx *PredictionContext, loo
|
|||
}
|
||||
}
|
||||
|
||||
n := len(s.transitions)
|
||||
n := len(s.getTransitions())
|
||||
|
||||
for i:=0; i<n; i++ {
|
||||
t := s.transitions[i]
|
||||
t := s.getTransitions()[i]
|
||||
|
||||
if t1, ok := t.(*RuleTransition); ok {
|
||||
|
||||
if (calledRuleStack[t1.target.ruleIndex]) {
|
||||
if (calledRuleStack[t1.getTarget().getRuleIndex()]) {
|
||||
continue
|
||||
}
|
||||
|
||||
newContext := SingletonPredictionContextcreate(ctx, t1.followState.stateNumber)
|
||||
newContext := SingletonPredictionContextcreate(ctx, t1.followState.getStateNumber())
|
||||
|
||||
defer func(){
|
||||
calledRuleStack.remove(t1.target.ruleIndex);
|
||||
calledRuleStack.remove(t1.getTarget().getRuleIndex());
|
||||
}()
|
||||
|
||||
calledRuleStack.add(t1.target.ruleIndex)
|
||||
la._LOOK(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||
calledRuleStack.add(t1.getTarget().getRuleIndex())
|
||||
la._LOOK(t.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||
} else if t2, ok := t.(*AbstractPredicateTransition); ok {
|
||||
if ( seeThruPreds ) {
|
||||
la._LOOK(t2.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||
la._LOOK(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||
} else {
|
||||
look.addOne(LL1AnalyzerHIT_PRED)
|
||||
}
|
||||
} else if ( t.isEpsilon() ) {
|
||||
la._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||
} else if ( t.getIsEpsilon() ) {
|
||||
la._LOOK(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
|
||||
} else if _, ok := t.(*WildcardTransition); ok {
|
||||
look.addRange( TokenMinUserTokenType, la.atn.maxTokenType );
|
||||
} else {
|
||||
set := t.label
|
||||
set := t.getLabel()
|
||||
if (set != nil) {
|
||||
if _, ok := t.(*NotSetTransition); ok {
|
||||
set = set.complement(TokenMinUserTokenType, la.atn.maxTokenType);
|
||||
|
|
|
@ -17,13 +17,15 @@ type TokenSource interface {
|
|||
getCharPositionInLine() int
|
||||
getInputStream() *InputStream
|
||||
getSourceName() string
|
||||
setTokenFactory(factory *TokenFactory)
|
||||
getTokenFactory() *TokenFactory
|
||||
setTokenFactory(factory TokenFactory)
|
||||
getTokenFactory() TokenFactory
|
||||
}
|
||||
|
||||
type TokenFactorySourcePair struct {
|
||||
factory *TokenFactory
|
||||
inputStream *InputStream
|
||||
type ILexer interface {
|
||||
TokenSource
|
||||
|
||||
setChannel(int)
|
||||
pushMode(int)
|
||||
}
|
||||
|
||||
type Lexer struct {
|
||||
|
@ -31,7 +33,7 @@ type Lexer struct {
|
|||
|
||||
_input *InputStream
|
||||
_factory *TokenFactory
|
||||
_tokenFactorySourcePair *TokenFactorySourcePair
|
||||
_tokenFactorySourcePair *TokenSourceInputStreamPair
|
||||
_interp *LexerATNSimulator
|
||||
_token *Token
|
||||
_tokenStartCharIndex int
|
||||
|
|
|
@ -335,7 +335,7 @@ func (this *LexerATNSimulator) accept(input *InputStream, lexerActionExecutor *L
|
|||
}
|
||||
}
|
||||
|
||||
func (this *LexerATNSimulator) getReachableTarget(trans *Transition, t int) *ATNState {
|
||||
func (this *LexerATNSimulator) getReachableTarget(trans ITransition, t int) IATNState {
|
||||
if (trans.matches(t, 0, 0xFFFE)) {
|
||||
return trans.target
|
||||
} else {
|
||||
|
@ -343,7 +343,7 @@ func (this *LexerATNSimulator) getReachableTarget(trans *Transition, t int) *ATN
|
|||
}
|
||||
}
|
||||
|
||||
func (this *LexerATNSimulator) computeStartState(input *InputStream, p *ATNState ) *OrderedATNConfigSet {
|
||||
func (this *LexerATNSimulator) computeStartState(input *InputStream, p IATNState ) *OrderedATNConfigSet {
|
||||
|
||||
var configs = NewOrderedATNConfigSet()
|
||||
for i := 0; i < len(p.transitions); i++ {
|
||||
|
@ -417,7 +417,7 @@ func (this *LexerATNSimulator) closure(input *InputStream, config *LexerATNConfi
|
|||
}
|
||||
|
||||
// side-effect: can alter configs.hasSemanticContext
|
||||
func (this *LexerATNSimulator) getEpsilonTarget(input *InputStream, config *LexerATNConfig, trans *Transition,
|
||||
func (this *LexerATNSimulator) getEpsilonTarget(input *InputStream, config *LexerATNConfig, trans ITransition,
|
||||
configs *ATNConfigSet, speculative, treatEofAsEpsilon bool) *LexerATNConfig {
|
||||
|
||||
var cfg *LexerATNConfig
|
||||
|
|
|
@ -10,6 +10,15 @@ const (
|
|||
LexerActionTypeSKIP = 6 //The type of a {@link LexerSkipAction} action.
|
||||
LexerActionTypeTYPE = 7 //The type of a {@link LexerTypeAction} action.
|
||||
)
|
||||
|
||||
type ILexerAction interface {
|
||||
getActionType() int
|
||||
getIsPositionDependent() bool
|
||||
execute(lexer ILexer)
|
||||
hashString() string
|
||||
equals(other ILexerAction) bool
|
||||
}
|
||||
|
||||
type LexerAction struct {
|
||||
actionType int
|
||||
isPositionDependent bool
|
||||
|
@ -26,15 +35,23 @@ func (la *LexerAction) InitLexerAction(action int){
|
|||
la.isPositionDependent = false
|
||||
}
|
||||
|
||||
func (this *LexerAction) execute(lexer *Lexer) {
|
||||
func (this *LexerAction) execute(lexer ILexer) {
|
||||
panic("Not implemented")
|
||||
}
|
||||
|
||||
func (this *LexerAction) getActionType() int {
|
||||
return this.actionType
|
||||
}
|
||||
|
||||
func (this *LexerAction) getIsPositionDependent() bool {
|
||||
return this.isPositionDependent
|
||||
}
|
||||
|
||||
func (this *LexerAction) hashString() string {
|
||||
return "" + this.actionType
|
||||
}
|
||||
|
||||
func (this *LexerAction) equals(other *LexerAction) {
|
||||
func (this *LexerAction) equals(other ILexerAction) bool {
|
||||
return this == other
|
||||
}
|
||||
|
||||
|
@ -56,7 +73,7 @@ func NewLexerSkipAction() *LexerSkipAction {
|
|||
// Provides a singleton instance of this parameterless lexer action.
|
||||
var LexerSkipActionINSTANCE = NewLexerSkipAction()
|
||||
|
||||
func (this *LexerSkipAction) execute(lexer *Lexer) {
|
||||
func (this *LexerSkipAction) execute(lexer ILexer) {
|
||||
lexer.skip()
|
||||
}
|
||||
|
||||
|
@ -79,7 +96,7 @@ func NewLexerTypeAction(_type int) *LexerTypeAction {
|
|||
return this
|
||||
}
|
||||
|
||||
func (this *LexerTypeAction) execute(lexer *Lexer) {
|
||||
func (this *LexerTypeAction) execute(lexer ILexer) {
|
||||
lexer._type = this._type
|
||||
}
|
||||
|
||||
|
@ -88,7 +105,7 @@ func (this *LexerTypeAction) hashString() string {
|
|||
}
|
||||
|
||||
|
||||
func (this *LexerTypeAction) equals(other *LexerAction) bool {
|
||||
func (this *LexerTypeAction) equals(other ILexerAction) bool {
|
||||
if(this == other) {
|
||||
return true
|
||||
} else if _, ok := other.(*LexerTypeAction); !ok {
|
||||
|
@ -121,7 +138,7 @@ func NewLexerPushModeAction(mode int) *LexerPushModeAction {
|
|||
|
||||
// <p>This action is implemented by calling {@link Lexer//pushMode} with the
|
||||
// value provided by {@link //getMode}.</p>
|
||||
func (this *LexerPushModeAction) execute(lexer *Lexer) {
|
||||
func (this *LexerPushModeAction) execute(lexer ILexer) {
|
||||
lexer.pushMode(this.mode)
|
||||
}
|
||||
|
||||
|
@ -129,7 +146,7 @@ func (this *LexerPushModeAction) hashString() string {
|
|||
return "" + this.actionType + this.mode
|
||||
}
|
||||
|
||||
func (this *LexerPushModeAction) equals(other *LexerAction) bool {
|
||||
func (this *LexerPushModeAction) equals(other ILexerAction) bool {
|
||||
if (this == other) {
|
||||
return true
|
||||
} else if _, ok := other.(*LexerPushModeAction); !ok {
|
||||
|
@ -143,7 +160,6 @@ func (this *LexerPushModeAction) toString() string {
|
|||
return "pushMode(" + this.mode + ")"
|
||||
}
|
||||
|
||||
|
||||
// Implements the {@code popMode} lexer action by calling {@link Lexer//popMode}.
|
||||
//
|
||||
// <p>The {@code popMode} command does not have any parameters, so this action is
|
||||
|
@ -164,7 +180,7 @@ func NewLexerPopModeAction() *LexerPopModeAction {
|
|||
var LexerPopModeActionINSTANCE = NewLexerPopModeAction()
|
||||
|
||||
// <p>This action is implemented by calling {@link Lexer//popMode}.</p>
|
||||
func (this *LexerPopModeAction) execute(lexer *Lexer) {
|
||||
func (this *LexerPopModeAction) execute(lexer ILexer) {
|
||||
lexer.popMode()
|
||||
}
|
||||
|
||||
|
@ -191,7 +207,7 @@ func NewLexerMoreAction() *LexerMoreAction {
|
|||
var LexerMoreActionINSTANCE = NewLexerMoreAction()
|
||||
|
||||
// <p>This action is implemented by calling {@link Lexer//popMode}.</p>
|
||||
func (this *LexerMoreAction) execute(lexer *Lexer) {
|
||||
func (this *LexerMoreAction) execute(lexer ILexer) {
|
||||
lexer.more()
|
||||
}
|
||||
|
||||
|
@ -217,7 +233,7 @@ func NewLexerModeAction(mode int) *LexerModeAction {
|
|||
|
||||
// <p>This action is implemented by calling {@link Lexer//mode} with the
|
||||
// value provided by {@link //getMode}.</p>
|
||||
func (this *LexerModeAction) execute(lexer *Lexer) {
|
||||
func (this *LexerModeAction) execute(lexer ILexer) {
|
||||
lexer.mode(this.mode)
|
||||
}
|
||||
|
||||
|
@ -225,7 +241,7 @@ func (this *LexerModeAction) hashString() string {
|
|||
return "" + this.actionType + this.mode
|
||||
}
|
||||
|
||||
func (this *LexerModeAction) equals(other *LexerAction) bool {
|
||||
func (this *LexerModeAction) equals(other ILexerAction) bool {
|
||||
if (this == other) {
|
||||
return true
|
||||
} else if _, ok := other.(*LexerModeAction); !ok {
|
||||
|
@ -272,7 +288,7 @@ func NewLexerCustomAction(ruleIndex, actionIndex int) *LexerCustomAction {
|
|||
|
||||
// <p>Custom actions are implemented by calling {@link Lexer//action} with the
|
||||
// appropriate rule and action indexes.</p>
|
||||
func (this *LexerCustomAction) execute(lexer *Lexer) {
|
||||
func (this *LexerCustomAction) execute(lexer ILexer) {
|
||||
lexer.action(nil, this.ruleIndex, this.actionIndex)
|
||||
}
|
||||
|
||||
|
@ -280,7 +296,7 @@ func (this *LexerCustomAction) hashString() string {
|
|||
return "" + this.actionType + this.ruleIndex + this.actionIndex
|
||||
}
|
||||
|
||||
func (this *LexerCustomAction) equals(other *LexerAction) bool {
|
||||
func (this *LexerCustomAction) equals(other ILexerAction) bool {
|
||||
if (this == other) {
|
||||
return true
|
||||
} else if _, ok := other.(*LexerCustomAction); !ok {
|
||||
|
@ -307,20 +323,17 @@ func NewLexerChannelAction(channel int) *LexerChannelAction {
|
|||
return this
|
||||
}
|
||||
|
||||
//LexerChannelAction.prototype = Object.create(LexerAction.prototype)
|
||||
//LexerChannelAction.prototype.constructor = LexerChannelAction
|
||||
|
||||
// <p>This action is implemented by calling {@link Lexer//setChannel} with the
|
||||
// value provided by {@link //getChannel}.</p>
|
||||
func (this *LexerChannelAction) execute(lexer *Lexer) {
|
||||
lexer._channel = this.channel
|
||||
func (this *LexerChannelAction) execute(lexer ILexer) {
|
||||
lexer.setChannel(this.channel)
|
||||
}
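Switching the execute receivers from *Lexer to ILexer forces actions to go through the interface's methods (setChannel) instead of poking the _channel field directly. A small sketch with simplified types that are not the real runtime ones:

package main

import "fmt"

type ILexer interface {
	setChannel(int)
}

type demoLexer struct {
	channel int
}

func (l *demoLexer) setChannel(c int) { l.channel = c }

// channelAction mirrors LexerChannelAction in shape only.
type channelAction struct {
	channel int
}

func (a channelAction) execute(lexer ILexer) {
	lexer.setChannel(a.channel) // a field access would not compile through an interface
}

func main() {
	l := &demoLexer{}
	channelAction{channel: 2}.execute(l)
	fmt.Println(l.channel) // 2
}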
|
||||
|
||||
func (this *LexerChannelAction) hashString() string {
|
||||
return "" + this.actionType + this.channel
|
||||
}
|
||||
|
||||
func (this *LexerChannelAction) equals(other *LexerAction) bool {
|
||||
func (this *LexerChannelAction) equals(other ILexerAction) bool {
|
||||
if (this == other) {
|
||||
return true
|
||||
} else if _, ok := other.(*LexerChannelAction); !ok {
|
||||
|
@ -358,11 +371,11 @@ type LexerIndexedCustomAction struct {
|
|||
LexerAction
|
||||
|
||||
offset int
|
||||
action *LexerAction
|
||||
action ILexerAction
|
||||
isPositionDependent bool
|
||||
}
|
||||
|
||||
func NewLexerIndexedCustomAction(offset int, action *LexerAction) *LexerIndexedCustomAction {
|
||||
func NewLexerIndexedCustomAction(offset int, action ILexerAction) *LexerIndexedCustomAction {
|
||||
|
||||
this := new(LexerIndexedCustomAction)
|
||||
this.InitLexerAction( action.actionType )
|
||||
|
@ -376,7 +389,7 @@ func NewLexerIndexedCustomAction(offset int, action *LexerAction) *LexerIndexedC
|
|||
|
||||
// <p>This method calls {@link //execute} on the result of {@link //getAction}
|
||||
// using the provided {@code lexer}.</p>
|
||||
func (this *LexerIndexedCustomAction) execute(lexer *Lexer) {
|
||||
func (this *LexerIndexedCustomAction) execute(lexer ILexer) {
|
||||
// assume the input stream position was properly set by the calling code
|
||||
this.action.execute(lexer)
|
||||
}
|
||||
|
@ -385,7 +398,7 @@ func (this *LexerIndexedCustomAction) hashString() string {
|
|||
return "" + this.actionType + this.offset + this.action
|
||||
}
|
||||
|
||||
func (this *LexerIndexedCustomAction) equals(other *LexerAction) bool {
|
||||
func (this *LexerIndexedCustomAction) equals(other ILexerAction) bool {
|
||||
if (this == other) {
|
||||
return true
|
||||
} else if _, ok := other.(*LexerIndexedCustomAction); !ok {
|
||||
|
|
|
@ -9,7 +9,7 @@ package antlr4
|
|||
|
||||
type LexerActionExecutor struct {
|
||||
lexerActions []*LexerAction
|
||||
hashString string
|
||||
cachedHashString string
|
||||
}
|
||||
|
||||
func NewLexerActionExecutor(lexerActions []*LexerAction) *LexerActionExecutor {
|
||||
|
@ -30,7 +30,7 @@ func NewLexerActionExecutor(lexerActions []*LexerAction) *LexerActionExecutor {
|
|||
s += a.hashString()
|
||||
}
|
||||
|
||||
this.hashString = s // "".join([str(la) for la in
|
||||
this.cachedHashString = s // "".join([str(la) for la in
|
||||
|
||||
return this
|
||||
}
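The hashString field is renamed to cachedHashString most likely because LexerActionExecutor also defines a hashString() method, and Go rejects a type that has both a field and a method of the same name. A minimal illustration of the clash and of the rename, using a made-up executor type:

package main

import "fmt"

// This version does not compile: field and method share the name hashString.
//
//	type executor struct {
//		hashString string
//	}
//	func (e *executor) hashString() string { return e.hashString }

// Renaming the field, as the diff does, resolves the clash.
type executor struct {
	cachedHashString string
}

func (e *executor) hashString() string { return e.cachedHashString }

func main() {
	e := &executor{cachedHashString: "abc"}
	fmt.Println(e.hashString())
}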
|
||||
|
|
|
@ -27,14 +27,13 @@ func (this *TraceListener) exitEveryRule(ctx *ParserRuleContext) {
|
|||
fmt.Println("exit " + this.parser.getRuleNames()[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text())
|
||||
}
|
||||
|
||||
|
||||
type Parser struct {
|
||||
Recognizer
|
||||
|
||||
_input TokenStream
|
||||
_errHandler ErrorStrategy
|
||||
_precedenceStack IntStack
|
||||
_ctx ParserRuleContext
|
||||
_ctx *ParserRuleContext
|
||||
buildParseTrees bool
|
||||
_tracer bool
|
||||
_parseListeners []*ParseTreeListener
|
||||
|
@ -276,12 +275,12 @@ func (this *Parser) getATN() *ATN {
|
|||
return this._interp.atn
|
||||
}
|
||||
|
||||
func (p *Parser) getTokenFactory() *TokenFactory {
|
||||
func (p *Parser) getTokenFactory() TokenFactory {
|
||||
return (*p._input.getTokenSource()).getTokenFactory()
|
||||
}
|
||||
|
||||
// Tell our token source and error strategy about a new way to create tokens.//
|
||||
func (p *Parser) setTokenFactory(factory *TokenFactory) {
|
||||
func (p *Parser) setTokenFactory(factory TokenFactory) {
|
||||
(*p._input.getTokenSource()).setTokenFactory( factory )
|
||||
}
|
||||
|
||||
|
@ -321,11 +320,11 @@ func (p *Parser) getATNWithBypassAlts() {
|
|||
// String id = m.get("ID")
|
||||
// </pre>
|
||||
|
||||
func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer) {
|
||||
func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer ILexer) {
|
||||
|
||||
if (lexer == nil) {
|
||||
if (p.getTokenStream() != nil) {
|
||||
var tokenSource = (*p.getTokenStream()).getTokenSource()
|
||||
var tokenSource = p.getTokenStream().getTokenSource()
|
||||
if _, ok := tokenSource.(Lexer); ok {
|
||||
lexer = tokenSource
|
||||
}
|
||||
|
@ -341,19 +340,19 @@ func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer
|
|||
}
|
||||
|
||||
func (p *Parser) getInputStream() *InputStream {
|
||||
return p.getTokenStream()
|
||||
return p.getTokenStream().(*InputStream)
|
||||
}
|
||||
|
||||
func (p *Parser) setInputStream(input *TokenStream) {
|
||||
func (p *Parser) setInputStream(input TokenStream) {
|
||||
p.setTokenStream(input)
|
||||
}
|
||||
|
||||
func (p *Parser) getTokenStream() *TokenStream {
|
||||
func (p *Parser) getTokenStream() TokenStream {
|
||||
return p._input
|
||||
}
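getInputStream now narrows the TokenStream interface back to a concrete *InputStream with a type assertion. A plain assertion panics if the stream is some other implementation, so the comma-ok form is the defensive alternative; a small sketch with stand-in types:

package main

import "fmt"

type TokenStream interface {
	index() int
}

type InputStream struct {
	pos int
}

func (s *InputStream) index() int { return s.pos }

func main() {
	var ts TokenStream = &InputStream{pos: 3}

	// ts.(*InputStream) alone panics when the dynamic type differs;
	// the two-value form reports the mismatch instead.
	if is, ok := ts.(*InputStream); ok {
		fmt.Println("concrete stream at index", is.index())
	} else {
		fmt.Println("not an *InputStream")
	}
}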
|
||||
|
||||
// Set the token stream and reset the parser.//
|
||||
func (p *Parser) setTokenStream(input *TokenStream) {
|
||||
func (p *Parser) setTokenStream(input TokenStream) {
|
||||
p._input = nil
|
||||
p.reset()
|
||||
p._input = input
|
||||
|
@ -366,7 +365,7 @@ func (p *Parser) getCurrentToken() *Token {
|
|||
return p._input.LT(1)
|
||||
}
|
||||
|
||||
func (p *Parser) notifyErrorListeners(msg string, offendingToken *Token, err *RecognitionException) {
|
||||
func (p *Parser) notifyErrorListeners(msg string, offendingToken *Token, err IRecognitionException) {
|
||||
offendingToken = offendingToken || nil
|
||||
err = err || nil
|
||||
if (offendingToken == nil) {
|
||||
|
@ -566,7 +565,7 @@ func (p *Parser) inContext(context *ParserRuleContext) bool {
|
|||
// @return {@code true} if {@code symbol} can follow the current state in
|
||||
// the ATN, otherwise {@code false}.
|
||||
|
||||
func (p *Parser) isExpectedToken(symbol *Token) bool {
|
||||
func (p *Parser) isExpectedToken(symbol int) bool {
|
||||
var atn *ATN = p._interp.atn
|
||||
var ctx = p._ctx
|
||||
var s = atn.states[p.state]
|
||||
|
@ -579,7 +578,7 @@ func (p *Parser) isExpectedToken(symbol *Token) bool {
|
|||
}
|
||||
for (ctx != nil && ctx.invokingState >= 0 && following.contains(TokenEpsilon)) {
|
||||
var invokingState = atn.states[ctx.invokingState]
|
||||
var rt = invokingState.transitions[0]
|
||||
var rt = invokingState.getTransitions()[0]
|
||||
following = atn.nextTokens(rt.(*RuleTransition).followState,nil)
|
||||
if (following.contains(symbol)) {
|
||||
return true
|
||||
|
|
|
@ -250,7 +250,7 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
|
|||
input.seek(startIndex)
|
||||
var alts = this.evalSemanticContext(D.predicates, outerContext, true)
|
||||
if (len(alts)==0) {
|
||||
panic this.noViableAlt(input, outerContext, D.configs, startIndex)
|
||||
panic(this.noViableAlt(input, outerContext, D.configs, startIndex))
|
||||
} else if (len(alts)==1) {
|
||||
return alts.minValue()
|
||||
} else {
|
||||
|
@ -650,7 +650,7 @@ func (this *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs *ATNC
|
|||
return result
|
||||
}
|
||||
|
||||
func (this *ParserATNSimulator) computeStartState(p *ATNState, ctx *RuleContext, fullCtx bool) *ATNConfigSet {
|
||||
func (this *ParserATNSimulator) computeStartState(p IATNState, ctx *RuleContext, fullCtx bool) *ATNConfigSet {
|
||||
// always at least the implicit call to start rule
|
||||
var initialContext = predictionContextFromRuleContext(this.atn, ctx)
|
||||
var configs = NewATNConfigSet(fullCtx)
|
||||
|
@ -721,7 +721,7 @@ func (this *ParserATNSimulator) computeStartState(p *ATNState, ctx *RuleContext,
|
|||
//
|
||||
func (this *ParserATNSimulator) applyPrecedenceFilter(configs *ATNConfigSet) *ATNConfigSet {
|
||||
|
||||
var statesFromAlt1 = map[int]*PredictionContext
|
||||
var statesFromAlt1 = make(map[int]IPredictionContext)
|
||||
var configSet = NewATNConfigSet(configs.fullCtx)
|
||||
|
||||
for i:=0; i<len(configs.configs); i++ {
|
||||
|
@ -763,7 +763,7 @@ func (this *ParserATNSimulator) applyPrecedenceFilter(configs *ATNConfigSet) *AT
|
|||
return configSet
|
||||
}
|
||||
|
||||
func (this *ParserATNSimulator) getReachableTarget(trans *Transition, ttype int) *ATNState {
|
||||
func (this *ParserATNSimulator) getReachableTarget(trans ITransition, ttype int) IATNState {
|
||||
if (trans.matches(ttype, 0, this.atn.maxTokenType)) {
|
||||
return trans.target
|
||||
} else {
|
||||
|
@ -771,9 +771,9 @@ func (this *ParserATNSimulator) getReachableTarget(trans *Transition, ttype int)
|
|||
}
|
||||
}
|
||||
|
||||
func (this *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs *ATNConfigSet, nalts int) []*SemanticContext {
|
||||
func (this *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs *ATNConfigSet, nalts int) []SemanticContext {
|
||||
|
||||
var altToPred = make([]*SemanticContext, nalts + 1)
|
||||
var altToPred = make([]SemanticContext, nalts + 1)
|
||||
for i:=0; i<len(configs.configs); i++ {
|
||||
var c = configs.configs[i]
|
||||
if(ambigAlts.contains( c.alt )) {
|
||||
|
@ -929,7 +929,7 @@ func (this *ParserATNSimulator) splitAccordingToSemanticValidity( configs *ATNCo
|
|||
succeeded.add(c,nil)
|
||||
}
|
||||
}
|
||||
return [succeeded, failed]
|
||||
return []*ATNConfigSet{succeeded, failed}
|
||||
}
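The JavaScript array literal [succeeded, failed] becomes a typed slice literal in Go. A sketch of the same shape with placeholder types; returning two named values would be an equally idiomatic alternative:

package main

import "fmt"

type configSet struct {
	name string
}

// splitConfigs mimics the pair-in-a-slice return shape used above.
func splitConfigs() []*configSet {
	succeeded := &configSet{name: "succeeded"}
	failed := &configSet{name: "failed"}
	return []*configSet{succeeded, failed}
}

func main() {
	pair := splitConfigs()
	fmt.Println(pair[0].name, pair[1].name)
}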
|
||||
|
||||
// Look through a list of predicate/alt pairs, returning alts for the
|
||||
|
@ -983,7 +983,7 @@ func (this *ParserATNSimulator) closure(config *ATNConfig, configs *ATNConfigSet
|
|||
func (this *ParserATNSimulator) closureCheckingStopState(config *ATNConfig, configs *ATNConfigSet, closureBusy Set, collectPredicates, fullCtx bool, depth int, treatEofAsEpsilon bool) {
|
||||
|
||||
if (ParserATNSimulatorprototypedebug) {
|
||||
fmt.Println("closure(" + config.toString(this.parser,true) + ")")
|
||||
fmt.Println("closure(" + config.toString() + ")") //config.toString(this.parser,true) + ")")
|
||||
fmt.Println("configs(" + configs.toString() + ")")
|
||||
if(config.reachesIntoOuterContext>50) {
|
||||
panic("problem")
|
||||
|
@ -1099,7 +1099,7 @@ func (this *ParserATNSimulator) getRuleName( index int ) string {
|
|||
}
|
||||
}
|
||||
|
||||
func (this *ParserATNSimulator) getEpsilonTarget(config *ATNConfig, t *Transition, collectPredicates, inContext, fullCtx, treatEofAsEpsilon bool) *ATNConfig {
|
||||
func (this *ParserATNSimulator) getEpsilonTarget(config *ATNConfig, t ITransition, collectPredicates, inContext, fullCtx, treatEofAsEpsilon bool) *ATNConfig {
|
||||
|
||||
switch(t.serializationType) {
|
||||
case TransitionRULE:
|
||||
|
@ -1160,7 +1160,7 @@ func (this *ParserATNSimulator) precedenceTransition(config *ATNConfig,
|
|||
if (this.parser!=nil) {
|
||||
fmt.Println("context surrounding pred is " + fmt.Sprint(this.parser.getRuleInvocationStack()))
|
||||
}
|
||||
}
|
||||
}
|
||||
var c *ATNConfig = nil
|
||||
if (collectPredicates && inContext) {
|
||||
if (fullCtx) {
|
||||
|
@ -1388,7 +1388,7 @@ func (this *ParserATNSimulator) addDFAEdge(dfa *DFA, from_ *DFAState, t int, to
|
|||
return to
|
||||
}
|
||||
if (from_.edges==nil) {
|
||||
from_.edges = []
|
||||
from_.edges = make([]*DFAState, this.atn.maxTokenType+2) // room for index t+1, where t may be maxTokenType
|
||||
}
|
||||
from_.edges[t+1] = to // connect
|
||||
|
||||
|
|
|
@ -29,12 +29,13 @@ import (
|
|||
|
||||
|
||||
type ParserRuleContext struct {
|
||||
RuleContext
|
||||
*RuleContext
|
||||
|
||||
parentCtx *ParserRuleContext
|
||||
ruleIndex int
|
||||
children []RuleContext
|
||||
start, stop *Token
|
||||
exception *RecognitionException
|
||||
exception IRecognitionException
|
||||
}
|
||||
|
||||
func NewParserRuleContext(parent *ParserRuleContext, invokingStateNumber int) *ParserRuleContext {
|
||||
|
@ -119,13 +120,17 @@ func (prc *ParserRuleContext) addErrorNode(badToken *Token) *ErrorNodeImpl {
|
|||
return node
|
||||
}
|
||||
|
||||
func (prc *ParserRuleContext) getChild(i int, childType reflect.Type) {
|
||||
func (prc *ParserRuleContext) getChild(i int) Tree {
|
||||
if (prc.children != nil && len(prc.children) > i){
|
||||
return prc.children[i]
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func (prc *ParserRuleContext) getChildOfType(i int, childType reflect.Type) IRuleContext {
|
||||
if (childType == nil) {
|
||||
if (prc.children != nil && len(prc.children) >= i){
|
||||
return prc.children[i]
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
return prc.getChild(i)
|
||||
} else {
|
||||
for j :=0; j<len(prc.children); j++ {
|
||||
var child = prc.children[j]
|
||||
|
@ -197,7 +202,7 @@ func (prc *ParserRuleContext) getTypedRuleContexts(ctxType reflect.Type) []*inte
|
|||
// }
|
||||
}
|
||||
|
||||
func (prc *ParserRuleContext) getChildCount() {
|
||||
func (prc *ParserRuleContext) getChildCount() int {
|
||||
if (prc.children== nil) {
|
||||
return 0
|
||||
} else {
|
||||
|
@ -205,7 +210,7 @@ func (prc *ParserRuleContext) getChildCount() {
|
|||
}
|
||||
}
|
||||
|
||||
func (prc *ParserRuleContext) getSourceInterval() {
|
||||
func (prc *ParserRuleContext) getSourceInterval() *Interval {
|
||||
if( prc.start == nil || prc.stop == nil) {
|
||||
return TreeINVALID_INTERVAL
|
||||
} else {
|
||||
|
@ -213,7 +218,7 @@ func (prc *ParserRuleContext) getSourceInterval() {
|
|||
}
|
||||
}
|
||||
|
||||
var RuleContextEMPTY = NewParserRuleContext(nil, nil)
|
||||
var RuleContextEMPTY = NewParserRuleContext(nil, -1)
|
||||
|
||||
|
||||
type InterpreterRuleContext struct {
|
||||
|
|
|
@ -4,6 +4,15 @@ import (
|
|||
"fmt"
|
||||
)
|
||||
|
||||
type IPredictionContext interface {
|
||||
hashString() string
|
||||
getParent(int) IPredictionContext
|
||||
getReturnState(int) int
|
||||
equals(IPredictionContext) bool
|
||||
length() int
|
||||
isEmpty() bool
|
||||
}
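A rough sketch of how a concrete context node can satisfy this interface and be walked purely through it; singleton here is a made-up type, not the runtime's SingletonPredictionContext, and hashString/equals are omitted for brevity:

package main

import "fmt"

// Trimmed copy of the interface for the sketch.
type IPredictionContext interface {
	getParent(int) IPredictionContext
	getReturnState(int) int
	length() int
	isEmpty() bool
}

// singleton: one parent and one return state.
type singleton struct {
	parent      IPredictionContext
	returnState int
}

func (s *singleton) getParent(int) IPredictionContext { return s.parent }
func (s *singleton) getReturnState(int) int           { return s.returnState }
func (s *singleton) length() int                      { return 1 }
func (s *singleton) isEmpty() bool                    { return false }

func main() {
	// Chain 20 -> 10, then walk it using only the interface.
	ctx := IPredictionContext(&singleton{parent: &singleton{returnState: 10}, returnState: 20})
	for ctx != nil {
		fmt.Println(ctx.getReturnState(0))
		ctx = ctx.getParent(0)
	}
}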
|
||||
|
||||
type PredictionContext struct {
|
||||
cachedHashString string
|
||||
}
|
||||
|
@ -23,7 +32,7 @@ const (
|
|||
PredictionContextEMPTY_RETURN_STATE = 0x7FFFFFFF
|
||||
)
|
||||
|
||||
var PredictionContextEMPTY *PredictionContext = nil
|
||||
//var PredictionContextEMPTY *PredictionContext = nil
|
||||
|
||||
// Represents {@code $} in an array in full context mode, when {@code $}
|
||||
// doesn't mean wildcard: {@code $ + x = [$,x]}. Here,
|
||||
|
@ -67,7 +76,7 @@ func (this *PredictionContext) hasEmptyPath() {
|
|||
return this.getReturnState(this.length() - 1) == PredictionContextEMPTY_RETURN_STATE
|
||||
}
|
||||
|
||||
func (this *PredictionContext) hashString() {
|
||||
func (this *PredictionContext) hashString() string {
|
||||
return this.cachedHashString
|
||||
}
|
||||
|
||||
|
@ -79,7 +88,7 @@ func calculateEmptyHashString() string {
|
|||
return ""
|
||||
}
|
||||
|
||||
func (this *PredictionContext) getParent(index int) PredictionContext {
|
||||
func (this *PredictionContext) getParent(index int) IPredictionContext {
|
||||
panic("Not implemented")
|
||||
}
|
||||
|
||||
|
@ -96,12 +105,12 @@ func (this *PredictionContext) getReturnState(index int) int {
|
|||
// can be used for both lexers and parsers.
|
||||
|
||||
type PredictionContextCache struct {
|
||||
cache map[*PredictionContext]*PredictionContext
|
||||
cache map[IPredictionContext]IPredictionContext
|
||||
}
|
||||
|
||||
func NewPredictionContextCache() {
|
||||
t := new(PredictionContextCache)
|
||||
t.cache = make(map[*PredictionContext]*PredictionContext)
|
||||
t.cache = make(map[IPredictionContext]IPredictionContext)
|
||||
return t
|
||||
}
|
||||
|
||||
|
@ -109,11 +118,11 @@ func NewPredictionContextCache() {
|
|||
// return that one instead and do not add a new context to the cache.
|
||||
// Protect shared cache from unsafe thread access.
|
||||
//
|
||||
func (this *PredictionContextCache) add(ctx *PredictionContext) {
|
||||
func (this *PredictionContextCache) add(ctx IPredictionContext) {
|
||||
if (ctx == PredictionContextEMPTY) {
|
||||
return PredictionContextEMPTY
|
||||
}
|
||||
var existing = this.cache[ctx] || nil
|
||||
var existing = this.cache[ctx]
|
||||
if (existing != nil) {
|
||||
return existing
|
||||
}
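Dropping the "|| nil" reflects that a Go map lookup already yields the zero value (nil here) for a missing key; the comma-ok form is available when "missing" must be told apart from "stored as nil". A tiny self-contained example:

package main

import "fmt"

func main() {
	cache := map[string]*int{}

	// Missing key: the zero value (nil) comes back, no fallback expression needed.
	existing := cache["ctx"]
	fmt.Println(existing == nil) // true

	// Comma-ok distinguishes a missing key from a key stored with a nil value.
	if v, ok := cache["ctx"]; ok {
		fmt.Println("cached:", v)
	} else {
		fmt.Println("not cached")
	}
}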
|
||||
|
@ -121,7 +130,7 @@ func (this *PredictionContextCache) add(ctx *PredictionContext) {
|
|||
return ctx
|
||||
}
|
||||
|
||||
func (this *PredictionContextCache) get(ctx *PredictionContext) {
|
||||
func (this *PredictionContextCache) get(ctx IPredictionContext) {
|
||||
return this.cache[ctx]
|
||||
}
|
||||
|
||||
|
@ -132,23 +141,23 @@ func (this *PredictionContextCache) length() int {
|
|||
|
||||
type SingletonPredictionContext struct {
|
||||
PredictionContext
|
||||
parentCtx *PredictionContext
|
||||
parentCtx IPredictionContext
|
||||
returnState int
|
||||
}
|
||||
|
||||
func NewSingletonPredictionContext(parent *PredictionContext, returnState int) {
|
||||
func NewSingletonPredictionContext(parent IPredictionContext, returnState int) {
|
||||
s := new(SingletonPredictionContext)
|
||||
|
||||
// var hashString string
|
||||
//
|
||||
// if (parent != nil){
|
||||
// hashString = calculateHashString(parent, returnState)
|
||||
// } else {
|
||||
// hashString = calculateEmptyHashString()
|
||||
// }
|
||||
// var hashString string
|
||||
//
|
||||
// if (parent != nil){
|
||||
// hashString = calculateHashString(parent, returnState)
|
||||
// } else {
|
||||
// hashString = calculateEmptyHashString()
|
||||
// }
|
||||
|
||||
panic("Must initializer parent predicition context")
|
||||
// PredictionContext.call(s, hashString)
|
||||
// PredictionContext.call(s, hashString)
|
||||
|
||||
s.parentCtx = parent
|
||||
s.returnState = returnState
|
||||
|
@ -156,7 +165,7 @@ func NewSingletonPredictionContext(parent *PredictionContext, returnState int) {
|
|||
return s
|
||||
}
|
||||
|
||||
func SingletonPredictionContextcreate(parent PredictionContext, returnState int) *SingletonPredictionContext {
|
||||
func SingletonPredictionContextcreate(parent IPredictionContext, returnState int) *SingletonPredictionContext {
|
||||
if (returnState == PredictionContextEMPTY_RETURN_STATE && parent == nil) {
|
||||
// someone can pass in the bits of an array ctx that mean $
|
||||
return PredictionContextEMPTY
|
||||
|
@ -169,7 +178,7 @@ func (this *SingletonPredictionContext) length() int {
|
|||
return 1
|
||||
}
|
||||
|
||||
func (this *SingletonPredictionContext) getParent(index int) PredictionContext {
|
||||
func (this *SingletonPredictionContext) getParent(index int) IPredictionContext {
|
||||
return this.parentCtx
|
||||
}
|
||||
|
||||
|
@ -177,7 +186,7 @@ func (this *SingletonPredictionContext) getReturnState(index int) int {
|
|||
return this.returnState
|
||||
}
|
||||
|
||||
func (this *SingletonPredictionContext) equals(other *PredictionContext) {
|
||||
func (this *SingletonPredictionContext) equals(other IPredictionContext) bool {
|
||||
if (this == other) {
|
||||
return true
|
||||
} else if _, ok := other.(*SingletonPredictionContext); !ok {
|
||||
|
@ -229,7 +238,7 @@ type EmptyPredictionContext struct {
|
|||
func NewEmptyPredictionContext() *EmptyPredictionContext {
|
||||
|
||||
panic("Must init SingletonPredictionContext")
|
||||
// SingletonPredictionContext.call(this, nil, PredictionContextEMPTY_RETURN_STATE)
|
||||
// SingletonPredictionContext.call(this, nil, PredictionContextEMPTY_RETURN_STATE)
|
||||
|
||||
p := new(EmptyPredictionContext)
|
||||
return p
|
||||
|
@ -259,11 +268,11 @@ var PredictionContextEMPTY = NewEmptyPredictionContext()
|
|||
|
||||
type ArrayPredictionContext struct {
|
||||
PredictionContext
|
||||
parents []*PredictionContext
|
||||
parents []IPredictionContext
|
||||
returnStates []int
|
||||
}
|
||||
|
||||
func NewArrayPredictionContext(parents []*PredictionContext, returnStates []int) *ArrayPredictionContext {
|
||||
func NewArrayPredictionContext(parents []IPredictionContext, returnStates []int) *ArrayPredictionContext {
|
||||
// Parent can be nil only if full ctx mode and we make an array
|
||||
// from {@link //EMPTY} and non-empty. We merge {@link //EMPTY} by using
|
||||
// nil parent and
|
||||
|
@ -272,8 +281,8 @@ func NewArrayPredictionContext(parents []*PredictionContext, returnStates []int)
|
|||
c := new(ArrayPredictionContext)
|
||||
|
||||
panic("Must init PredictionContext")
|
||||
// var hash = calculateHashString(parents, returnStates)
|
||||
// PredictionContext.call(c, hash)
|
||||
// var hash = calculateHashString(parents, returnStates)
|
||||
// PredictionContext.call(c, hash)
|
||||
c.parents = parents
|
||||
c.returnStates = returnStates
|
||||
|
||||
|
@ -290,7 +299,7 @@ func (this *ArrayPredictionContext) length() int {
|
|||
return len(this.returnStates)
|
||||
}
|
||||
|
||||
func (this *ArrayPredictionContext) getParent(index int) *PredictionContext {
|
||||
func (this *ArrayPredictionContext) getParent(index int) IPredictionContext {
|
||||
return this.parents[index]
|
||||
}
|
||||
|
||||
|
@ -298,7 +307,7 @@ func (this *ArrayPredictionContext) getReturnState(index int) int {
|
|||
return this.returnStates[index]
|
||||
}
|
||||
|
||||
func (this *ArrayPredictionContext) equals(other *PredictionContext) {
|
||||
func (this *ArrayPredictionContext) equals(other IPredictionContext) bool {
|
||||
if (this == other) {
|
||||
return true
|
||||
} else if _, ok := other.(*ArrayPredictionContext); !ok {
|
||||
|
@ -338,7 +347,7 @@ func (this *ArrayPredictionContext) toString() string {
|
|||
// Convert a {@link RuleContext} tree to a {@link PredictionContext} graph.
|
||||
// Return {@link //EMPTY} if {@code outerContext} is empty or nil.
|
||||
// /
|
||||
func predictionContextFromRuleContext(a *ATN, outerContext *RuleContext) *PredictionContext {
|
||||
func predictionContextFromRuleContext(a *ATN, outerContext *RuleContext) IPredictionContext {
|
||||
if (outerContext == nil) {
|
||||
outerContext = RuleContextEMPTY
|
||||
}
|
||||
|
@ -350,9 +359,9 @@ func predictionContextFromRuleContext(a *ATN, outerContext *RuleContext) *Predic
|
|||
// If we have a parent, convert it to a PredictionContext graph
|
||||
var parent = predictionContextFromRuleContext(a, outerContext.parentCtx)
|
||||
var state = a.states[outerContext.invokingState]
|
||||
var transition = state.transitions[0]
|
||||
var transition = state.getTransitions()[0]
|
||||
|
||||
return SingletonPredictionContextcreate(parent, transition.followState.stateNumber)
|
||||
return SingletonPredictionContextcreate(parent, transition.(*RuleTransition).followState.getStateNumber())
|
||||
}
|
||||
|
||||
func calculateListsHashString(parents []PredictionContext, returnStates []int) {
|
||||
|
@ -369,7 +378,7 @@ func calculateListsHashString(parents []PredictionContext, returnStates []int) {
|
|||
return s
|
||||
}
|
||||
|
||||
func merge(a, b *PredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) *PredictionContext {
|
||||
func merge(a, b IPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
|
||||
// share same graph if both same
|
||||
if (a == b) {
|
||||
return a
|
||||
|
@ -393,10 +402,10 @@ func merge(a, b *PredictionContext, rootIsWildcard bool, mergeCache *DoubleDict)
|
|||
}
|
||||
// convert singleton so both are arrays to normalize
|
||||
if _, ok := a.(SingletonPredictionContext); ok {
|
||||
a = NewArrayPredictionContext([]*PredictionContext{ a.getParent(0) }, []int{ a.getReturnState(0) })
|
||||
a = NewArrayPredictionContext([]IPredictionContext{ a.getParent(0) }, []int{ a.getReturnState(0) })
|
||||
}
|
||||
if _, ok := b.(SingletonPredictionContext); ok {
|
||||
b = NewArrayPredictionContext( []*PredictionContext{ b.getParent(0) }, []int{ b.getReturnState(0) })
|
||||
b = NewArrayPredictionContext( []IPredictionContext{ b.getParent(0) }, []int{ b.getReturnState(0) })
|
||||
}
|
||||
return mergeArrays(a, b, rootIsWildcard, mergeCache)
|
||||
}
|
||||
|
@ -432,7 +441,7 @@ func merge(a, b *PredictionContext, rootIsWildcard bool, mergeCache *DoubleDict)
|
|||
// otherwise false to indicate a full-context merge
|
||||
// @param mergeCache
|
||||
// /
|
||||
func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) *PredictionContext {
|
||||
func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
|
||||
if (mergeCache != nil) {
|
||||
var previous = mergeCache.get(a, b)
|
||||
if (previous != nil) {
|
||||
|
@ -472,10 +481,10 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
|
|||
return spc
|
||||
} else { // a != b payloads differ
|
||||
// see if we can collapse parents due to $+x parents if local ctx
|
||||
var singleParent *PredictionContext = nil
|
||||
var singleParent IPredictionContext = nil
|
||||
if (a == b || (a.parentCtx != nil && a.parentCtx == b.parentCtx)) { // ax +
|
||||
// bx =
|
||||
// [a,b]x
|
||||
// bx =
|
||||
// [a,b]x
|
||||
singleParent = a.parentCtx
|
||||
}
|
||||
if (singleParent != nil) { // parents are same
|
||||
|
@ -485,7 +494,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
|
|||
payloads[0] = b.returnState
|
||||
payloads[1] = a.returnState
|
||||
}
|
||||
var parents = []*PredictionContext{ singleParent, singleParent }
|
||||
var parents = []IPredictionContext{ singleParent, singleParent }
|
||||
var apc = NewArrayPredictionContext(parents, payloads)
|
||||
if (mergeCache != nil) {
|
||||
mergeCache.set(a, b, apc)
|
||||
|
@ -496,11 +505,11 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
|
|||
// into array can't merge.
|
||||
// ax + by = [ax,by]
|
||||
var payloads = []int{ a.returnState, b.returnState }
|
||||
var parents = []*PredictionContext{ a.parentCtx, b.parentCtx }
|
||||
var parents = []IPredictionContext{ a.parentCtx, b.parentCtx }
|
||||
if (a.returnState > b.returnState) { // sort by payload
|
||||
payloads[0] = b.returnState
|
||||
payloads[1] = a.returnState
|
||||
parents = []*PredictionContext{ b.parentCtx, a.parentCtx }
|
||||
parents = []IPredictionContext{ b.parentCtx, a.parentCtx }
|
||||
}
|
||||
var a_ = NewArrayPredictionContext(parents, payloads)
|
||||
if (mergeCache != nil) {
|
||||
|
@ -548,7 +557,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
|
|||
// @param rootIsWildcard {@code true} if this is a local-context merge,
|
||||
// otherwise false to indicate a full-context merge
|
||||
// /
|
||||
func mergeRoot(a, b *SingletonPredictionContext, rootIsWildcard bool) *PredictionContext {
|
||||
func mergeRoot(a, b *SingletonPredictionContext, rootIsWildcard bool) IPredictionContext {
|
||||
if (rootIsWildcard) {
|
||||
if (a == PredictionContextEMPTY) {
|
||||
return PredictionContextEMPTY // // + b =//
|
||||
|
@ -561,11 +570,11 @@ func mergeRoot(a, b *SingletonPredictionContext, rootIsWildcard bool) *Predictio
|
|||
return PredictionContextEMPTY // $ + $ = $
|
||||
} else if (a == PredictionContextEMPTY) { // $ + x = [$,x]
|
||||
var payloads = []int{ b.returnState, PredictionContextEMPTY_RETURN_STATE }
|
||||
var parents = []*PredictionContext{ b.parentCtx, nil }
|
||||
var parents = []IPredictionContext{ b.parentCtx, nil }
|
||||
return NewArrayPredictionContext(parents, payloads)
|
||||
} else if (b == PredictionContextEMPTY) { // x + $ = [$,x] ($ is always first if present)
|
||||
var payloads = []int{ a.returnState, PredictionContextEMPTY_RETURN_STATE }
|
||||
var parents = []*PredictionContext{ a.parentCtx, nil }
|
||||
var parents = []IPredictionContext{ a.parentCtx, nil }
|
||||
return NewArrayPredictionContext(parents, payloads)
|
||||
}
|
||||
}
|
||||
|
@ -592,7 +601,7 @@ func mergeRoot(a, b *SingletonPredictionContext, rootIsWildcard bool) *Predictio
|
|||
// {@link SingletonPredictionContext}.<br>
|
||||
// <embed src="images/ArrayMerge_EqualTop.svg" type="image/svg+xml"/></p>
|
||||
// /
|
||||
func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) *PredictionContext {
|
||||
func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
|
||||
if (mergeCache != nil) {
|
||||
var previous = mergeCache.get(a, b)
|
||||
if (previous != nil) {
|
||||
|
@ -609,7 +618,7 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
|
|||
var k = 0 // walks target M array
|
||||
|
||||
var mergedReturnStates = make([]int,0)
|
||||
var mergedParents = make([]*PredictionContext,0)
|
||||
var mergedParents = make([]IPredictionContext,0)
|
||||
// walk and merge to yield mergedParents, mergedReturnStates
|
||||
for i < len(a.returnStates) && j < len(b.returnStates) {
|
||||
var a_parent = a.parents[i]
|
||||
|
@ -620,8 +629,8 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
|
|||
// $+$ = $
|
||||
var bothDollars = payload == PredictionContextEMPTY_RETURN_STATE && a_parent == nil && b_parent == nil
|
||||
var ax_ax = (a_parent != nil && b_parent != nil && a_parent == b_parent) // ax+ax
|
||||
// ->
|
||||
// ax
|
||||
// ->
|
||||
// ax
|
||||
if (bothDollars || ax_ax) {
|
||||
mergedParents[k] = a_parent // choose left
|
||||
mergedReturnStates[k] = payload
|
||||
|
@ -698,8 +707,8 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
|
|||
// Make pass over all <em>M</em> {@code parents} merge any {@code equals()}
|
||||
// ones.
|
||||
// /
|
||||
func combineCommonParents(parents []*PredictionContext) {
|
||||
var uniqueParents = map[*PredictionContext]*PredictionContext
|
||||
func combineCommonParents(parents []IPredictionContext) {
|
||||
var uniqueParents = make(map[IPredictionContext]IPredictionContext)
|
||||
|
||||
for p := 0; p < len(parents); p++ {
|
||||
var parent = parents[p]
|
||||
|
@ -712,56 +721,56 @@ func combineCommonParents(parents []*PredictionContext) {
|
|||
}
|
||||
}
|
||||
|
||||
func getCachedPredictionContext(context *PredictionContext, contextCache *PredictionContextCache, visited map[*PredictionContext]*PredictionContext) *PredictionContext {
|
||||
func getCachedPredictionContext(context IPredictionContext, contextCache *PredictionContextCache, visited map[IPredictionContext]IPredictionContext) IPredictionContext {
|
||||
|
||||
panic("getCachedPredictionContext not implemented")
|
||||
|
||||
return nil
|
||||
// if (context.isEmpty()) {
|
||||
// return context
|
||||
// }
|
||||
// var existing = visited[context] || nil
|
||||
// if (existing != nil) {
|
||||
// return existing
|
||||
// }
|
||||
// existing = contextCache.get(context)
|
||||
// if (existing != nil) {
|
||||
// visited[context] = existing
|
||||
// return existing
|
||||
// }
|
||||
// var changed = false
|
||||
// var parents = []
|
||||
// for i := 0; i < len(parents); i++ {
|
||||
// var parent = getCachedPredictionContext(context.getParent(i), contextCache, visited)
|
||||
// if (changed || parent != context.getParent(i)) {
|
||||
// if (!changed) {
|
||||
// parents = []
|
||||
// for j := 0; j < len(context); j++ {
|
||||
// parents[j] = context.getParent(j)
|
||||
// }
|
||||
// changed = true
|
||||
// }
|
||||
// parents[i] = parent
|
||||
// }
|
||||
// }
|
||||
// if (!changed) {
|
||||
// contextCache.add(context)
|
||||
// visited[context] = context
|
||||
// return context
|
||||
// }
|
||||
// var updated = nil
|
||||
// if (parents.length == 0) {
|
||||
// updated = PredictionContextEMPTY
|
||||
// } else if (parents.length == 1) {
|
||||
// updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0))
|
||||
// } else {
|
||||
// updated = NewArrayPredictionContext(parents, context.returnStates)
|
||||
// }
|
||||
// contextCache.add(updated)
|
||||
// visited[updated] = updated
|
||||
// visited[context] = updated
|
||||
//
|
||||
// return updated
|
||||
// return context
|
||||
// }
|
||||
// var existing = visited[context] || nil
|
||||
// if (existing != nil) {
|
||||
// return existing
|
||||
// }
|
||||
// existing = contextCache.get(context)
|
||||
// if (existing != nil) {
|
||||
// visited[context] = existing
|
||||
// return existing
|
||||
// }
|
||||
// var changed = false
|
||||
// var parents = []
|
||||
// for i := 0; i < len(parents); i++ {
|
||||
// var parent = getCachedPredictionContext(context.getParent(i), contextCache, visited)
|
||||
// if (changed || parent != context.getParent(i)) {
|
||||
// if (!changed) {
|
||||
// parents = []
|
||||
// for j := 0; j < len(context); j++ {
|
||||
// parents[j] = context.getParent(j)
|
||||
// }
|
||||
// changed = true
|
||||
// }
|
||||
// parents[i] = parent
|
||||
// }
|
||||
// }
|
||||
// if (!changed) {
|
||||
// contextCache.add(context)
|
||||
// visited[context] = context
|
||||
// return context
|
||||
// }
|
||||
// var updated = nil
|
||||
// if (parents.length == 0) {
|
||||
// updated = PredictionContextEMPTY
|
||||
// } else if (parents.length == 1) {
|
||||
// updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0))
|
||||
// } else {
|
||||
// updated = NewArrayPredictionContext(parents, context.returnStates)
|
||||
// }
|
||||
// contextCache.add(updated)
|
||||
// visited[updated] = updated
|
||||
// visited[context] = updated
|
||||
//
|
||||
// return updated
|
||||
}
|
||||
|
||||
// ter's recursive version of Sam's getAllNodes()
|
||||
|
|
|
@ -5,6 +5,11 @@ import (
|
|||
"strings"
|
||||
)
|
||||
|
||||
type IRecognizer interface {
|
||||
getState() int
|
||||
getATN() *ATN
|
||||
}
|
||||
|
||||
type Recognizer struct {
|
||||
|
||||
_listeners []ParseTreeListener
|
||||
|
@ -59,6 +64,10 @@ func (this *Recognizer) getATN() *ATN {
|
|||
return this._interp.atn
|
||||
}
|
||||
|
||||
func (this *Recognizer) getState() int {
|
||||
return this.state
|
||||
}
|
||||
|
||||
//func (this *Recognizer) getTokenTypeMap() {
|
||||
// var tokenNames = this.getTokenNames()
|
||||
// if (tokenNames==nil) {
|
||||
|
@ -171,7 +180,7 @@ func (this *Recognizer) getTokenErrorDisplay(t *Token) string {
|
|||
return "'" + s + "'"
|
||||
}
|
||||
|
||||
func (this *Recognizer) getErrorListenerDispatch() *ErrorListener {
|
||||
func (this *Recognizer) getErrorListenerDispatch() IErrorListener {
|
||||
return NewProxyErrorListener(this._listeners)
|
||||
}
|
||||
|
||||
|
|
|
@ -24,17 +24,26 @@ import (
|
|||
// @see ParserRuleContext
|
||||
//
|
||||
|
||||
type RuleContext struct {
|
||||
type IRuleContext interface {
|
||||
RuleNode
|
||||
parentCtx *RuleContext
|
||||
invokingState int
|
||||
ruleIndex int
|
||||
children []*RuleContext
|
||||
|
||||
getInvokingState()int
|
||||
getRuleIndex()int
|
||||
getChildren()[]IRuleContext
|
||||
isEmpty() bool
|
||||
}
|
||||
|
||||
func NewRuleContext(parent *RuleContext, invokingState int) *RuleContext {
|
||||
type RuleContext struct {
|
||||
parentCtx IRuleContext
|
||||
invokingState int
|
||||
ruleIndex int
|
||||
children []IRuleContext
|
||||
}
|
||||
|
||||
func NewRuleContext(parent IRuleContext, invokingState int) *RuleContext {
|
||||
|
||||
rn := new(RuleContext)
|
||||
|
||||
rn := &RuleContext{RuleNode{}}
|
||||
rn.InitRuleContext(parent, invokingState)
|
||||
|
||||
return rn
|
||||
|
@ -55,6 +64,22 @@ func (rn *RuleContext) InitRuleContext(parent *RuleContext, invokingState int) {
|
|||
}
|
||||
}
|
||||
|
||||
func (this *RuleContext) getParent() Tree {
|
||||
return this.parentCtx
|
||||
}
|
||||
|
||||
func (this *RuleContext) getInvokingState() int {
|
||||
return this.invokingState
|
||||
}
|
||||
|
||||
func (this *RuleContext) getRuleIndex() int{
|
||||
return this.ruleIndex
|
||||
}
|
||||
|
||||
func (this *RuleContext) getChildren() []IRuleContext {
|
||||
return this.children
|
||||
}
|
||||
|
||||
func (this *RuleContext) depth() {
|
||||
var n = 0
|
||||
var p = this
|
||||
|
@ -77,11 +102,11 @@ func (this *RuleContext) getSourceInterval() *Interval {
|
|||
return TreeINVALID_INTERVAL
|
||||
}
|
||||
|
||||
func (this *RuleContext) getRuleContext() *RuleContext {
|
||||
func (this *RuleContext) getRuleContext() IRuleContext {
|
||||
return this
|
||||
}
|
||||
|
||||
func (this *RuleContext) getPayload() *RuleContext {
|
||||
func (this *RuleContext) getPayload() interface{} {
|
||||
return this
|
||||
}
|
||||
|
||||
|
@ -123,7 +148,7 @@ func (this *RuleContext) accept(visitor *ParseTreeVisitor) {
|
|||
// (root child1 .. childN). Print just a node if this is a leaf.
|
||||
//
|
||||
|
||||
func (this *RuleContext) toStringTree(ruleNames []string, recog *Recognizer) string {
|
||||
func (this *RuleContext) toStringTree(ruleNames []string, recog IRecognizer) string {
|
||||
return TreestoStringTree(this, ruleNames, recog)
|
||||
}
|
||||
|
||||
|
|
|
@ -12,13 +12,13 @@ import (
|
|||
//
|
||||
|
||||
type SemanticContext interface {
|
||||
evaluate(parser *Recognizer, outerContext *RuleContext) bool
|
||||
evalPrecedence(parser *Recognizer, outerContext *RuleContext) *SemanticContext
|
||||
evaluate(parser IRecognizer, outerContext *RuleContext) bool
|
||||
evalPrecedence(parser IRecognizer, outerContext *RuleContext) SemanticContext
|
||||
equals(interface{}) bool
|
||||
toString() string
|
||||
}
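With SemanticContext as a plain interface value, nil and the NONE sentinel can be compared directly, which is what the andContext/orContext helpers below rely on. A compressed sketch of the short-circuit rules, using stand-in types rather than the real Predicate/AND/OR:

package main

import "fmt"

// Reduced interface: the real one also takes a recognizer and rule context.
type SemanticContext interface {
	evaluate() bool
	toString() string
}

type predicate struct {
	result bool
}

func (p *predicate) evaluate() bool   { return p.result }
func (p *predicate) toString() string { return fmt.Sprintf("{%v}?", p.result) }

// NONE plays the role of SemanticContextNONE, the always-true predicate.
var NONE SemanticContext = &predicate{result: true}

// andContext keeps the identity rules: nil or NONE contributes nothing.
func andContext(a, b SemanticContext) SemanticContext {
	if a == nil || a == NONE {
		return b
	}
	if b == nil || b == NONE {
		return a
	}
	// The runtime would build an AND node here; the sketch just keeps a.
	return a
}

func main() {
	p := &predicate{result: false}
	fmt.Println(andContext(NONE, p).toString()) // {false}?
}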
|
||||
|
||||
func SemanticContextandContext(a, b *SemanticContext) *SemanticContext {
|
||||
func SemanticContextandContext(a, b SemanticContext) SemanticContext {
|
||||
if (a == nil || a == SemanticContextNONE) {
|
||||
return b
|
||||
}
|
||||
|
@ -33,7 +33,7 @@ func SemanticContextandContext(a, b *SemanticContext) *SemanticContext {
|
|||
}
|
||||
}
|
||||
|
||||
func SemanticContextorContext(a, b *SemanticContext) *SemanticContext {
|
||||
func SemanticContextorContext(a, b SemanticContext) SemanticContext {
|
||||
if (a == nil) {
|
||||
return b
|
||||
}
|
||||
|
@ -43,7 +43,7 @@ func SemanticContextorContext(a, b *SemanticContext) *SemanticContext {
|
|||
if (a == SemanticContextNONE || b == SemanticContextNONE) {
|
||||
return SemanticContextNONE
|
||||
}
|
||||
var result = NewOR(a, b *SemanticContext)
|
||||
var result = NewOR(a, b)
|
||||
if ( len(result.opnds) == 1) {
|
||||
return result.opnds[0]
|
||||
} else {
|
||||
|
@ -51,6 +51,9 @@ func SemanticContextorContext(a, b *SemanticContext) *SemanticContext {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
type Predicate struct {
|
||||
ruleIndex int
|
||||
predIndex int
|
||||
|
@ -69,9 +72,9 @@ func NewPredicate(ruleIndex, predIndex int, isCtxDependent bool) *Predicate {
|
|||
//The default {@link SemanticContext}, which is semantically equivalent to
|
||||
//a predicate of the form {@code {true}?}.
|
||||
|
||||
var SemanticContextNONE = NewPredicate(-1,-1,false)
|
||||
var SemanticContextNONE SemanticContext = NewPredicate(-1,-1,false)
|
||||
|
||||
func (this *Predicate) evaluate(parser *Recognizer, outerContext *RuleContext) *SemanticContext {
|
||||
func (this *Predicate) evaluate(parser IRecognizer, outerContext *RuleContext) bool {
|
||||
|
||||
var localctx *RuleContext = nil
|
||||
|
||||
|
@ -116,11 +119,11 @@ func NewPrecedencePredicate(precedence int) *PrecedencePredicate {
|
|||
return this
|
||||
}
|
||||
|
||||
func (this *PrecedencePredicate) evaluate(parser *Recognizer, outerContext *RuleContext) *SemanticContext {
|
||||
func (this *PrecedencePredicate) evaluate(parser IRecognizer, outerContext *RuleContext) bool {
|
||||
return parser.precpred(outerContext, this.precedence)
|
||||
}
|
||||
|
||||
func (this *PrecedencePredicate) evalPrecedence(parser *Recognizer, outerContext *RuleContext) *SemanticContext {
|
||||
func (this *PrecedencePredicate) evalPrecedence(parser IRecognizer, outerContext *RuleContext) SemanticContext {
|
||||
if (parser.precpred(outerContext, this.precedence)) {
|
||||
return SemanticContextNONE
|
||||
} else {
|
||||
|
@ -170,10 +173,10 @@ func PrecedencePredicatefilterPrecedencePredicates(set *Set) []*PrecedencePredic
|
|||
type AND struct {
|
||||
SemanticContext
|
||||
|
||||
opnds []*SemanticContext
|
||||
opnds []SemanticContext
|
||||
}
|
||||
|
||||
func NewAND(a, b *SemanticContext) *AND {
|
||||
func NewAND(a, b SemanticContext) *AND {
|
||||
|
||||
var operands = NewSet(nil,nil)
|
||||
if aa, ok := a.(*AND); ok {
|
||||
|
@ -230,18 +233,18 @@ func (this *AND) hashString() {
|
|||
// The evaluation of predicates by this context is short-circuiting, but
|
||||
// unordered.</p>
|
||||
//
|
||||
func (this *AND) evaluate(parser *Recognizer, outerContext *RuleContext) *SemanticContext {
|
||||
func (this *AND) evaluate(parser IRecognizer, outerContext *RuleContext) bool {
|
||||
for i := 0; i < len(this.opnds); i++ {
|
||||
if (!this.opnds[i].evaluate(parser *Recognizer, outerContext *RuleContext)) {
|
||||
if (!this.opnds[i].evaluate(parser, outerContext)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (this *AND) evalPrecedence(parser *Recognizer, outerContext *RuleContext) *SemanticContext {
|
||||
func (this *AND) evalPrecedence(parser IRecognizer, outerContext *RuleContext) SemanticContext {
|
||||
var differs = false
|
||||
var operands = make([]*SemanticContext)
|
||||
var operands = make([]SemanticContext, 0)
|
||||
|
||||
for i := 0; i < len(this.opnds); i++ {
|
||||
var context = this.opnds[i]
|
||||
|
@ -264,7 +267,7 @@ func (this *AND) evalPrecedence(parser *Recognizer, outerContext *RuleContext) *
|
|||
return SemanticContextNONE
|
||||
}
|
||||
|
||||
var result *SemanticContext = nil
|
||||
var result SemanticContext = nil
|
||||
|
||||
for _,o := range operands {
|
||||
if (result == nil){
|
||||
|
@ -299,10 +302,10 @@ func (this *AND) toString() string {
|
|||
type OR struct {
|
||||
SemanticContext
|
||||
|
||||
opnds []*SemanticContext
|
||||
opnds []SemanticContext
|
||||
}
|
||||
|
||||
func NewOR(a, b *SemanticContext) *OR {
|
||||
func NewOR(a, b SemanticContext) *OR {
|
||||
var operands = NewSet(nil,nil)
|
||||
if aa, ok := a.(*OR); ok {
|
||||
for _, o := range aa.opnds {
|
||||
|
@ -357,21 +360,21 @@ func (this *OR) hashString() {
|
|||
// The evaluation of predicates by this context is short-circuiting, but
|
||||
// unordered.</p>
|
||||
//
|
||||
func (this *OR) evaluate(parser *Recognizer, outerContext *RuleContext) *SemanticContext {
|
||||
func (this *OR) evaluate(parser IRecognizer, outerContext *RuleContext) bool {
|
||||
for i := 0; i < len(this.opnds); i++ {
|
||||
if (this.opnds[i].evaluate(parser *Recognizer, outerContext *RuleContext)) {
|
||||
if (this.opnds[i].evaluate(parser, outerContext)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (this *OR) evalPrecedence(parser *Recognizer, outerContext *RuleContext) *SemanticContext {
|
||||
func (this *OR) evalPrecedence(parser IRecognizer, outerContext *RuleContext) SemanticContext {
|
||||
var differs = false
|
||||
var operands = make([]*SemanticContext)
|
||||
var operands = make([]SemanticContext, 0)
|
||||
for i := 0; i < len(this.opnds); i++ {
|
||||
var context = this.opnds[i]
|
||||
var evaluated = context.evalPrecedence(parser *Recognizer, outerContext *RuleContext)
|
||||
var evaluated = context.evalPrecedence(parser, outerContext)
|
||||
differs |= (evaluated != context)
|
||||
if (evaluated == SemanticContextNONE) {
|
||||
// The OR context is true if any element is true
|
||||
|
@ -388,7 +391,7 @@ func (this *OR) evalPrecedence(parser *Recognizer, outerContext *RuleContext) *S
|
|||
// all elements were false, so the OR context is false
|
||||
return nil
|
||||
}
|
||||
var result *SemanticContext = nil
|
||||
var result SemanticContext = nil
|
||||
|
||||
for _,o := range operands {
|
||||
if (result == nil) {
|
||||
|
|
|
@ -5,7 +5,7 @@ import (
|
|||
)
|
||||
|
||||
type TokenSourceInputStreamPair struct {
|
||||
tokenSource *TokenSource
|
||||
tokenSource TokenSource
|
||||
inputStream *InputStream
|
||||
}
|
||||
|
||||
|
@ -65,7 +65,7 @@ func (this *Token) setText(s string) {
|
|||
this._text = s
|
||||
}
|
||||
|
||||
func (this *Token) getTokenSource() *TokenSource {
|
||||
func (this *Token) getTokenSource() TokenSource {
|
||||
return this.source.tokenSource
|
||||
}
|
||||
|
||||
|
@ -74,7 +74,7 @@ func (this *Token) getInputStream() *InputStream {
|
|||
}
|
||||
|
||||
type CommonToken struct {
|
||||
Token
|
||||
*Token
|
||||
}
|
||||
|
||||
func NewCommonToken(source *TokenSourceInputStreamPair, tokenType, channel, start, stop int) *CommonToken {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
package antlr4
|
||||
import (
|
||||
"fmt"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// atom, set, epsilon, action, predicate, rule transitions.
|
||||
|
@ -12,34 +12,61 @@ import (
|
|||
// the states. We'll use the term Edge for the DFA to distinguish them from
|
||||
// ATN transitions.</p>
|
||||
|
||||
//func ([A-Z]+Transition)[ ]?\([A-Za-z, ]+\) \*([A-Z]+Transition) {\n\tTransition\.call\(t, target\)
|
||||
type ITransition interface {
|
||||
getTarget() IATNState
|
||||
setTarget(IATNState)
|
||||
getIsEpsilon() bool
|
||||
getLabel() *IntervalSet
|
||||
getSerializationType() int
|
||||
matches( int, int, int ) bool
|
||||
}
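A small sketch of two transition kinds satisfying a cut-down version of this interface, to show how the simulators can treat them uniformly; the types below are illustrative only:

package main

import "fmt"

// Cut-down interface: just enough for dispatch.
type ITransition interface {
	getIsEpsilon() bool
	matches(symbol, minVocab, maxVocab int) bool
}

type epsilonTransition struct{}

func (epsilonTransition) getIsEpsilon() bool         { return true }
func (epsilonTransition) matches(int, int, int) bool { return false }

type atomTransition struct {
	label int
}

func (a atomTransition) getIsEpsilon() bool { return false }
func (a atomTransition) matches(symbol, minVocab, maxVocab int) bool {
	return symbol == a.label
}

func main() {
	transitions := []ITransition{epsilonTransition{}, atomTransition{label: 42}}
	for _, t := range transitions {
		fmt.Println(t.getIsEpsilon(), t.matches(42, 0, 0xFFFF))
	}
}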
|
||||
|
||||
type Transition struct {
|
||||
target *ATNState
|
||||
target IATNState
|
||||
isEpsilon bool
|
||||
label *IntervalSet
|
||||
serializationType int
|
||||
}
|
||||
|
||||
func Transition (target *ATNState) *Transition {
|
||||
func NewTransition (target IATNState) *Transition {
|
||||
|
||||
if (target == nil) {
|
||||
panic("target cannot be nil.")
|
||||
}
|
||||
panic("target cannot be nil.")
|
||||
}
|
||||
|
||||
t := new(Transition)
|
||||
t.InitTransition(target)
|
||||
|
||||
return t
|
||||
return t
|
||||
}
|
||||
|
||||
func (t *Transition) InitTransition(target *ATNState) {
|
||||
func (t *Transition) InitTransition(target IATNState) {
|
||||
t.target = target
|
||||
// Are we epsilon, action, sempred?
|
||||
t.isEpsilon = false
|
||||
t.label = nil
|
||||
}
|
||||
|
||||
func (t *Transition) getTarget() IATNState {
|
||||
return t.target
|
||||
}
|
||||
|
||||
func (t *Transition) setTarget(s IATNState) {
|
||||
t.target = s
|
||||
}
|
||||
|
||||
func (t *Transition) getIsEpsilon() bool {
|
||||
return t.isEpsilon
|
||||
}
|
||||
|
||||
func (t *Transition) getLabel() *IntervalSet {
|
||||
return t.label
|
||||
}
|
||||
|
||||
func (t *Transition) getSerializationType() int {
|
||||
return t.serializationType
|
||||
}
|
||||
|
||||
func (t *Transition) matches( symbol, minVocabSymbol, maxVocabSymbol int ) bool {
|
||||
panic("Not implemented")
|
||||
}
|
||||
|
@ -104,26 +131,26 @@ type AtomTransition struct {
|
|||
label *IntervalSet
|
||||
}
|
||||
|
||||
func NewAtomTransition ( target *ATNState, label int ) *AtomTransition {
|
||||
func NewAtomTransition ( target IATNState, label int ) *AtomTransition {
|
||||
|
||||
t := new(AtomTransition)
|
||||
t.InitTransition( target )
|
||||
|
||||
t.label_ = label // The token type or character value or, signifies special label.
|
||||
t.label = t.makeLabel()
|
||||
t.serializationType = TransitionATOM
|
||||
|
||||
return t
|
||||
t.label = t.makeLabel()
|
||||
t.serializationType = TransitionATOM
|
||||
|
||||
return t
|
||||
}
|
||||
|
||||
func (t *AtomTransition) makeLabel() *IntervalSet {
|
||||
var s = NewIntervalSet()
|
||||
s.addOne(t.label_)
|
||||
return s
|
||||
s.addOne(t.label_)
|
||||
return s
|
||||
}
|
||||
|
||||
func (t *AtomTransition) matches( symbol, minVocabSymbol, maxVocabSymbol int ) bool {
|
||||
return t.label_ == symbol
|
||||
return t.label_ == symbol
|
||||
}
|
||||
|
||||
func (t *AtomTransition) toString() string {
|
||||
|
@ -132,23 +159,24 @@ func (t *AtomTransition) toString() string {
|
|||
|
||||
type RuleTransition struct {
|
||||
Transition
|
||||
followState *ATNState
|
||||
|
||||
followState IATNState
|
||||
ruleIndex, precedence int
|
||||
|
||||
}
|
||||
|
||||
func NewRuleTransition ( ruleStart *ATNState, ruleIndex, precedence int, followState *RuleTransition ) *RuleTransition {
|
||||
func NewRuleTransition ( ruleStart IATNState, ruleIndex, precedence int, followState IATNState ) *RuleTransition {
|
||||
|
||||
t := new(RuleTransition)
|
||||
t.InitTransition( ruleStart )
|
||||
|
||||
t.ruleIndex = ruleIndex
|
||||
t.ruleIndex = ruleIndex
|
||||
t.precedence = precedence
|
||||
t.followState = followState
|
||||
t.followState = followState
|
||||
t.serializationType = TransitionRULE
|
||||
t.isEpsilon = true
|
||||
t.isEpsilon = true
|
||||
|
||||
return t
|
||||
return t
|
||||
}
|
||||
|
||||
|
||||
|
@ -164,19 +192,19 @@ type EpsilonTransition struct {
|
|||
outermostPrecedenceReturn int
|
||||
}
|
||||
|
||||
func NewEpsilonTransition ( target *ATNState, outermostPrecedenceReturn int ) *EpsilonTransition {
|
||||
func NewEpsilonTransition ( target IATNState, outermostPrecedenceReturn int ) *EpsilonTransition {
|
||||
|
||||
t := new(EpsilonTransition)
|
||||
t.InitTransition( target )
|
||||
|
||||
t.serializationType = TransitionEPSILON
|
||||
t.isEpsilon = true
|
||||
t.outermostPrecedenceReturn = outermostPrecedenceReturn
|
||||
return t
|
||||
t.serializationType = TransitionEPSILON
|
||||
t.isEpsilon = true
|
||||
t.outermostPrecedenceReturn = outermostPrecedenceReturn
|
||||
return t
|
||||
}
|
||||
|
||||
|
||||
func (t *EpsilonTransition) matches( symbol, minVocabSymbol, maxVocabSymbol int ) {
|
||||
func (t *EpsilonTransition) matches( symbol, minVocabSymbol, maxVocabSymbol int ) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
|
@ -190,26 +218,26 @@ type RangeTransition struct {
|
|||
start, stop int
|
||||
}
|
||||
|
||||
func NewRangeTransition ( target *ATNState, start, stop int ) *RangeTransition {
|
||||
func NewRangeTransition ( target IATNState, start, stop int ) *RangeTransition {
|
||||
|
||||
t := new(RangeTransition)
|
||||
t.InitTransition( target )
|
||||
|
||||
t.serializationType = TransitionRANGE
|
||||
t.start = start
|
||||
t.stop = stop
|
||||
t.label = t.makeLabel()
|
||||
return t
|
||||
t.start = start
|
||||
t.stop = stop
|
||||
t.label = t.makeLabel()
|
||||
return t
|
||||
}
|
||||
|
||||
|
||||
func (t *RangeTransition) makeLabel() *IntervalSet {
|
||||
var s = NewIntervalSet()
|
||||
s.addRange(t.start, t.stop)
|
||||
return s
|
||||
var s = NewIntervalSet()
|
||||
s.addRange(t.start, t.stop)
|
||||
return s
|
||||
}
|
||||
|
||||
func (t *RangeTransition) matches(symbol, minVocabSymbol, maxVocabSymbol int) {
|
||||
func (t *RangeTransition) matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
|
||||
return symbol >= t.start && symbol <= t.stop
|
||||
}
|
||||
|
||||
|
@ -221,7 +249,7 @@ type AbstractPredicateTransition struct {
|
|||
Transition
|
||||
}
|
||||
|
||||
func NewAbstractPredicateTransition ( target *ATNState ) *AbstractPredicateTransition {
|
||||
func NewAbstractPredicateTransition ( target IATNState ) *AbstractPredicateTransition {
|
||||
|
||||
t := new(AbstractPredicateTransition)
|
||||
t.InitTransition( target )
|
||||
|
@ -236,17 +264,17 @@ type PredicateTransition struct {
	ruleIndex, predIndex int
}

func NewPredicateTransition ( target *ATNState, ruleIndex, predIndex int, isCtxDependent bool ) *PredicateTransition {
func NewPredicateTransition ( target IATNState, ruleIndex, predIndex int, isCtxDependent bool ) *PredicateTransition {

	t := new(PredicateTransition)
	t.InitTransition(target)

	t.serializationType = TransitionPREDICATE
	t.ruleIndex = ruleIndex
	t.predIndex = predIndex
	t.isCtxDependent = isCtxDependent // e.g., $i ref in pred
	t.isEpsilon = true
	return t
}
@ -269,17 +297,17 @@ type ActionTransition struct {
	ruleIndex, actionIndex, predIndex int
}

func NewActionTransition ( target *ATNState, ruleIndex, actionIndex int, isCtxDependent bool ) *ActionTransition {
func NewActionTransition ( target IATNState, ruleIndex, actionIndex int, isCtxDependent bool ) *ActionTransition {

	t := new(ActionTransition)
	t.InitTransition( target )

	t.serializationType = TransitionACTION
	t.ruleIndex = ruleIndex
	t.actionIndex = actionIndex
	t.isCtxDependent = isCtxDependent // e.g., $i ref in pred
	t.isEpsilon = true
	return t
}
@ -291,19 +319,19 @@ func (t *ActionTransition) matches(symbol, minVocabSymbol, maxVocabSymbol int)

func (t *ActionTransition) toString() string {
	return "action_" + t.ruleIndex + ":" + t.actionIndex
}
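Note that the toString body shown above still concatenates a string with int fields, which Go rejects as mismatched types; the PrecedencePredicateTransition version further down already wraps its int in fmt.Sprint for the same reason. A sketch of a compiling variant, as an illustration only (a stand-in function, not the method this commit touches):

package main

import (
	"fmt"
	"strconv"
)

// actionToString shows one way to build the same label with explicit
// int-to-string conversion via strconv.Itoa.
func actionToString(ruleIndex, actionIndex int) string {
	return "action_" + strconv.Itoa(ruleIndex) + ":" + strconv.Itoa(actionIndex)
}

func main() {
	fmt.Println(actionToString(3, 1)) // action_3:1
}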
type SetTransition struct {
	Transition
}

func NewSetTransition ( target *ATNState, set *IntervalSet ) *SetTransition {
func NewSetTransition ( target IATNState, set *IntervalSet ) *SetTransition {

	t := new(SetTransition)
	t.InitTransition( target )
	t.InitSetTransition( set )

	return t
}

func (t *SetTransition) InitSetTransition( set *IntervalSet ) {
@ -322,7 +350,7 @@ func (t *SetTransition) InitSetTransition( set *IntervalSet ) {
func (t *SetTransition) matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
	return t.label.contains(symbol)
}

func (t *SetTransition) toString() string {
	return t.label.toString()
@ -333,7 +361,7 @@ type NotSetTransition struct {
	SetTransition
}

func NewNotSetTransition ( target *ATNState, set *IntervalSet) *NotSetTransition {
func NewNotSetTransition ( target IATNState, set *IntervalSet) *NotSetTransition {

	t := new(NotSetTransition)
	t.InitTransition( target )
@ -357,7 +385,7 @@ type WildcardTransition struct {
	Transition
}

func NewWildcardTransition ( target *ATNState ) *WildcardTransition {
func NewWildcardTransition ( target IATNState ) *WildcardTransition {

	t := new(WildcardTransition)
	t.InitTransition( target )
@ -380,20 +408,20 @@ type PrecedencePredicateTransition struct {
	precedence int
}

func NewPrecedencePredicateTransition ( target *ATNState, precedence int ) *PrecedencePredicateTransition {
func NewPrecedencePredicateTransition ( target IATNState, precedence int ) *PrecedencePredicateTransition {

	t := new(PrecedencePredicateTransition)
	t.InitTransition( target )

	t.serializationType = TransitionPRECEDENCE
	t.precedence = precedence
	t.isEpsilon = true

	return t
}

func (t *PrecedencePredicateTransition) matches(symbol, minVocabSymbol, maxVocabSymbol int) {
func (t *PrecedencePredicateTransition) matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
	return false
}
@ -404,7 +432,7 @@ func (t *PrecedencePredicateTransition) getPredicate() *PrecedencePredicate {
func (t *PrecedencePredicateTransition) toString() string {
	return fmt.Sprint(t.precedence) + " >= _p"
}

@ -8,33 +8,37 @@ package antlr4
var TreeINVALID_INTERVAL = NewInterval(-1, -2)

type Tree interface {
	getParent() *Tree
	getPayload() *interface{}
	getChild(i int) *Tree
	getParent() Tree
	getPayload() interface{}
	getChild(i int) Tree
	getChildCount() int
	toStringTree() string
	// toStringTree() string
}

type SyntaxTree interface {
	Tree

	getSourceInterval() *Interval
}

type ParseTree interface {
	SyntaxTree

	// <T> T accept(ParseTreeVisitor<? extends T> visitor);
	accept(visitor *ParseTreeVisitor)
	getText() string
	toStringTree(parser *Parser) string
	// toStringTree([]string, IRecognizer) string
}

type RuleNode interface {
	ParseTree

	getRuleContext() *RuleContext
	getRuleContext() IRuleContext
}

type TerminalNode interface {
	ParseTree

	getSymbol() *Token
}
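The signature changes in Tree (getParent() *Tree becoming getParent() Tree, getPayload() dropping its pointer, and so on) follow from Go interface values already being reference-like; a pointer to an interface is almost never what callers want. A standalone sketch of the difference, using made-up node types rather than the runtime's:

package main

import "fmt"

// node is a pared-down stand-in for the Tree interface above.
type node interface {
	getChildCount() int
}

type leaf struct{}

func (l *leaf) getChildCount() int { return 0 }

// Returning the interface type directly, as the revised signatures do,
// lets any implementation be returned and used immediately; returning
// *node would force callers to dereference before calling methods.
func child() node { return &leaf{} }

func main() {
	fmt.Println(child().getChildCount()) // 0
}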
@ -5,10 +5,22 @@ import (
	"errors"
	"strings"
	"hash/fnv"
	"math"
	"regexp"
	// "regexp"
)

func intMin(a,b int) int {
	if a < b {
		return a
	}
	return b
}

func intMax(a,b int) int {
	if a > b {
		return a
	}
	return b
}
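At the time of this port Go had no built-in min/max for int, hence the two helpers added above. A small self-contained usage sketch (helper bodies copied from the hunk; the clamp use is just an illustration):

package main

import "fmt"

func intMin(a, b int) int {
	if a < b {
		return a
	}
	return b
}

func intMax(a, b int) int {
	if a > b {
		return a
	}
	return b
}

func main() {
	// Clamp an index into the valid range [0, n-1].
	idx, n := 7, 5
	fmt.Println(intMax(0, intMin(idx, n-1))) // 4
}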

// A simple integer stack

@ -151,7 +163,7 @@ func NewBitSet() *BitSet {
	return b
}

func (this *BitSet) add(value bool) {
func (this *BitSet) add(value int) {
	this.data[value] = true
}
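The corrected add takes an int key, which fits the representation the body implies: this.data[value] = true reads like a set backed by a map from int to bool. A minimal standalone sketch of that shape, assuming a map[int]bool field (an illustration, not the runtime's BitSet):

package main

import "fmt"

// bitSet is an illustrative stand-in; the map[int]bool field is an assumption
// based on the assignment shown in the hunk above.
type bitSet struct {
	data map[int]bool
}

func newBitSet() *bitSet {
	return &bitSet{data: make(map[int]bool)}
}

func (b *bitSet) add(value int) {
	b.data[value] = true
}

func (b *bitSet) contains(value int) bool {
	return b.data[value]
}

func main() {
	b := newBitSet()
	b.add(3)
	fmt.Println(b.contains(3), b.contains(4)) // true false
}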
@ -299,10 +311,15 @@ func TitleCase(str string) string {

	// func (re *Regexp) ReplaceAllStringFunc(src string, repl func(string) string) string
	// return str.replace(//g, function(txt){return txt.charAt(0).toUpperCase() + txt.substr(1)})
	re := regexp.MustCompile("\w\S*")
	return re.ReplaceAllStringFunc(str, func(s string) {
		return strings.ToUpper(s[0:1]) + s[1:2]
	})

	panic("Not implemented")

	// re := regexp.MustCompile("\w\S*")
	// return re.ReplaceAllStringFunc(str, func(s string) {
	//	return strings.ToUpper(s[0:1]) + s[1:2]
	// })
	return nil

}
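The removed version had several compile errors, which is presumably why it is stubbed out with panic("Not implemented") here: "\w\S*" is an invalid escape sequence in an interpreted string literal, the closure is missing its string result type, s[1:2] keeps only one byte instead of the rest of the word, and return nil is not valid for a function returning string. For reference, a sketch of a compiling variant of the commented-out approach (an illustration only, not part of this commit):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// titleCase upper-cases the first letter of every word, mirroring the
// commented-out ReplaceAllStringFunc approach above. Note the raw-string
// pattern: `\w\S*` avoids the invalid \w escape of a normal string literal.
func titleCase(str string) string {
	re := regexp.MustCompile(`\w\S*`)
	return re.ReplaceAllStringFunc(str, func(s string) string {
		return strings.ToUpper(s[:1]) + s[1:]
	})
}

func main() {
	fmt.Println(titleCase("hello antlr world")) // Hello Antlr World
}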