forked from jasder/antlr

ATN runtime format, style, and comment cleanup

This commit is contained in:
parent c8a9d75cfe
commit 538455eae6
@@ -2,86 +2,80 @@ package antlr

 import "fmt"

-// Temporary - for debugging purposes of the Go port
-const (
-    PortDebug = false
-)
+// PortDebug prints debug information to standard out when true. TODO: Remove.
+const PortDebug = false

-var ATNInvalidAltNumber = 0
+var ATNInvalidAltNumber int

 type ATN struct {
-    DecisionToState []DecisionState
-    grammarType int
-    maxTokenType int
-    states []ATNState
-    ruleToStartState []*RuleStartState
-    ruleToStopState []*RuleStopState
+    // DecisionToState is the decision points for all rules, subrules, optional
+    // blocks, ()+, ()*, etc. Used to build DFA predictors for them.
+    DecisionToState []DecisionState
+
+    // grammarType is the ATN type and is used for deserializing ATNs from strings.
+    grammarType int
+
+    // lexerActions is referenced by action transitions in the ATN for lexer ATNs.
+    lexerActions []LexerAction
+
+    // maxTokenType is the maximum value for any symbol recognized by a transition in the ATN.
+    maxTokenType int

     modeNameToStartState map[string]*TokensStartState
-    modeToStartState []*TokensStartState
-    ruleToTokenType []int
-    lexerActions []LexerAction
+
+    modeToStartState []*TokensStartState
+
+    // ruleToStartState maps from rule index to starting state number.
+    ruleToStartState []*RuleStartState
+
+    // ruleToStopState maps from rule index to stop state number.
+    ruleToStopState []*RuleStopState
+
+    // ruleToTokenType maps the rule index to the resulting token type for lexer
+    // ATNs. For parser ATNs, it maps the rule index to the generated bypass token
+    // type if ATNDeserializationOptions.isGenerateRuleBypassTransitions was
+    // specified, and otherwise is nil.
+    ruleToTokenType []int
+
+    states []ATNState
 }

 func NewATN(grammarType int, maxTokenType int) *ATN {
-    atn := new(ATN)
-
-    // Used for runtime deserialization of ATNs from strings///
-    // The type of the ATN.
-    atn.grammarType = grammarType
-
-    // The maximum value for any symbol recognized by a transition in the ATN.
-    atn.maxTokenType = maxTokenType
-    atn.states = make([]ATNState, 0)
-
-    // Each subrule/rule is a decision point and we must track them so we
-    // can go back later and build DFA predictors for them. This includes
-    // all the rules, subrules, optional blocks, ()+, ()* etc...
-    atn.DecisionToState = make([]DecisionState, 0)
-
-    // Maps from rule index to starting state number.
-    atn.ruleToStartState = make([]*RuleStartState, 0)
-
-    // Maps from rule index to stop state number.
-    atn.ruleToStopState = nil
-
-    atn.modeNameToStartState = make(map[string]*TokensStartState)
-
-    // For lexer ATNs, atn.maps the rule index to the resulting token type.
-    // For parser ATNs, atn.maps the rule index to the generated bypass token
-    // type if the
-    // {@link ATNDeserializationOptions//isGenerateRuleBypassTransitions}
-    // deserialization option was specified otherwise, atn.is {@code nil}.
-    atn.ruleToTokenType = nil
-
-    // For lexer ATNs, atn.is an array of {@link LexerAction} objects which may
-    // be referenced by action transitions in the ATN.
-    atn.lexerActions = nil
-
-    atn.modeToStartState = make([]*TokensStartState, 0)
-
-    return atn
+    return &ATN{
+        grammarType: grammarType,
+        maxTokenType: maxTokenType,
+        modeNameToStartState: make(map[string]*TokensStartState),
+    }
 }

-// Compute the set of valid tokens that can occur starting in state {@code s}.
-// If {@code ctx} is nil, the set of tokens will not include what can follow
-// the rule surrounding {@code s}. In other words, the set will be
-// restricted to tokens reachable staying within {@code s}'s rule.
+// NextTokensInContext computes the set of valid tokens that can occur starting
+// in state s. If ctx is nil, the set of tokens will not include what can follow
+// the rule surrounding s. In other words, the set will be restricted to tokens
+// reachable staying within the rule of s.
 func (a *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
-    var anal = NewLL1Analyzer(a)
-    var res = anal.Look(s, nil, ctx)
-
-    return res
+    return NewLL1Analyzer(a).Look(s, nil, ctx)
 }

-// Compute the set of valid tokens that can occur starting in {@code s} and
-// staying in same rule. {@link Token//EPSILON} is in set if we reach end of
+// NextTokensNoContext computes the set of valid tokens that can occur starting
+// in s and staying in same rule. Token.EPSILON is in set if we reach end of
 // rule.
 func (a *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
     if s.GetNextTokenWithinRule() != nil {
         if PortDebug {
             fmt.Println("DEBUG A")
         }

         return s.GetNextTokenWithinRule()
     }

     if PortDebug {
         fmt.Println("DEBUG 2")
         fmt.Println(a.NextTokensInContext(s, nil))
     }

     s.SetNextTokenWithinRule(a.NextTokensInContext(s, nil))
     s.GetNextTokenWithinRule().readOnly = true

     return s.GetNextTokenWithinRule()
 }

@@ -98,16 +92,18 @@ func (a *ATN) addState(state ATNState) {
         state.SetATN(a)
         state.SetStateNumber(len(a.states))
     }

     a.states = append(a.states, state)
 }

 func (a *ATN) removeState(state ATNState) {
-    a.states[state.GetStateNumber()] = nil // just free mem, don't shift states in list
+    a.states[state.GetStateNumber()] = nil // Just free the memory; don't shift states in the slice
 }

 func (a *ATN) defineDecisionState(s DecisionState) int {
     a.DecisionToState = append(a.DecisionToState, s)
     s.setDecision(len(a.DecisionToState) - 1)

     return s.getDecision()
 }

@@ -119,46 +115,48 @@ func (a *ATN) getDecisionState(decision int) DecisionState {
     return a.DecisionToState[decision]
 }

-// Computes the set of input symbols which could follow ATN state number
-// {@code stateNumber} in the specified full {@code context}. This method
-// considers the complete parser context, but does not evaluate semantic
-// predicates (i.e. all predicates encountered during the calculation are
-// assumed true). If a path in the ATN exists from the starting state to the
-// {@link RuleStopState} of the outermost context without Matching any
-// symbols, {@link Token//EOF} is added to the returned set.
+// getExpectedTokens computes the set of input symbols which could follow ATN
+// state number stateNumber in the specified full parse context ctx and returns
+// the set of potentially valid input symbols which could follow the specified
+// state in the specified context. This method considers the complete parser
+// context, but does not evaluate semantic predicates (i.e. all predicates
+// encountered during the calculation are assumed true). If a path in the ATN
+// exists from the starting state to the RuleStopState of the outermost context
+// without Matching any symbols, Token.EOF is added to the returned set.
 //
-// <p>If {@code context} is {@code nil}, it is treated as
-// {@link ParserRuleContext//EMPTY}.</p>
+// A nil ctx defaults to ParserRuleContext.EMPTY.
 //
-// @param stateNumber the ATN state number
-// @param context the full parse context
-// @return The set of potentially valid input symbols which could follow the
-// specified state in the specified context.
-// @panics IllegalArgumentException if the ATN does not contain a state with
-// number {@code stateNumber}
+// It panics if the ATN does not contain state stateNumber.
 func (a *ATN) getExpectedTokens(stateNumber int, ctx RuleContext) *IntervalSet {
     if stateNumber < 0 || stateNumber >= len(a.states) {
         panic("Invalid state number.")
     }

     var s = a.states[stateNumber]
     var following = a.NextTokens(s, nil)

     if !following.contains(TokenEpsilon) {
         return following
     }

     var expected = NewIntervalSet()

     expected.addSet(following)
     expected.removeOne(TokenEpsilon)

     for ctx != nil && ctx.GetInvokingState() >= 0 && following.contains(TokenEpsilon) {
         var invokingState = a.states[ctx.GetInvokingState()]
         var rt = invokingState.GetTransitions()[0]

         following = a.NextTokens(rt.(*RuleTransition).followState, nil)
         expected.addSet(following)
         expected.removeOne(TokenEpsilon)
         ctx = ctx.GetParent().(RuleContext)
     }

     if following.contains(TokenEpsilon) {
         expected.addOne(TokenEOF)
     }

     return expected
 }
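The rewritten NewATN above drops the explicit make calls for the slice fields and keeps one only for the map. The following minimal, self-contained Go sketch (not part of the runtime; the atnLike type and its field names are illustrative stand-ins) shows why that is safe: nil slices grow on append, while maps must still be created before writing.

package main

import "fmt"

// atnLike mimics the shape of the ATN struct above: slices and a map that the
// old constructor initialized explicitly.
type atnLike struct {
	states    []int          // zero value is nil; append works on a nil slice
	modeNames map[string]int // maps must still be made before writing
}

func newATNLike() *atnLike {
	// Only the map needs an explicit make; nil slices allocate on first append,
	// which is why the rewritten NewATN keeps make for the map but not the slices.
	return &atnLike{modeNames: make(map[string]int)}
}

func main() {
	a := newATNLike()
	a.states = append(a.states, 7) // appending to the nil slice allocates it
	a.modeNames["DEFAULT_MODE"] = 0

	fmt.Println(a.states, a.modeNames)
}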
@@ -2,29 +2,22 @@ package antlr

 import (
     "fmt"
-    // "reflect"
     "strconv"
 )

-// A tuple: (ATN state, predicted alt, syntactic, semantic context).
-// The syntactic context is a graph-structured stack node whose
-// path(s) to the root is the rule invocation(s)
-// chain used to arrive at the state. The semantic context is
-// the tree of semantic predicates encountered before reaching
-// an ATN state.
-//
-
 type Comparable interface {
     equals(other interface{}) bool
 }

+// ATNConfig is a tuple: (ATN state, predicted alt, syntactic, semantic
+// context). The syntactic context is a graph-structured stack node whose
+// path(s) to the root is the rule invocation(s) chain used to arrive at the
+// state. The semantic context is the tree of semantic predicates encountered
+// before reaching an ATN state.
 type ATNConfig interface {
     Hasher
     Comparable

-    getPrecedenceFilterSuppressed() bool
-    setPrecedenceFilterSuppressed(bool)
-
     GetState() ATNState
     GetAlt() int
     GetSemanticContext() SemanticContext

@@ -37,6 +30,9 @@ type ATNConfig interface {

     String() string

+    getPrecedenceFilterSuppressed() bool
+    setPrecedenceFilterSuppressed(bool)
+
     shortHash() string
 }

@@ -49,14 +45,14 @@ type BaseATNConfig struct {
     reachesIntoOuterContext int
 }

-func NewBaseATNConfig7(old *BaseATNConfig) *BaseATNConfig { // dup
-    a := new(BaseATNConfig)
-    a.state = old.state
-    a.alt = old.alt
-    a.context = old.context
-    a.semanticContext = old.semanticContext
-    a.reachesIntoOuterContext = old.reachesIntoOuterContext
-    return a
+func NewBaseATNConfig7(old *BaseATNConfig) *BaseATNConfig { // TODO: Dup
+    return &BaseATNConfig{
+        state: old.state,
+        alt: old.alt,
+        context: old.context,
+        semanticContext: old.semanticContext,
+        reachesIntoOuterContext: old.reachesIntoOuterContext,
+    }
 }

 func NewBaseATNConfig6(state ATNState, alt int, context PredictionContext) *BaseATNConfig {

@@ -64,18 +60,11 @@ func NewBaseATNConfig6(state ATNState, alt int, context PredictionContext) *BaseATNConfig {
 }

 func NewBaseATNConfig5(state ATNState, alt int, context PredictionContext, semanticContext SemanticContext) *BaseATNConfig {
-    a := new(BaseATNConfig)
-
     if semanticContext == nil {
-        panic("SemanticContext cannot be null!")
+        panic("semanticContext cannot be nil") // TODO: Necessary?
     }

-    a.state = state
-    a.alt = alt
-    a.context = context
-    a.semanticContext = semanticContext
-
-    return a
+    return &BaseATNConfig{state: state, alt: alt, context: context, semanticContext: semanticContext}
 }

 func NewBaseATNConfig4(c ATNConfig, state ATNState) *BaseATNConfig {

@@ -95,19 +84,17 @@ func NewBaseATNConfig1(c ATNConfig, state ATNState, context PredictionContext) *BaseATNConfig {
 }

 func NewBaseATNConfig(c ATNConfig, state ATNState, context PredictionContext, semanticContext SemanticContext) *BaseATNConfig {
-    a := new(BaseATNConfig)
-
     if semanticContext == nil {
-        panic("SemanticContext cannot be null!")
+        panic("semanticContext cannot be nil")
     }

-    a.state = state
-    a.alt = c.GetAlt()
-    a.context = context
-    a.semanticContext = semanticContext
-    a.reachesIntoOuterContext = c.GetReachesIntoOuterContext()
-
-    return a
+    return &BaseATNConfig{
+        state: state,
+        alt: c.GetAlt(),
+        context: context,
+        semanticContext: semanticContext,
+        reachesIntoOuterContext: c.GetReachesIntoOuterContext(),
+    }
 }

 func (b *BaseATNConfig) getPrecedenceFilterSuppressed() bool {

@@ -145,34 +132,35 @@ func (b *BaseATNConfig) SetReachesIntoOuterContext(v int) {
     b.reachesIntoOuterContext = v
 }

-// An ATN configuration is equal to another if both have
-// the same state, they predict the same alternative, and
-// syntactic/semantic contexts are the same.
-///
+// An ATN configuration is equal to another if both have the same state, they
+// predict the same alternative, and syntactic/semantic contexts are the same.
 func (b *BaseATNConfig) equals(o interface{}) bool {
     if b == o {
         return true
     }

-    other, ok := o.(*BaseATNConfig)
+    var other, ok = o.(*BaseATNConfig)

     if !ok {
         return false
     }

     var equal bool

     if b.context == nil {
         equal = other.context == nil
     } else {
         equal = b.context.equals(other.context)
     }

-    return b.state.GetStateNumber() == other.state.GetStateNumber() &&
-        b.alt == other.alt &&
-        b.semanticContext.equals(other.semanticContext) &&
-        b.precedenceFilterSuppressed == other.precedenceFilterSuppressed &&
-        equal
+    var (
+        nums = b.state.GetStateNumber() == other.state.GetStateNumber()
+        alts = b.alt == other.alt
+        cons = b.semanticContext.equals(other.semanticContext)
+        sups = b.precedenceFilterSuppressed == other.precedenceFilterSuppressed
+    )
+
+    return nums && alts && cons && sups && equal
 }

 func (b *BaseATNConfig) shortHash() string {

@@ -180,8 +168,8 @@ func (b *BaseATNConfig) shortHash() string {
 }

 func (b *BaseATNConfig) Hash() string {

     var c string

     if b.context == nil {
         c = ""
     } else {

@@ -192,93 +180,66 @@ func (b *BaseATNConfig) Hash() string {
 }

 func (b *BaseATNConfig) String() string {
+    var s1, s2, s3 string

-    var s1 string
     if b.context != nil {
         s1 = ",[" + fmt.Sprint(b.context) + "]"
     }

-    var s2 string
     if b.semanticContext != SemanticContextNone {
         s2 = "," + fmt.Sprint(b.semanticContext)
     }

-    var s3 string
     if b.reachesIntoOuterContext > 0 {
         s3 = ",up=" + fmt.Sprint(b.reachesIntoOuterContext)
     }

-    return "(" + fmt.Sprint(b.state) + "," + strconv.Itoa(b.alt) + s1 + s2 + s3 + ")"
+    return fmt.Sprintf("(%v,%v%v%v%v)", b.state, b.alt, s1, s2, s3)
 }

 type LexerATNConfig struct {
     *BaseATNConfig

     lexerActionExecutor *LexerActionExecutor
     passedThroughNonGreedyDecision bool
 }

 func NewLexerATNConfig6(state ATNState, alt int, context PredictionContext) *LexerATNConfig {
-    l := new(LexerATNConfig)
-
-    l.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
-
-    l.passedThroughNonGreedyDecision = false
-    l.lexerActionExecutor = nil
-    return l
+    return &LexerATNConfig{BaseATNConfig: NewBaseATNConfig5(state, alt, context, SemanticContextNone)}
 }

 func NewLexerATNConfig5(state ATNState, alt int, context PredictionContext, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
-    l := new(LexerATNConfig)
-
-    l.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
-    l.lexerActionExecutor = lexerActionExecutor
-    l.passedThroughNonGreedyDecision = false
-    return l
+    return &LexerATNConfig{
+        BaseATNConfig: NewBaseATNConfig5(state, alt, context, SemanticContextNone),
+        lexerActionExecutor: lexerActionExecutor,
+    }
 }

 func NewLexerATNConfig4(c *LexerATNConfig, state ATNState) *LexerATNConfig {
-    l := new(LexerATNConfig)
-
-    l.BaseATNConfig = NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
-    l.lexerActionExecutor = c.lexerActionExecutor
-    l.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
-    return l
+    return &LexerATNConfig{
+        BaseATNConfig: NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext()),
+        lexerActionExecutor: c.lexerActionExecutor,
+        passedThroughNonGreedyDecision: checkNonGreedyDecision(c, state),
+    }
 }

 func NewLexerATNConfig3(c *LexerATNConfig, state ATNState, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig {
-    l := new(LexerATNConfig)
-
-    l.BaseATNConfig = NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
-    l.lexerActionExecutor = lexerActionExecutor
-    l.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
-    return l
+    return &LexerATNConfig{
+        BaseATNConfig: NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext()),
+        lexerActionExecutor: lexerActionExecutor,
+        passedThroughNonGreedyDecision: checkNonGreedyDecision(c, state),
+    }
 }

 func NewLexerATNConfig2(c *LexerATNConfig, state ATNState, context PredictionContext) *LexerATNConfig {
-    l := new(LexerATNConfig)
-
-    l.BaseATNConfig = NewBaseATNConfig(c, state, context, c.GetSemanticContext())
-    l.lexerActionExecutor = c.lexerActionExecutor
-    l.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
-    return l
+    return &LexerATNConfig{
+        BaseATNConfig: NewBaseATNConfig(c, state, context, c.GetSemanticContext()),
+        lexerActionExecutor: c.lexerActionExecutor,
+        passedThroughNonGreedyDecision: checkNonGreedyDecision(c, state),
+    }
 }

 func NewLexerATNConfig1(state ATNState, alt int, context PredictionContext) *LexerATNConfig {
-    l := new(LexerATNConfig)
-
-    l.BaseATNConfig = NewBaseATNConfig5(state, alt, context, SemanticContextNone)
-
-    l.lexerActionExecutor = nil
-    l.passedThroughNonGreedyDecision = false
-
-    return l
+    return &LexerATNConfig{BaseATNConfig: NewBaseATNConfig5(state, alt, context, SemanticContextNone)}
 }

 func (l *LexerATNConfig) Hash() string {

@@ -290,13 +251,11 @@ func (l *LexerATNConfig) Hash() string {
         f = "0"
     }

-    return strconv.Itoa(l.state.GetStateNumber()) + strconv.Itoa(l.alt) + fmt.Sprint(l.context) +
-        fmt.Sprint(l.semanticContext) + f + fmt.Sprint(l.lexerActionExecutor)
+    return fmt.Sprintf("%v%v%v%v%v%v", l.state.GetStateNumber(), l.alt, l.context, l.semanticContext, f, l.lexerActionExecutor)
 }

 func (l *LexerATNConfig) equals(other interface{}) bool {
-    othert, ok := other.(*LexerATNConfig)
+    var othert, ok = other.(*LexerATNConfig)

     if l == other {
         return true

@@ -307,6 +266,7 @@ func (l *LexerATNConfig) equals(other interface{}) bool {
     }

     var b bool

     if l.lexerActionExecutor != nil {
         b = !l.lexerActionExecutor.equals(othert.lexerActionExecutor)
     } else {

@@ -321,6 +281,7 @@ func (l *LexerATNConfig) equals(other interface{}) bool {
 }

 func checkNonGreedyDecision(source *LexerATNConfig, target ATNState) bool {
-    ds, ok := target.(DecisionState)
+    var ds, ok = target.(DecisionState)

     return source.passedThroughNonGreedyDecision || (ok && ds.getNonGreedy())
 }
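The new BaseATNConfig.equals above splits a long boolean chain into named comparisons declared in a var block and then returns their conjunction. Below is a small standalone sketch of that pattern with a hypothetical pointLike type; only the style is taken from the change, none of the names belong to the runtime.

package main

import "fmt"

// pointLike stands in for a config-like value; the fields are illustrative.
type pointLike struct {
	state, alt int
	suppressed bool
}

// equals mirrors the style used in the rewritten BaseATNConfig.equals: each
// comparison gets a name in a var block, and the result is their conjunction.
func (p pointLike) equals(o pointLike) bool {
	var (
		nums = p.state == o.state
		alts = p.alt == o.alt
		sups = p.suppressed == o.suppressed
	)

	return nums && alts && sups
}

func main() {
	a := pointLike{state: 3, alt: 1, suppressed: false}
	b := pointLike{state: 3, alt: 1, suppressed: false}

	fmt.Println(a.equals(b)) // true
}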
@@ -1,8 +1,6 @@
 package antlr

-import (
-    "fmt"
-)
+import "fmt"

 type ATNConfigSet interface {
     Hasher

@@ -43,114 +41,114 @@ type ATNConfigSet interface {
     SetDipsIntoOuterContext(bool)
 }

-// Specialized {@link Set}{@code <}{@link ATNConfig}{@code >} that can track
-// info about the set, with support for combining similar configurations using a
+// BaseATNConfigSet is a specialized set of ATNConfig that tracks information
+// about its elements and can combine similar configurations using a
 // graph-structured stack.
 type BaseATNConfigSet struct {
-    readOnly bool
-    fullCtx bool
-    configLookup *Set
-    conflictingAlts *BitSet
-    cachedHashString string
-    hasSemanticContext bool
+    cachedHashString string
+
+    // configLookup is used to determine whether two BaseATNConfigSets are equal. We
+    // need all configurations with the same (s, i, _, semctx) to be equal. A key
+    // effectively doubles the number of objects associated with ATNConfigs. All
+    // keys are hashed by (s, i, _, pi), not including the context. Wiped out when
+    // read-only because a set becomes a DFA state.
+    configLookup *Set
+
+    // configs is the added elements.
+    configs []ATNConfig
+
+    // TODO: These fields make me pretty uncomfortable, but it is nice to pack up
+    // info together because it saves recomputation. Can we track conflicts as they
+    // are added to save scanning configs later?
+    conflictingAlts *BitSet
+
+    // dipsIntoOuterContext is used by parsers and lexers. In a lexer, it indicates
+    // we hit a pred while computing a closure operation. Do not make a DFA state
+    // from the BaseATNConfigSet in this case. TODO: How is this used by parsers?
     dipsIntoOuterContext bool
-    configs []ATNConfig
-    uniqueAlt int
-}

-func NewBaseATNConfigSet(fullCtx bool) *BaseATNConfigSet {
-
-    a := new(BaseATNConfigSet)
-
-    // The reason that we need a.is because we don't want the hash map to use
-    // the standard hash code and equals. We need all configurations with the
-    // same
-    // {@code (s,i,_,semctx)} to be equal. Unfortunately, a.key effectively
-    // doubles
-    // the number of objects associated with ATNConfigs. The other solution is
-    // to
-    // use a hash table that lets us specify the equals/hashcode operation.
-    // All configs but hashed by (s, i, _, pi) not including context. Wiped out
-    // when we go readonly as a.set becomes a DFA state.
-    a.configLookup = NewSet(hashATNConfig, equalATNConfigs)
-
-    // Indicates that a.configuration set is part of a full context
-    // LL prediction. It will be used to determine how to merge $. With SLL
-    // it's a wildcard whereas it is not for LL context merge.
-    a.fullCtx = fullCtx
-
-    // Indicates that the set of configurations is read-only. Do not
-    // allow any code to manipulate the set DFA states will point at
-    // the sets and they must not change. a.does not protect the other
-    // fields in particular, conflictingAlts is set after
-    // we've made a.readonly.
-    a.readOnly = false
-
-    // Track the elements as they are added to the set supports Get(i)///
-    a.configs = make([]ATNConfig, 0)
-
-    // TODO: these fields make me pretty uncomfortable but nice to pack up info
-    // together, saves recomputation
-    // TODO: can we track conflicts as they are added to save scanning configs
-    // later?
-    a.uniqueAlt = 0
-    a.conflictingAlts = nil
+    // fullCtx is whether it is part of a full context LL prediction. Used to
+    // determine how to merge $. It is a wildcard with SLL, but not for an LL
+    // context merge.
+    fullCtx bool

     // Used in parser and lexer. In lexer, it indicates we hit a pred
     // while computing a closure operation. Don't make a DFA state from a.
-    a.hasSemanticContext = false
-    a.dipsIntoOuterContext = false
+    hasSemanticContext bool

-    a.cachedHashString = "-1"
+    // readOnly is whether it is read-only. Do not
+    // allow any code to manipulate the set if true because DFA states will point at
+    // sets and those must not change. It not protect other fields; conflictingAlts
+    // in particular, which is assigned after readOnly.
+    readOnly bool

-    return a
+    // TODO: These fields make me pretty uncomfortable, but it is nice to pack up
+    // info together because it saves recomputation. Can we track conflicts as they
+    // are added to save scanning configs later?
+    uniqueAlt int
 }

-// Adding a Newconfig means merging contexts with existing configs for
-// {@code (s, i, pi, _)}, where {@code s} is the
-// {@link ATNConfig//state}, {@code i} is the {@link ATNConfig//alt}, and
-// {@code pi} is the {@link ATNConfig//semanticContext}. We use
-// {@code (s,i,pi)} as key.
-//
-// <p>This method updates {@link //dipsIntoOuterContext} and
-// {@link //hasSemanticContext} when necessary.</p>
-// /
-func (b *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
-
-    if b.readOnly {
-        panic("This set is readonly")
+func NewBaseATNConfigSet(fullCtx bool) *BaseATNConfigSet {
+    return &BaseATNConfigSet{
+        cachedHashString: "-1",
+        configLookup: NewSet(hashATNConfig, equalATNConfigs),
+        fullCtx: fullCtx,
+    }
+}
+
+// Add merges contexts with existing configs for (s, i, pi, _), where s is the
+// ATNConfig.state, i is the ATNConfig.alt, and pi is the
+// ATNConfig.semanticContext. We use (s,i,pi) as the key. Updates
+// dipsIntoOuterContext and hasSemanticContext when necessary.
+func (b *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
+    if b.readOnly {
+        panic("set is read-only")
     }

     if config.GetSemanticContext() != SemanticContextNone {
         b.hasSemanticContext = true
     }

     if config.GetReachesIntoOuterContext() > 0 {
         b.dipsIntoOuterContext = true
     }

     var existing = b.configLookup.add(config).(ATNConfig)

     if existing == config {
         b.cachedHashString = "-1"
-        b.configs = append(b.configs, config) // track order here
+        b.configs = append(b.configs, config) // Track order here

         return true
     }
-    // a previous (s,i,pi,_), merge with it and save result
+
+    // Merge a previous (s, i, pi, _) with it and save the result
     var rootIsWildcard = !b.fullCtx
     var merged = merge(existing.GetContext(), config.GetContext(), rootIsWildcard, mergeCache)
-    // no need to check for existing.context, config.context in cache
-    // since only way to create Newgraphs is "call rule" and here. We
-    // cache at both places.
+
+    // No need to check for existing.context because config.context is in the cache,
+    // since the only way to create new graphs is the "call rule" and here. We cache
+    // at both places.
     existing.SetReachesIntoOuterContext(intMax(existing.GetReachesIntoOuterContext(), config.GetReachesIntoOuterContext()))
-    // make sure to preserve the precedence filter suppression during the merge
+
+    // Preserve the precedence filter suppression during the merge
     if config.getPrecedenceFilterSuppressed() {
         existing.setPrecedenceFilterSuppressed(true)
     }
-    existing.SetContext(merged) // replace context no need to alt mapping
+
+    // Replace the context because there is no need to do alt mapping
+    existing.SetContext(merged)

     return true
 }

 func (b *BaseATNConfigSet) GetStates() *Set {
     var states = NewSet(nil, nil)

     for i := 0; i < len(b.configs); i++ {
         states.add(b.configs[i].GetState())
     }

     return states
 }

@@ -164,12 +162,15 @@ func (b *BaseATNConfigSet) SetHasSemanticContext(v bool) {

 func (b *BaseATNConfigSet) GetPredicates() []SemanticContext {
     var preds = make([]SemanticContext, 0)

     for i := 0; i < len(b.configs); i++ {
-        c := b.configs[i].GetSemanticContext()
+        var c = b.configs[i].GetSemanticContext()

         if c != SemanticContextNone {
             preds = append(preds, c)
         }
     }

     return preds
 }

@@ -179,13 +180,16 @@ func (b *BaseATNConfigSet) GetItems() []ATNConfig {

 func (b *BaseATNConfigSet) OptimizeConfigs(interpreter *BaseATNSimulator) {
     if b.readOnly {
-        panic("This set is readonly")
+        panic("set is read-only")
     }

     if b.configLookup.length() == 0 {
         return
     }

     for i := 0; i < len(b.configs); i++ {
         var config = b.configs[i]

         config.SetContext(interpreter.getCachedContext(config.GetContext()))
     }
 }

@@ -194,6 +198,7 @@ func (b *BaseATNConfigSet) AddAll(coll []ATNConfig) bool {
     for i := 0; i < len(coll); i++ {
         b.Add(coll[i], nil)
     }

     return false
 }

@@ -204,10 +209,10 @@ func (b *BaseATNConfigSet) Equals(other interface{}) bool {
         return false
     }

-    other2 := other.(*BaseATNConfigSet)
+    var other2 = other.(*BaseATNConfigSet)

     return b.configs != nil &&
-        // b.configs.equals(other2.configs) && // TODO is b necessary?
+        // TODO: b.configs.equals(other2.configs) && // TODO: Is b necessary?
         b.fullCtx == other2.fullCtx &&
         b.uniqueAlt == other2.uniqueAlt &&
         b.conflictingAlts == other2.conflictingAlts &&

@@ -220,6 +225,7 @@ func (b *BaseATNConfigSet) Hash() string {
     if b.cachedHashString == "-1" {
         b.cachedHashString = b.hashConfigs()
     }

     return b.cachedHashString
 }

@@ -228,9 +234,11 @@ func (b *BaseATNConfigSet) Hash() string {

 func (b *BaseATNConfigSet) hashConfigs() string {
     var s = ""

     for _, c := range b.configs {
         s += fmt.Sprint(c)
     }

     return s
 }

@@ -244,22 +252,25 @@ func (b *BaseATNConfigSet) IsEmpty() bool {

 func (b *BaseATNConfigSet) Contains(item ATNConfig) bool {
     if b.configLookup == nil {
-        panic("This method is not implemented for readonly sets.")
+        panic("not implemented for read-only sets")
     }

     return b.configLookup.contains(item)
 }

 func (b *BaseATNConfigSet) ContainsFast(item ATNConfig) bool {
     if b.configLookup == nil {
-        panic("This method is not implemented for readonly sets.")
+        panic("not implemented for read-only sets")
     }
-    return b.configLookup.contains(item) // TODO containsFast is not implemented for Set
+
+    return b.configLookup.contains(item) // TODO: containsFast is not implemented for Set
 }

 func (b *BaseATNConfigSet) Clear() {
     if b.readOnly {
-        panic("This set is readonly")
+        panic("set is read-only")
     }

     b.configs = make([]ATNConfig, 0)
     b.cachedHashString = "-1"
     b.configLookup = NewSet(hashATNConfig, equalATNConfigs)

@@ -299,8 +310,9 @@ func (b *BaseATNConfigSet) ReadOnly() bool {

 func (b *BaseATNConfigSet) SetReadOnly(readOnly bool) {
     b.readOnly = readOnly

     if readOnly {
-        b.configLookup = nil // can't mod, no need for lookup cache
+        b.configLookup = nil // Read only, so no need for the lookup cache
     }
 }

@@ -309,6 +321,7 @@ func (b *BaseATNConfigSet) String() string {

     for i, c := range b.configs {
         s += c.String()

         if i != len(b.configs)-1 {
             s += ", "
         }

@@ -340,13 +353,11 @@ type OrderedATNConfigSet struct {
 }

 func NewOrderedATNConfigSet() *OrderedATNConfigSet {
-    o := new(OrderedATNConfigSet)
+    var b = NewBaseATNConfigSet(false)

-    o.BaseATNConfigSet = NewBaseATNConfigSet(false)
-    o.configLookup = NewSet(nil, nil)
+    b.configLookup = NewSet(nil, nil)

-    return o
+    return &OrderedATNConfigSet{BaseATNConfigSet: b}
 }

 func hashATNConfig(c interface{}) string {

@@ -354,7 +365,6 @@ func hashATNConfig(c interface{}) string {
 }

 func equalATNConfigs(a, b interface{}) bool {
-
     if a == nil || b == nil {
         return false
     }

@@ -363,14 +373,16 @@ func equalATNConfigs(a, b interface{}) bool {
         return true
     }

-    ai, ok := a.(ATNConfig)
-    bi, ok1 := b.(ATNConfig)
+    var ai, ok = a.(ATNConfig)
+    var bi, ok1 = b.(ATNConfig)

     if !ok || !ok1 {
         return false
     }

-    return ai.GetState().GetStateNumber() == bi.GetState().GetStateNumber() &&
-        ai.GetAlt() == bi.GetAlt() &&
-        ai.GetSemanticContext().equals(bi.GetSemanticContext())
+    var nums = ai.GetState().GetStateNumber() == bi.GetState().GetStateNumber()
+    var alts = ai.GetAlt() == bi.GetAlt()
+    var cons = ai.GetSemanticContext().equals(bi.GetSemanticContext())
+
+    return nums && alts && cons
 }
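The BaseATNConfigSet comments above describe a read-only discipline: once a set backs a DFA state it must not be mutated, so mutators panic and SetReadOnly drops the lookup cache. A toy, self-contained sketch of that discipline, using an invented roSet type rather than the runtime's types, might look like this:

package main

import "fmt"

// roSet is a toy set that follows the read-only discipline described above:
// mutators panic once the set is frozen, and the lookup index can be dropped.
type roSet struct {
	items    []string
	lookup   map[string]bool
	readOnly bool
}

func newROSet() *roSet {
	return &roSet{lookup: make(map[string]bool)}
}

func (s *roSet) Add(v string) bool {
	if s.readOnly {
		panic("set is read-only")
	}

	if s.lookup[v] {
		return false // already present; this toy version does no merging
	}

	s.lookup[v] = true
	s.items = append(s.items, v) // track insertion order, like configs
	return true
}

func (s *roSet) SetReadOnly(readOnly bool) {
	s.readOnly = readOnly

	if readOnly {
		s.lookup = nil // no further lookups are needed once frozen
	}
}

func main() {
	s := newROSet()
	s.Add("a")
	s.Add("b")
	s.SetReadOnly(true)

	fmt.Println(s.items)
	// s.Add("c") would now panic with "set is read-only".
}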
@@ -1,5 +1,7 @@
 package antlr

+var ATNDeserializationOptionsdefaultOptions = &ATNDeserializationOptions{true, false, false}
+
 type ATNDeserializationOptions struct {
     readOnly bool
     verifyATN bool

@@ -7,7 +9,7 @@ type ATNDeserializationOptions struct {
 }

 func NewATNDeserializationOptions(CopyFrom *ATNDeserializationOptions) *ATNDeserializationOptions {
-    o := new(ATNDeserializationOptions)
+    var o = new(ATNDeserializationOptions)

     if CopyFrom != nil {
         o.readOnly = CopyFrom.readOnly

@@ -17,5 +19,3 @@ func NewATNDeserializationOptions(CopyFrom *ATNDeserializationOptions) *ATNDeserializationOptions {

     return o
 }
-
-var ATNDeserializationOptionsdefaultOptions = &ATNDeserializationOptions{true, false, false}
@ -39,16 +39,11 @@ type ATNDeserializer struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewATNDeserializer(options *ATNDeserializationOptions) *ATNDeserializer {
|
func NewATNDeserializer(options *ATNDeserializationOptions) *ATNDeserializer {
|
||||||
|
|
||||||
if options == nil {
|
if options == nil {
|
||||||
options = ATNDeserializationOptionsdefaultOptions
|
options = ATNDeserializationOptionsdefaultOptions
|
||||||
}
|
}
|
||||||
|
|
||||||
a := new(ATNDeserializer)
|
return &ATNDeserializer{deserializationOptions: options}
|
||||||
|
|
||||||
a.deserializationOptions = options
|
|
||||||
|
|
||||||
return a
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func stringInSlice(a string, list []string) int {
|
func stringInSlice(a string, list []string) int {
|
||||||
|
@ -57,60 +52,64 @@ func stringInSlice(a string, list []string) int {
|
||||||
return i
|
return i
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return -1
|
return -1
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determines if a particular serialized representation of an ATN supports
|
// isFeatureSupported determines if a particular serialized representation of an
|
||||||
// a particular feature, identified by the {@link UUID} used for serializing
|
// ATN supports a particular feature, identified by the UUID used for
|
||||||
// the ATN at the time the feature was first introduced.
|
// serializing the ATN at the time the feature was first introduced. Feature is
|
||||||
//
|
// the UUID marking the first time the feature was supported in the serialized
|
||||||
// @param feature The {@link UUID} marking the first time the feature was
|
// ATN. ActualUuid is the UUID of the actual serialized ATN which is currently
|
||||||
// supported in the serialized ATN.
|
// being deserialized. It returns true if actualUuid represents a serialized ATN
|
||||||
// @param actualUuid The {@link UUID} of the actual serialized ATN which is
|
// at or after the feature identified by feature was introduced, and otherwise
|
||||||
// currently being deserialized.
|
// false.
|
||||||
// @return {@code true} if the {@code actualUuid} value represents a
|
|
||||||
// serialized ATN at or after the feature identified by {@code feature} was
|
|
||||||
// introduced otherwise, {@code false}.
|
|
||||||
|
|
||||||
func (a *ATNDeserializer) isFeatureSupported(feature, actualUUID string) bool {
|
func (a *ATNDeserializer) isFeatureSupported(feature, actualUUID string) bool {
|
||||||
var idx1 = stringInSlice(feature, SupportedUUIDs)
|
var idx1 = stringInSlice(feature, SupportedUUIDs)
|
||||||
|
|
||||||
if idx1 < 0 {
|
if idx1 < 0 {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
var idx2 = stringInSlice(actualUUID, SupportedUUIDs)
|
var idx2 = stringInSlice(actualUUID, SupportedUUIDs)
|
||||||
|
|
||||||
return idx2 >= idx1
|
return idx2 >= idx1
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
|
func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
|
||||||
|
|
||||||
a.reset(utf16.Decode(data))
|
a.reset(utf16.Decode(data))
|
||||||
a.checkVersion()
|
a.checkVersion()
|
||||||
a.checkUUID()
|
a.checkUUID()
|
||||||
|
|
||||||
var atn = a.readATN()
|
var atn = a.readATN()
|
||||||
|
|
||||||
a.readStates(atn)
|
a.readStates(atn)
|
||||||
a.readRules(atn)
|
a.readRules(atn)
|
||||||
a.readModes(atn)
|
a.readModes(atn)
|
||||||
|
|
||||||
var sets = a.readSets(atn)
|
var sets = a.readSets(atn)
|
||||||
|
|
||||||
a.readEdges(atn, sets)
|
a.readEdges(atn, sets)
|
||||||
a.readDecisions(atn)
|
a.readDecisions(atn)
|
||||||
a.readLexerActions(atn)
|
a.readLexerActions(atn)
|
||||||
a.markPrecedenceDecisions(atn)
|
a.markPrecedenceDecisions(atn)
|
||||||
a.verifyATN(atn)
|
a.verifyATN(atn)
|
||||||
|
|
||||||
if a.deserializationOptions.generateRuleBypassTransitions && atn.grammarType == ATNTypeParser {
|
if a.deserializationOptions.generateRuleBypassTransitions && atn.grammarType == ATNTypeParser {
|
||||||
a.generateRuleBypassTransitions(atn)
|
a.generateRuleBypassTransitions(atn)
|
||||||
// re-verify after modification
|
// Re-verify after modification
|
||||||
a.verifyATN(atn)
|
a.verifyATN(atn)
|
||||||
}
|
}
|
||||||
|
|
||||||
return atn
|
return atn
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *ATNDeserializer) reset(data []rune) {
|
func (a *ATNDeserializer) reset(data []rune) {
|
||||||
|
var temp = make([]rune, len(data))
|
||||||
temp := make([]rune, len(data))
|
|
||||||
|
|
||||||
for i, c := range data {
|
for i, c := range data {
|
||||||
// don't adjust the first value since that's the version number
|
// Don't adjust the first value since that's the version number
|
||||||
if i == 0 {
|
if i == 0 {
|
||||||
temp[i] = c
|
temp[i] = c
|
||||||
} else {
|
} else {
|
||||||
|
@ -124,6 +123,7 @@ func (a *ATNDeserializer) reset(data []rune) {
|
||||||
|
|
||||||
func (a *ATNDeserializer) checkVersion() {
|
func (a *ATNDeserializer) checkVersion() {
|
||||||
var version = a.readInt()
|
var version = a.readInt()
|
||||||
|
|
||||||
if version != SerializedVersion {
|
if version != SerializedVersion {
|
||||||
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(SerializedVersion) + ").")
|
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(SerializedVersion) + ").")
|
||||||
}
|
}
|
||||||
|
@ -131,20 +131,22 @@ func (a *ATNDeserializer) checkVersion() {
|
||||||
|
|
||||||
func (a *ATNDeserializer) checkUUID() {
|
func (a *ATNDeserializer) checkUUID() {
|
||||||
var uuid = a.readUUID()
|
var uuid = a.readUUID()
|
||||||
|
|
||||||
if stringInSlice(uuid, SupportedUUIDs) < 0 {
|
if stringInSlice(uuid, SupportedUUIDs) < 0 {
|
||||||
panic("Could not deserialize ATN with UUID: " + uuid + " (expected " + SerializedUUID + " or a legacy UUID).")
|
panic("Could not deserialize ATN with UUID: " + uuid + " (expected " + SerializedUUID + " or a legacy UUID).")
|
||||||
}
|
}
|
||||||
|
|
||||||
a.uuid = uuid
|
a.uuid = uuid
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *ATNDeserializer) readATN() *ATN {
|
func (a *ATNDeserializer) readATN() *ATN {
|
||||||
var grammarType = a.readInt()
|
var grammarType = a.readInt()
|
||||||
var maxTokenType = a.readInt()
|
var maxTokenType = a.readInt()
|
||||||
|
|
||||||
return NewATN(grammarType, maxTokenType)
|
return NewATN(grammarType, maxTokenType)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *ATNDeserializer) readStates(atn *ATN) {
|
func (a *ATNDeserializer) readStates(atn *ATN) {
|
||||||
|
|
||||||
var loopBackStateNumbers = make([]LoopEndStateIntPair, 0)
|
var loopBackStateNumbers = make([]LoopEndStateIntPair, 0)
|
||||||
var endStateNumbers = make([]BlockStartStateIntPair, 0)
|
var endStateNumbers = make([]BlockStartStateIntPair, 0)
|
||||||
|
|
||||||
|
@ -152,72 +154,97 @@ func (a *ATNDeserializer) readStates(atn *ATN) {
|
||||||
|
|
||||||
for i := 0; i < nstates; i++ {
|
for i := 0; i < nstates; i++ {
|
||||||
var stype = a.readInt()
|
var stype = a.readInt()
|
||||||
// ignore bad type of states
|
|
||||||
|
// Ignore bad types of states
|
||||||
if stype == ATNStateInvalidType {
|
if stype == ATNStateInvalidType {
|
||||||
atn.addState(nil)
|
atn.addState(nil)
|
||||||
|
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
var ruleIndex = a.readInt()
|
var ruleIndex = a.readInt()
|
||||||
|
|
||||||
if ruleIndex == 0xFFFF {
|
if ruleIndex == 0xFFFF {
|
||||||
ruleIndex = -1
|
ruleIndex = -1
|
||||||
}
|
}
|
||||||
|
|
||||||
var s = a.stateFactory(stype, ruleIndex)
|
var s = a.stateFactory(stype, ruleIndex)
|
||||||
|
|
||||||
if stype == ATNStateLoopEnd {
|
if stype == ATNStateLoopEnd {
|
||||||
var loopBackStateNumber = a.readInt()
|
var loopBackStateNumber = a.readInt()
|
||||||
|
|
||||||
loopBackStateNumbers = append(loopBackStateNumbers, LoopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
|
loopBackStateNumbers = append(loopBackStateNumbers, LoopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
|
||||||
} else if s2, ok := s.(BlockStartState); ok {
|
} else if s2, ok := s.(BlockStartState); ok {
|
||||||
var endStateNumber = a.readInt()
|
var endStateNumber = a.readInt()
|
||||||
|
|
||||||
endStateNumbers = append(endStateNumbers, BlockStartStateIntPair{s2, endStateNumber})
|
endStateNumbers = append(endStateNumbers, BlockStartStateIntPair{s2, endStateNumber})
|
||||||
}
|
}
|
||||||
|
|
||||||
atn.addState(s)
|
atn.addState(s)
|
||||||
}
|
}
|
||||||
// delay the assignment of loop back and end states until we know all the
|
|
||||||
// state instances have been initialized
|
// Delay the assignment of loop back and end states until we know all the state
|
||||||
|
// instances have been initialized
|
||||||
for j := 0; j < len(loopBackStateNumbers); j++ {
|
for j := 0; j < len(loopBackStateNumbers); j++ {
|
||||||
pair := loopBackStateNumbers[j]
|
var pair = loopBackStateNumbers[j]
|
||||||
|
|
||||||
pair.item0.loopBackState = atn.states[pair.item1]
|
pair.item0.loopBackState = atn.states[pair.item1]
|
||||||
}
|
}
|
||||||
|
|
||||||
for j := 0; j < len(endStateNumbers); j++ {
|
for j := 0; j < len(endStateNumbers); j++ {
|
||||||
pair := endStateNumbers[j]
|
var pair = endStateNumbers[j]
|
||||||
|
|
||||||
pair.item0.setEndState(atn.states[pair.item1].(*BlockEndState))
|
pair.item0.setEndState(atn.states[pair.item1].(*BlockEndState))
|
||||||
}
|
}
|
||||||
|
|
||||||
var numNonGreedyStates = a.readInt()
|
var numNonGreedyStates = a.readInt()
|
||||||
|
|
||||||
for j := 0; j < numNonGreedyStates; j++ {
|
for j := 0; j < numNonGreedyStates; j++ {
|
||||||
stateNumber := a.readInt()
|
var stateNumber = a.readInt()
|
||||||
|
|
||||||
atn.states[stateNumber].(DecisionState).setNonGreedy(true)
|
atn.states[stateNumber].(DecisionState).setNonGreedy(true)
|
||||||
}
|
}
|
||||||
|
|
||||||
var numPrecedenceStates = a.readInt()
|
var numPrecedenceStates = a.readInt()
|
||||||
|
|
||||||
for j := 0; j < numPrecedenceStates; j++ {
|
for j := 0; j < numPrecedenceStates; j++ {
|
||||||
stateNumber := a.readInt()
|
var stateNumber = a.readInt()
|
||||||
|
|
||||||
atn.states[stateNumber].(*RuleStartState).isPrecedenceRule = true
|
atn.states[stateNumber].(*RuleStartState).isPrecedenceRule = true
|
||||||
}
|
}
|
||||||
}
|
}

func (a *ATNDeserializer) readRules(atn *ATN) {

var nrules = a.readInt()

if atn.grammarType == ATNTypeLexer {
atn.ruleToTokenType = make([]int, nrules) // initIntArray(nrules, 0)
atn.ruleToTokenType = make([]int, nrules) // TODO: initIntArray(nrules, 0)
}
atn.ruleToStartState = make([]*RuleStartState, nrules) // initIntArray(nrules, 0)
atn.ruleToStartState = make([]*RuleStartState, nrules) // TODO: initIntArray(nrules, 0)

for i := 0; i < nrules; i++ {
var s = a.readInt()
var startState = atn.states[s].(*RuleStartState)

atn.ruleToStartState[i] = startState

if atn.grammarType == ATNTypeLexer {
var tokenType = a.readInt()

if tokenType == 0xFFFF {
tokenType = TokenEOF
}

atn.ruleToTokenType[i] = tokenType
}
}

atn.ruleToStopState = make([]*RuleStopState, nrules) //initIntArray(nrules, 0)

for i := 0; i < len(atn.states); i++ {
var state = atn.states[i]

if s2, ok := state.(*RuleStopState); ok {
atn.ruleToStopState[s2.ruleIndex] = s2
atn.ruleToStartState[s2.ruleIndex].stopState = s2

@ -227,8 +254,10 @@ func (a *ATNDeserializer) readRules(atn *ATN) {

func (a *ATNDeserializer) readModes(atn *ATN) {
var nmodes = a.readInt()

for i := 0; i < nmodes; i++ {
var s = a.readInt()

atn.modeToStartState = append(atn.modeToStartState, atn.states[s].(*TokensStartState))
}
}
@ -236,81 +265,102 @@ func (a *ATNDeserializer) readModes(atn *ATN) {

func (a *ATNDeserializer) readSets(atn *ATN) []*IntervalSet {
var sets = make([]*IntervalSet, 0)
var m = a.readInt()

for i := 0; i < m; i++ {
var iset = NewIntervalSet()

sets = append(sets, iset)

var n = a.readInt()
var containsEOF = a.readInt()

if containsEOF != 0 {
iset.addOne(-1)
}

for j := 0; j < n; j++ {
var i1 = a.readInt()
var i2 = a.readInt()

iset.addRange(i1, i2)
}
}

return sets
}
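
The loop above decodes each serialized set as a count, an EOF flag, and then start/stop pairs. As a side note, here is a minimal standalone sketch of that layout in Go; the helper names below are hypothetical and not part of the runtime API.

package main

import "fmt"

type interval struct{ start, stop int }

// decodeIntervalSet consumes one serialized set: n, containsEOF, then n (start, stop) pairs.
// It returns the decoded intervals and how many ints were consumed.
func decodeIntervalSet(data []int) ([]interval, int) {
	n, containsEOF := data[0], data[1]
	pos := 2
	var out []interval
	if containsEOF != 0 {
		out = append(out, interval{-1, -1}) // stand-in for iset.addOne(-1)
	}
	for j := 0; j < n; j++ {
		out = append(out, interval{data[pos], data[pos+1]})
		pos += 2
	}
	return out, pos
}

func main() {
	sets, used := decodeIntervalSet([]int{2, 0, 65, 90, 97, 122}) // [A-Z] and [a-z]
	fmt.Println(sets, used)                                       // [{65 90} {97 122}] 6
}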

func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {

var nedges = a.readInt()

for i := 0; i < nedges; i++ {
var src = a.readInt()
var trg = a.readInt()
var ttype = a.readInt()
var arg1 = a.readInt()
var arg2 = a.readInt()
var arg3 = a.readInt()
trans := a.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
var srcState = atn.states[src]

var (
src = a.readInt()
trg = a.readInt()
ttype = a.readInt()
arg1 = a.readInt()
arg2 = a.readInt()
arg3 = a.readInt()
trans = a.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
srcState = atn.states[src]
)

srcState.AddTransition(trans, -1)
}
// edges for rule stop states can be derived, so they aren't serialized
// Edges for rule stop states can be derived, so they are not serialized
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
var state = atn.states[i]

for j := 0; j < len(state.GetTransitions()); j++ {
var t, ok = state.GetTransitions()[j].(*RuleTransition)

if !ok {
continue
}

var outermostPrecedenceReturn = -1

if atn.ruleToStartState[t.getTarget().GetRuleIndex()].isPrecedenceRule {
if t.precedence == 0 {
outermostPrecedenceReturn = t.getTarget().GetRuleIndex()
}
}

trans := NewEpsilonTransition(t.followState, outermostPrecedenceReturn)
var trans = NewEpsilonTransition(t.followState, outermostPrecedenceReturn)

atn.ruleToStopState[t.getTarget().GetRuleIndex()].AddTransition(trans, -1)
}
}

for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
var state = atn.states[i]

if s2, ok := state.(*BaseBlockStartState); ok {
// we need to know the end state to set its start state
// We need to know the end state to set its start state
if s2.endState == nil {
panic("IllegalState")
}
// block end states can only be associated to a single block start
// state
// Block end states can only be associated to a single block start state
if s2.endState.startState != nil {
panic("IllegalState")
}

s2.endState.startState = state
}

if s2, ok := state.(*PlusLoopbackState); ok {
for j := 0; j < len(s2.GetTransitions()); j++ {
target := s2.GetTransitions()[j].getTarget()
var target = s2.GetTransitions()[j].getTarget()

if t2, ok := target.(*PlusBlockStartState); ok {
t2.loopBackState = state
}
}
} else if s2, ok := state.(*StarLoopbackState); ok {
for j := 0; j < len(s2.GetTransitions()); j++ {
target := s2.GetTransitions()[j].getTarget()
var target = s2.GetTransitions()[j].getTarget()

if t2, ok := target.(*StarLoopEntryState); ok {
t2.loopBackState = state
}
}
@ -321,9 +371,11 @@ func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {

func (a *ATNDeserializer) readDecisions(atn *ATN) {
var ndecisions = a.readInt()

for i := 0; i < ndecisions; i++ {
var s = a.readInt()
var decState = atn.states[s].(DecisionState)

atn.DecisionToState = append(atn.DecisionToState, decState)
decState.setDecision(i)
}

@ -332,18 +384,25 @@ func (a *ATNDeserializer) readDecisions(atn *ATN) {
func (a *ATNDeserializer) readLexerActions(atn *ATN) {
if atn.grammarType == ATNTypeLexer {
var count = a.readInt()

atn.lexerActions = make([]LexerAction, count) // initIntArray(count, nil)

for i := 0; i < count; i++ {
var actionType = a.readInt()
var data1 = a.readInt()

if data1 == 0xFFFF {
data1 = -1
}

var data2 = a.readInt()

if data2 == 0xFFFF {
data2 = -1
}

var lexerAction = a.lexerActionFactory(actionType, data1, data2)

atn.lexerActions[i] = lexerAction
}
}
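
Both readRules and readLexerActions above map the serialized sentinel 0xFFFF back to -1. A minimal sketch of that decoding step, with a hypothetical helper name that is not part of the runtime:

package main

import "fmt"

// decodeSentinel mirrors the checks above: the 16-bit value 0xFFFF stands for -1
// (for example TokenEOF as a rule's token type); every other value passes through.
func decodeSentinel(v int) int {
	if v == 0xFFFF {
		return -1
	}
	return v
}

func main() {
	fmt.Println(decodeSentinel(0xFFFF)) // -1
	fmt.Println(decodeSentinel(42))     // 42
}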
@ -351,21 +410,24 @@ func (a *ATNDeserializer) readLexerActions(atn *ATN) {

func (a *ATNDeserializer) generateRuleBypassTransitions(atn *ATN) {
var count = len(atn.ruleToStartState)

for i := 0; i < count; i++ {
atn.ruleToTokenType[i] = atn.maxTokenType + i + 1
}

for i := 0; i < count; i++ {
a.generateRuleBypassTransition(atn, i)
}
}

func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {

var bypassStart = NewBasicBlockStartState()

bypassStart.ruleIndex = idx
atn.addState(bypassStart)

var bypassStop = NewBlockEndState()

bypassStop.ruleIndex = idx
atn.addState(bypassStop)

@ -379,16 +441,20 @@ func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
var endState ATNState

if atn.ruleToStartState[idx].isPrecedenceRule {
// wrap from the beginning of the rule to the StarLoopEntryState
// Wrap from the beginning of the rule to the StarLoopEntryState
endState = nil

for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
var state = atn.states[i]

if a.stateIsEndStateFor(state, idx) != nil {
endState = state
excludeTransition = state.(*StarLoopEntryState).loopBackState.GetTransitions()[0]

break
}
}

if excludeTransition == nil {
panic("Couldn't identify final state of the precedence rule prefix section.")
}
@ -396,34 +462,39 @@ func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
endState = atn.ruleToStopState[idx]
}

// all non-excluded transitions that currently target end state need to
// target blockEnd instead
// All non-excluded transitions that currently target end state need to target
// blockEnd instead
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
var state = atn.states[i]

for j := 0; j < len(state.GetTransitions()); j++ {
var transition = state.GetTransitions()[j]

if transition == excludeTransition {
continue
}

if transition.getTarget() == endState {
transition.setTarget(bypassStop)
}
}
}

// all transitions leaving the rule start state need to leave blockStart
// instead
// All transitions leaving the rule start state need to leave blockStart instead
var ruleToStartState = atn.ruleToStartState[idx]
var count = len(ruleToStartState.GetTransitions())

for count > 0 {
bypassStart.AddTransition(ruleToStartState.GetTransitions()[count-1], -1)
ruleToStartState.SetTransitions([]Transition{ruleToStartState.GetTransitions()[len(ruleToStartState.GetTransitions())-1]})
}
// link the new states
// Link the new states
atn.ruleToStartState[idx].AddTransition(NewEpsilonTransition(bypassStart, -1), -1)
bypassStop.AddTransition(NewEpsilonTransition(endState, -1), -1)

var MatchState = NewBasicState()

atn.addState(MatchState)
MatchState.AddTransition(NewAtomTransition(bypassStop, atn.ruleToTokenType[idx]), -1)
bypassStart.AddTransition(NewEpsilonTransition(MatchState, -1), -1)
@ -433,15 +504,18 @@ func (a *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNState {
if state.GetRuleIndex() != idx {
return nil
}

if _, ok := state.(*StarLoopEntryState); !ok {
return nil
}

var maybeLoopEndState = state.GetTransitions()[len(state.GetTransitions())-1].getTarget()

if _, ok := maybeLoopEndState.(*LoopEndState); !ok {
return nil
}

_, ok := maybeLoopEndState.GetTransitions()[0].getTarget().(*RuleStopState)
var _, ok = maybeLoopEndState.GetTransitions()[0].getTarget().(*RuleStopState)

if maybeLoopEndState.(*LoopEndState).epsilonOnlyTransitions && ok {
return state

@ -450,29 +524,23 @@ func (a *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNState {
return nil
}

//
// Analyze the {@link StarLoopEntryState} states in the specified ATN to set
// the {@link StarLoopEntryState//precedenceRuleDecision} field to the
// correct value.
//
// @param atn The ATN.
//
// markPrecedenceDecisions analyzes the StarLoopEntryState states in the
// specified ATN to set the StarLoopEntryState.precedenceRuleDecision field to
// the correct value.
func (a *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
for _, state := range atn.states {
if _, ok := state.(*StarLoopEntryState); !ok {
continue
}

// We analyze the ATN to determine if a ATN decision state is the
// decision for the closure block that determines whether a
// precedence rule should continue or complete.
//
if atn.ruleToStartState[state.GetRuleIndex()].isPrecedenceRule {

var maybeLoopEndState = state.GetTransitions()[len(state.GetTransitions())-1].getTarget()

if s3, ok := maybeLoopEndState.(*LoopEndState); ok {
_, ok2 := maybeLoopEndState.GetTransitions()[0].getTarget().(*RuleStopState)
var _, ok2 = maybeLoopEndState.GetTransitions()[0].getTarget().(*RuleStopState)

if s3.epsilonOnlyTransitions && ok2 {
state.(*StarLoopEntryState).precedenceRuleDecision = true
@ -486,53 +554,67 @@ func (a *ATNDeserializer) verifyATN(atn *ATN) {
if !a.deserializationOptions.verifyATN {
return
}
// verify assumptions
// Verify assumptions
for i := 0; i < len(atn.states); i++ {
var state = atn.states[i]

if state == nil {
continue
}

a.checkCondition(state.GetEpsilonOnlyTransitions() || len(state.GetTransitions()) <= 1, "")

switch s2 := state.(type) {

case *PlusBlockStartState:
a.checkCondition(s2.loopBackState != nil, "")

case *StarLoopEntryState:
a.checkCondition(s2.loopBackState != nil, "")
a.checkCondition(len(s2.GetTransitions()) == 2, "")

switch s2 := state.(type) {
case *StarBlockStartState:
_, ok2 := s2.GetTransitions()[1].getTarget().(*LoopEndState)
var _, ok2 = s2.GetTransitions()[1].getTarget().(*LoopEndState)

a.checkCondition(ok2, "")
a.checkCondition(!s2.nonGreedy, "")

case *LoopEndState:
s3, ok2 := s2.GetTransitions()[1].getTarget().(*StarBlockStartState)
var s3, ok2 = s2.GetTransitions()[1].getTarget().(*StarBlockStartState)

a.checkCondition(ok2, "")
a.checkCondition(s3.nonGreedy, "")

default:
panic("IllegalState")
}

case *StarLoopbackState:
a.checkCondition(len(state.GetTransitions()) == 1, "")
_, ok2 := state.GetTransitions()[0].getTarget().(*StarLoopEntryState)
var _, ok2 = state.GetTransitions()[0].getTarget().(*StarLoopEntryState)

a.checkCondition(ok2, "")

case *LoopEndState:
a.checkCondition(s2.loopBackState != nil, "")

case *RuleStartState:
a.checkCondition(s2.stopState != nil, "")

case *BaseBlockStartState:
a.checkCondition(s2.endState != nil, "")

case *BlockEndState:
a.checkCondition(s2.startState != nil, "")

case DecisionState:
a.checkCondition(len(s2.GetTransitions()) <= 1 || s2.getDecision() >= 0, "")

default:
_, ok := s2.(*RuleStopState)
var _, ok = s2.(*RuleStopState)

a.checkCondition(len(s2.GetTransitions()) <= 1 || ok, "")
}
}
@ -543,16 +625,20 @@ func (a *ATNDeserializer) checkCondition(condition bool, message string) {
if message == "" {
message = "IllegalState"
}

panic(message)
}
}

func (a *ATNDeserializer) readInt() int {
v := a.data[a.pos]
var v = a.data[a.pos]

a.pos++

return int(v)
}

//TODO
//func (a *ATNDeserializer) readLong() int64 {
// panic("Not implemented")
// var low = a.readInt32()

@ -562,9 +648,11 @@ func (a *ATNDeserializer) readInt() int {

func createByteToHex() []string {
var bth = make([]string, 256)

for i := 0; i < 256; i++ {
bth[i] = strings.ToUpper(hex.EncodeToString([]byte{byte(i)}))
}

return bth
}

@ -572,11 +660,14 @@ var byteToHex = createByteToHex()

func (a *ATNDeserializer) readUUID() string {
var bb = make([]int, 16)

for i := 7; i >= 0; i-- {
var integer = a.readInt()

bb[(2*i)+1] = integer & 0xFF
bb[2*i] = (integer >> 8) & 0xFF
}

return byteToHex[bb[0]] + byteToHex[bb[1]] +
byteToHex[bb[2]] + byteToHex[bb[3]] + "-" +
byteToHex[bb[4]] + byteToHex[bb[5]] + "-" +
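
readInt and readUUID above walk the serialized data with a simple cursor. As an aside, here is a standalone sketch of the same cursor-style reading over a plain slice; the type and field names are hypothetical, not the runtime's own.

package main

import (
	"encoding/hex"
	"fmt"
	"strings"
)

// intReader mimics the a.data/a.pos cursor used by readInt.
type intReader struct {
	data []uint16
	pos  int
}

func (r *intReader) readInt() int {
	v := r.data[r.pos]
	r.pos++
	return int(v)
}

func main() {
	r := &intReader{data: []uint16{3, 0x1122, 0xFFFF}}
	fmt.Println(r.readInt(), r.readInt(), r.readInt()) // 3 4386 65535

	// The byte-to-hex table is built the same way createByteToHex does it.
	bth := make([]string, 256)
	for i := 0; i < 256; i++ {
		bth[i] = strings.ToUpper(hex.EncodeToString([]byte{byte(i)}))
	}
	fmt.Println(bth[0x1F], bth[0xAB]) // 1F AB
}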
@ -588,34 +679,44 @@ func (a *ATNDeserializer) readUUID() string {
}

func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition {

var target = atn.states[trg]

switch typeIndex {
case TransitionEPSILON:
return NewEpsilonTransition(target, -1)

case TransitionRANGE:
if arg3 != 0 {
return NewRangeTransition(target, TokenEOF, arg2)
}

return NewRangeTransition(target, arg1, arg2)

case TransitionRULE:
return NewRuleTransition(atn.states[arg1], arg2, arg3, target)

case TransitionPREDICATE:
return NewPredicateTransition(target, arg1, arg2, arg3 != 0)

case TransitionPRECEDENCE:
return NewPrecedencePredicateTransition(target, arg1)

case TransitionATOM:
if arg3 != 0 {
return NewAtomTransition(target, TokenEOF)
}

return NewAtomTransition(target, arg1)

case TransitionACTION:
return NewActionTransition(target, arg1, arg2, arg3 != 0)

case TransitionSET:
return NewSetTransition(target, sets[arg1])

case TransitionNOTSET:
return NewNotSetTransition(target, sets[arg1])

case TransitionWILDCARD:
return NewWildcardTransition(target)
}
@ -624,41 +725,54 @@ func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2,
}

func (a *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {

var s ATNState

switch typeIndex {
case ATNStateInvalidType:
return nil

case ATNStateBasic:
s = NewBasicState()

case ATNStateRuleStart:
s = NewRuleStartState()

case ATNStateBlockStart:
s = NewBasicBlockStartState()

case ATNStatePlusBlockStart:
s = NewPlusBlockStartState()

case ATNStateStarBlockStart:
s = NewStarBlockStartState()

case ATNStateTokenStart:
s = NewTokensStartState()

case ATNStateRuleStop:
s = NewRuleStopState()

case ATNStateBlockEnd:
s = NewBlockEndState()

case ATNStateStarLoopBack:
s = NewStarLoopbackState()

case ATNStateStarLoopEntry:
s = NewStarLoopEntryState()

case ATNStatePlusLoopBack:
s = NewPlusLoopbackState()

case ATNStateLoopEnd:
s = NewLoopEndState()

default:
message := fmt.Sprintf("The specified state type %d is not valid.", typeIndex)
panic(message)
panic(fmt.Sprintf("state type %d is invalid", typeIndex))
}

s.SetRuleIndex(ruleIndex)

return s
}
@ -666,22 +780,29 @@ func (a *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) LexerA
switch typeIndex {
case LexerActionTypeChannel:
return NewLexerChannelAction(data1)

case LexerActionTypeCustom:
return NewLexerCustomAction(data1, data2)

case LexerActionTypeMode:
return NewLexerModeAction(data1)

case LexerActionTypeMore:
return LexerMoreActionINSTANCE

case LexerActionTypePopMode:
return LexerPopModeActionINSTANCE

case LexerActionTypePushMode:
return NewLexerPushModeAction(data1)

case LexerActionTypeSkip:
return LexerSkipActionINSTANCE

case LexerActionTypeType:
return NewLexerTypeAction(data1)

default:
message := fmt.Sprintf("The specified lexer action typeIndex%d is not valid.", typeIndex)
panic(message)
panic(fmt.Sprintf("lexer action %d is invalid", typeIndex))
}
}
@ -1,13 +1,14 @@
package antlr

var ATNSimulatorError = NewDFAState(0x7FFFFFFF, NewBaseATNConfigSet(false))

type BaseATNSimulator struct {
atn *ATN
sharedContextCache *PredictionContextCache
}

func NewBaseATNSimulator(atn *ATN, sharedContextCache *PredictionContextCache) *BaseATNSimulator {
b := new(BaseATNSimulator)
var b = new(BaseATNSimulator)

b.atn = atn
b.sharedContextCache = sharedContextCache

@ -15,12 +16,12 @@ func NewBaseATNSimulator(atn *ATN, sharedContextCache *PredictionContextCache) *
return b
}

var ATNSimulatorError = NewDFAState(0x7FFFFFFF, NewBaseATNConfigSet(false))

func (b *BaseATNSimulator) getCachedContext(context PredictionContext) PredictionContext {
if b.sharedContextCache == nil {
return context
}

var visited = make(map[PredictionContext]PredictionContext)

return getCachedBasePredictionContext(context, b.sharedContextCache, visited)
}
@ -2,8 +2,8 @@ package antlr

import "strconv"

// Constants for serialization.
const (
// constants for serialization
ATNStateInvalidType = 0
ATNStateBasic = 1
ATNStateRuleStart = 2

@ -48,34 +48,27 @@ type ATNState interface {
}

type BaseATNState struct {
// Which ATN are we in?
atn *ATN
stateNumber int
stateType int
ruleIndex int
epsilonOnlyTransitions bool
// Track the transitions emanating from this ATN state.
transitions []Transition
// Used to cache lookahead during parsing, not used during construction
NextTokenWithinRule *IntervalSet

// NextTokenWithinRule caches lookahead during parsing. Not used during construction.
NextTokenWithinRule *IntervalSet
// atn is the current ATN.
atn *ATN
epsilonOnlyTransitions bool
// ruleIndex tracks the Rule index because there are no Rule objects at runtime.
ruleIndex int
stateNumber int
stateType int
// Track the transitions emanating from this ATN state.
transitions []Transition
}

func NewBaseATNState() *BaseATNState {
as := new(BaseATNState)
// Which ATN are we in?
as.atn = nil
as.stateNumber = ATNStateInvalidStateNumber
as.stateType = ATNStateInvalidType
as.ruleIndex = 0 // at runtime, we don't have Rule objects
as.epsilonOnlyTransitions = false
// Track the transitions emanating from this ATN state.
as.transitions = make([]Transition, 0)
// Used to cache lookahead during parsing, not used during construction
as.NextTokenWithinRule = nil
return as

return &BaseATNState{stateNumber: ATNStateInvalidStateNumber, stateType: ATNStateInvalidType}
}

func (as *BaseATNState) GetRuleIndex() int {
@ -147,11 +140,12 @@ func (as *BaseATNState) AddTransition(trans Transition, index int) {
} else if as.epsilonOnlyTransitions != trans.getIsEpsilon() {
as.epsilonOnlyTransitions = false
}

if index == -1 {
as.transitions = append(as.transitions, trans)
} else {
as.transitions = append(as.transitions[:index], append([]Transition{trans}, as.transitions[index:]...)...)
// as.transitions.splice(index, 1, trans)
// TODO: as.transitions.splice(index, 1, trans)
}
}

@ -160,11 +154,11 @@ type BasicState struct {
}

func NewBasicState() *BasicState {
b := new(BasicState)
b.BaseATNState = NewBaseATNState()
b.stateType = ATNStateBasic
return b

var b = NewBaseATNState()
b.stateType = ATNStateBasic
return &BasicState{BaseATNState: b}
}

type DecisionState interface {
@ -179,21 +173,12 @@ type DecisionState interface {

type BaseDecisionState struct {
*BaseATNState

decision int
nonGreedy bool
}

func NewBaseDecisionState() *BaseDecisionState {
b := new(BaseDecisionState)
b.BaseATNState = NewBaseATNState()
b.decision = -1
b.nonGreedy = false
return b

return &BaseDecisionState{BaseATNState: NewBaseATNState(), decision: -1}
}

func (s *BaseDecisionState) getDecision() int {
@ -219,21 +204,14 @@ type BlockStartState interface {
setEndState(*BlockEndState)
}

// The start of a regular {@code (...)} block.
// BaseBlockStartState is the start of a regular (...) block.
type BaseBlockStartState struct {
*BaseDecisionState

endState *BlockEndState
}

func NewBlockStartState() *BaseBlockStartState {
b := new(BaseBlockStartState)
b.BaseDecisionState = NewBaseDecisionState()
b.endState = nil
return b

return &BaseBlockStartState{BaseDecisionState: NewBaseDecisionState()}
}

func (s *BaseBlockStartState) getEndState() *BlockEndState {
@ -249,123 +227,99 @@ type BasicBlockStartState struct {
}

func NewBasicBlockStartState() *BasicBlockStartState {
b := new(BasicBlockStartState)
b.BaseBlockStartState = NewBlockStartState()
b.stateType = ATNStateBlockStart
return b

var b = NewBlockStartState()
b.stateType = ATNStateBlockStart
return &BasicBlockStartState{BaseBlockStartState: b}
}

// Terminal node of a simple {@code (a|b|c)} block.
// BlockEndState is a terminal node of a simple (a|b|c) block.
type BlockEndState struct {
*BaseATNState

startState ATNState
}

func NewBlockEndState() *BlockEndState {
b := new(BlockEndState)
b.BaseATNState = NewBaseATNState()
b.stateType = ATNStateBlockEnd
b.startState = nil
return b

var b = NewBaseATNState()
b.stateType = ATNStateBlockEnd
return &BlockEndState{BaseATNState: b}
}

// The last node in the ATN for a rule, unless that rule is the start symbol.
// In that case, there is one transition to EOF. Later, we might encode
// references to all calls to this rule to compute FOLLOW sets for
// error handling.
//
// RuleStopState is the last node in the ATN for a rule, unless that rule is the
// start symbol. In that case, there is one transition to EOF. Later, we might
// encode references to all calls to this rule to compute FOLLOW sets for error
// handling.
type RuleStopState struct {
*BaseATNState
}

func NewRuleStopState() *RuleStopState {
r := new(RuleStopState)
r.BaseATNState = NewBaseATNState()
r.stateType = ATNStateRuleStop
return r

var b = NewBaseATNState()
b.stateType = ATNStateRuleStop
return &RuleStopState{BaseATNState: b}
}

type RuleStartState struct {
*BaseATNState

stopState ATNState
isPrecedenceRule bool
}

func NewRuleStartState() *RuleStartState {
r := new(RuleStartState)
r.BaseATNState = NewBaseATNState()
r.stateType = ATNStateRuleStart
r.stopState = nil
r.isPrecedenceRule = false
return r

var b = NewBaseATNState()
b.stateType = ATNStateRuleStart
return &RuleStartState{BaseATNState: b}
}

// Decision state for {@code A+} and {@code (A|B)+}. It has two transitions:
// one to the loop back to start of the block and one to exit.
//
// PlusLoopbackState is a decision state for A+ and (A|B)+. It has two
// transitions: one to the loop back to start of the block, and one to exit.
type PlusLoopbackState struct {
*BaseDecisionState
}

func NewPlusLoopbackState() *PlusLoopbackState {
p := new(PlusLoopbackState)
p.BaseDecisionState = NewBaseDecisionState()
p.stateType = ATNStatePlusLoopBack
return p

var b = NewBaseDecisionState()
b.stateType = ATNStatePlusLoopBack
return &PlusLoopbackState{BaseDecisionState: b}
}

// Start of {@code (A|B|...)+} loop. Technically a decision state, but
// we don't use for code generation somebody might need it, so I'm defining
// it for completeness. In reality, the {@link PlusLoopbackState} node is the
// real decision-making note for {@code A+}.
//
// PlusBlockStartState is the start of a (A|B|...)+ loop. Technically it is a
// decision state; we don't use it for code generation. Somebody might need it,
// it is included for completeness. In reality, PlusLoopbackState is the real
// decision-making node for A+.
type PlusBlockStartState struct {
*BaseBlockStartState

loopBackState ATNState
}

func NewPlusBlockStartState() *PlusBlockStartState {
p := new(PlusBlockStartState)
p.BaseBlockStartState = NewBlockStartState()
p.stateType = ATNStatePlusBlockStart
p.loopBackState = nil
return p

var b = NewBlockStartState()
b.stateType = ATNStatePlusBlockStart
return &PlusBlockStartState{BaseBlockStartState: b}
}

// The block that begins a closure loop.
// StarBlockStartState is the block that begins a closure loop.
type StarBlockStartState struct {
*BaseBlockStartState
}

func NewStarBlockStartState() *StarBlockStartState {
s := new(StarBlockStartState)
s.BaseBlockStartState = NewBlockStartState()
s.stateType = ATNStateStarBlockStart
return s

var b = NewBlockStartState()
b.stateType = ATNStateStarBlockStart
return &StarBlockStartState{BaseBlockStartState: b}
}

type StarLoopbackState struct {
@ -373,67 +327,51 @@ type StarLoopbackState struct {
}

func NewStarLoopbackState() *StarLoopbackState {
s := new(StarLoopbackState)
s.BaseATNState = NewBaseATNState()
s.stateType = ATNStateStarLoopBack
return s

var b = NewBaseATNState()
b.stateType = ATNStateStarLoopBack
return &StarLoopbackState{BaseATNState: b}
}

type StarLoopEntryState struct {
*BaseDecisionState

loopBackState ATNState
precedenceRuleDecision bool
}

func NewStarLoopEntryState() *StarLoopEntryState {
s := new(StarLoopEntryState)
s.BaseDecisionState = NewBaseDecisionState()
s.stateType = ATNStateStarLoopEntry
s.loopBackState = nil
// Indicates whether s state can benefit from a precedence DFA during SLL decision making.
s.precedenceRuleDecision = false
return s

var b = NewBaseDecisionState()
b.stateType = ATNStateStarLoopEntry
// False precedenceRuleDecision indicates whether s state can benefit from a precedence DFA during SLL decision making.
return &StarLoopEntryState{BaseDecisionState: b}
}

// Mark the end of a * or + loop.
// LoopEndState marks the end of a * or + loop.
type LoopEndState struct {
*BaseATNState

loopBackState ATNState
}

func NewLoopEndState() *LoopEndState {
l := new(LoopEndState)
l.BaseATNState = NewBaseATNState()
l.stateType = ATNStateLoopEnd
l.loopBackState = nil
return l

var b = NewBaseATNState()
b.stateType = ATNStateLoopEnd
return &LoopEndState{BaseATNState: b}
}

// The Tokens rule start state linking to each lexer rule start state */
// TokensStartState is the Tokens rule start state linking to each lexer rule start state.
type TokensStartState struct {
*BaseDecisionState
}

func NewTokensStartState() *TokensStartState {
t := new(TokensStartState)
t.BaseDecisionState = NewBaseDecisionState()
t.stateType = ATNStateTokenStart
return t

var b = NewBaseDecisionState()
b.stateType = ATNStateTokenStart
return &TokensStartState{BaseDecisionState: b}
}
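
The rewritten constructors above all follow one pattern: build the embedded base state, set its stateType, then wrap it in a composite literal. A minimal sketch of that embedding pattern, using hypothetical types rather than the runtime's:

package main

import "fmt"

type baseState struct{ stateType int }

type loopEndState struct {
	*baseState
}

func newBaseState() *baseState { return &baseState{} }

func newLoopEndState() *loopEndState {
	var b = newBaseState()
	b.stateType = 12 // stand-in for one of the ATNState* constants
	return &loopEndState{baseState: b}
}

func main() {
	// stateType is promoted through the embedded *baseState field.
	fmt.Println(newLoopEndState().stateType) // 12
}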
@ -1,7 +1,6 @@
package antlr

// Represents the type of recognizer an ATN applies to.
// Represent the type of recognizer an ATN applies to.
const (
ATNTypeLexer = 0
ATNTypeParser = 1

@ -41,7 +41,7 @@ type BaseParser struct {

tracer *TraceListener
parseListeners []ParseTreeListener
_SyntaxErrors int
}

// p.is all the parsing support code essentially most of it is error

@ -26,7 +26,7 @@ type Recognizer interface {

type BaseRecognizer struct {
listeners []ErrorListener
state int

RuleNames []string
LiteralNames []string

@ -42,7 +42,7 @@ type BaseToken struct {
tokenIndex int // from 0..n-1 of the token object in the input stream
line int // line=1..n of the 1st character
column int // beginning of the line at which it occurs, 0..n-1
text string // text of the token.
readOnly bool
}