Forked from jasder/antlr

Lint: Outdent else blocks for if blocks that return

parent 33fd68778e
commit 26df617532
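The change applies one mechanical cleanup across the runtime: wherever an if block ends in a return (or panic), the trailing else is dropped and its body outdented, which is the style golint recommends. A minimal sketch of the pattern, using a hypothetical function rather than code from this diff:

	// Before: the else is redundant because the if branch always returns.
	func describe(n int) string {
		if n == 0 {
			return "zero"
		} else {
			return "non-zero"
		}
	}

	// After: drop the else and outdent its block.
	func describe(n int) string {
		if n == 0 {
			return "zero"
		}

		return "non-zero"
	}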
@@ -88,9 +88,9 @@ func (a *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
 func (a *ATN) NextTokens(s ATNState, ctx RuleContext) *IntervalSet {
 	if ctx == nil {
 		return a.NextTokensNoContext(s)
-	} else {
-		return a.NextTokensInContext(s, ctx)
 	}
+
+	return a.NextTokensInContext(s, ctx)
 }
 
 func (a *ATN) addState(state ATNState) {

@@ -114,9 +114,9 @@ func (a *ATN) defineDecisionState(s DecisionState) int {
 func (a *ATN) getDecisionState(decision int) DecisionState {
 	if len(a.DecisionToState) == 0 {
 		return nil
-	} else {
-		return a.DecisionToState[decision]
 	}
+
+	return a.DecisionToState[decision]
 }
 
 // Computes the set of input symbols which could follow ATN state number

@@ -315,9 +315,9 @@ func (l *LexerATNConfig) equals(other interface{}) bool {
 
 	if b {
 		return false
-	} else {
-		return l.BaseATNConfig.equals(othert.BaseATNConfig)
 	}
+
+	return l.BaseATNConfig.equals(othert.BaseATNConfig)
 }
 
 func checkNonGreedyDecision(source *LexerATNConfig, target ATNState) bool {

@@ -221,9 +221,9 @@ func (b *BaseATNConfigSet) Hash() string {
 			b.cachedHashString = b.hashConfigs()
 		}
 		return b.cachedHashString
-	} else {
-		return b.hashConfigs()
 	}
+
+	return b.hashConfigs()
 }
 
 func (b *BaseATNConfigSet) hashConfigs() string {

@@ -445,9 +445,9 @@ func (a *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNState {
 
 	if maybeLoopEndState.(*LoopEndState).epsilonOnlyTransitions && ok {
 		return state
-	} else {
-		return nil
 	}
+
+	return nil
 }
 
 //

@@ -597,9 +597,8 @@ func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2,
 	case TransitionRANGE:
 		if arg3 != 0 {
 			return NewRangeTransition(target, TokenEOF, arg2)
-		} else {
-			return NewRangeTransition(target, arg1, arg2)
 		}
+		return NewRangeTransition(target, arg1, arg2)
 	case TransitionRULE:
 		return NewRuleTransition(atn.states[arg1], arg2, arg3, target)
 	case TransitionPREDICATE:

@@ -609,9 +608,8 @@ func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2,
 	case TransitionATOM:
 		if arg3 != 0 {
 			return NewAtomTransition(target, TokenEOF)
-		} else {
-			return NewAtomTransition(target, arg1)
 		}
+		return NewAtomTransition(target, arg1)
 	case TransitionACTION:
 		return NewActionTransition(target, arg1, arg2, arg3 != 0)
 	case TransitionSET:

@@ -132,9 +132,9 @@ func (as *BaseATNState) String() string {
 func (as *BaseATNState) equals(other interface{}) bool {
 	if ot, ok := other.(ATNState); ok {
 		return as.stateNumber == ot.GetStateNumber()
-	} else {
-		return false
 	}
+
+	return false
 }
 
 func (as *BaseATNState) isNonGreedyExitState() bool {

@@ -69,9 +69,9 @@ func (d *DFASerializer) getEdgeLabel(i int) string {
 		return d.literalNames[i-1]
 	} else if d.symbolicNames != nil && i-1 < len(d.symbolicNames) {
 		return d.symbolicNames[i-1]
-	} else {
-		return strconv.Itoa(i - 1)
 	}
+
+	return strconv.Itoa(i - 1)
 }
 
 func (d *DFASerializer) GetStateString(s *DFAState) string {

@@ -90,13 +90,13 @@ func (d *DFASerializer) GetStateString(s *DFAState) string {
 	if s.isAcceptState {
 		if s.predicates != nil {
 			return baseStateStr + "=>" + fmt.Sprint(s.predicates)
-		} else {
+		}
+
 			return baseStateStr + "=>" + fmt.Sprint(s.prediction)
-		}
-	} else {
+
 		return baseStateStr
 	}
 }
 
 type LexerDFASerializer struct {
 	*DFASerializer

@@ -115,9 +115,9 @@ func (d *DFAState) GetAltSet() *Set {
 	}
 	if alts.length() == 0 {
 		return nil
-	} else {
-		return alts
 	}
+
+	return alts
 }
 
 func (d *DFAState) setPrediction(v int) {

@@ -249,9 +249,8 @@ func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
 		// Report error and recover if possible
 		if d.singleTokenDeletion(recognizer) != nil {
 			return
-		} else {
-			panic(NewInputMisMatchException(recognizer))
 		}
+		panic(NewInputMisMatchException(recognizer))
 	case ATNStatePlusLoopBack:
 		fallthrough
 	case ATNStateStarLoopBack:

@@ -472,9 +471,9 @@ func (d *DefaultErrorStrategy) singleTokenInsertion(recognizer Parser) bool {
 	if expectingAtLL2.contains(currentSymbolType) {
 		d.ReportMissingToken(recognizer)
 		return true
-	} else {
-		return false
 	}
+
+	return false
 }
 
 // This method implements the single-token deletion inline error recovery

@@ -509,9 +508,9 @@ func (d *DefaultErrorStrategy) singleTokenDeletion(recognizer Parser) Token {
 		var MatchedSymbol = recognizer.GetCurrentToken()
 		d.ReportMatch(recognizer) // we know current token is correct
 		return MatchedSymbol
-	} else {
-		return nil
 	}
+
+	return nil
 }
 
 // Conjure up a missing token during error recovery.

@@ -83,9 +83,9 @@ func (b *BaseRecognitionException) GetInputStream() IntStream {
 func (b *BaseRecognitionException) getExpectedTokens() *IntervalSet {
 	if b.recognizer != nil {
 		return b.recognizer.GetATN().getExpectedTokens(b.offendingState, b.ctx)
-	} else {
-		return nil
 	}
+
+	return nil
 }
 
 func (b *BaseRecognitionException) String() string {

@@ -222,9 +222,9 @@ func NewFailedPredicateException(recognizer Parser, predicate string, message st
 func (f *FailedPredicateException) formatMessage(predicate, message string) string {
 	if message != "" {
 		return message
-	} else {
-		return "failed predicate: {" + predicate + "}?"
 	}
+
+	return "failed predicate: {" + predicate + "}?"
 }
 
 type ParseCancellationException struct {

@@ -88,9 +88,9 @@ func (is *InputStream) GetText(start int, stop int) string {
 	}
 	if start >= is.size {
 		return ""
-	} else {
-		return string(is.data[start : stop+1])
 	}
+
+	return string(is.data[start : stop+1])
 }
 
 func (is *InputStream) GetTextFromTokens(start, stop Token) string {

@@ -27,9 +27,9 @@ func (i *Interval) contains(item int) bool {
 func (i *Interval) String() string {
 	if i.start == i.stop-1 {
 		return strconv.Itoa(i.start)
-	} else {
-		return strconv.Itoa(i.start) + ".." + strconv.Itoa(i.stop-1)
 	}
+
+	return strconv.Itoa(i.start) + ".." + strconv.Itoa(i.stop-1)
 }
 
 func (i *Interval) length() int {

@@ -54,9 +54,9 @@ func NewIntervalSet() *IntervalSet {
 func (i *IntervalSet) first() int {
 	if len(i.intervals) == 0 {
 		return TokenInvalidType
-	} else {
-		return i.intervals[0].start
 	}
+
+	return i.intervals[0].start
 }
 
 func (i *IntervalSet) addOne(v int) {

@@ -141,7 +141,7 @@ func (i *IntervalSet) complement(start int, stop int) *IntervalSet {
 func (i *IntervalSet) contains(item int) bool {
 	if i.intervals == nil {
 		return false
-	} else {
+	}
 		for k := 0; k < len(i.intervals); k++ {
 			if i.intervals[k].contains(item) {
 				return true

@@ -149,7 +149,6 @@ func (i *IntervalSet) contains(item int) bool {
 		}
 		return false
-	}
 }
 
 func (i *IntervalSet) length() int {
 	len := 0

@@ -231,9 +230,9 @@ func (i *IntervalSet) StringVerbose(literalNames []string, symbolicNames []strin
 		return i.toTokenString(literalNames, symbolicNames)
 	} else if elemsAreChar {
 		return i.toCharString()
-	} else {
-		return i.toIndexString()
 	}
+
+	return i.toIndexString()
 }
 
 func (i *IntervalSet) toCharString() string {

@@ -253,9 +252,9 @@ func (i *IntervalSet) toCharString() string {
 	}
 	if len(names) > 1 {
 		return "{" + strings.Join(names, ", ") + "}"
-	} else {
-		return names[0]
 	}
+
+	return names[0]
 }
 
 func (i *IntervalSet) toIndexString() string {

@@ -275,9 +274,9 @@ func (i *IntervalSet) toIndexString() string {
 	}
 	if len(names) > 1 {
 		return "{" + strings.Join(names, ", ") + "}"
-	} else {
-		return names[0]
 	}
+
+	return names[0]
 }
 
 func (i *IntervalSet) toTokenString(literalNames []string, symbolicNames []string) string {

@@ -289,9 +288,9 @@ func (i *IntervalSet) toTokenString(literalNames []string, symbolicNames []strin
 	}
 	if len(names) > 1 {
 		return "{" + strings.Join(names, ", ") + "}"
-	} else {
-		return names[0]
 	}
+
+	return names[0]
 }
 
 func (i *IntervalSet) elementName(literalNames []string, symbolicNames []string, a int) string {

@@ -302,8 +301,8 @@ func (i *IntervalSet) elementName(literalNames []string, symbolicNames []string,
 	} else {
 		if a < len(literalNames) && literalNames[a] != "" {
 			return literalNames[a]
-		} else {
+		}
+
 			return symbolicNames[a]
-		}
 	}
 }

@@ -346,9 +346,9 @@ func (b *BaseLexer) getCharIndex() int {
 func (b *BaseLexer) GetText() string {
 	if b._text != "" {
 		return b._text
-	} else {
-		return b.Interpreter.GetText(b._input)
 	}
+
+	return b.Interpreter.GetText(b._input)
 }
 
 func (b *BaseLexer) SetText(text string) {

@@ -105,9 +105,9 @@ func (l *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionExecu
 	}
 	if updatedLexerActions == nil {
 		return l
-	} else {
-		return NewLexerActionExecutor(updatedLexerActions)
 	}
+
+	return NewLexerActionExecutor(updatedLexerActions)
 }
 
 // Execute the actions encapsulated by l executor within the context of a

@@ -130,13 +130,14 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
 			fmt.Println("MatchATN")
 		}
 		return l.MatchATN(input)
-	} else {
+	}
+
 		if PortDebug {
 			fmt.Println("execATN")
 		}
+
 		return l.execATN(input, dfa.s0)
-	}
 }
 
 func (l *LexerATNSimulator) reset() {
 	l.prevAccept.reset()

@@ -303,14 +304,15 @@ func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream,
 			fmt.Println(prevAccept.dfaState.prediction)
 		}
 		return prevAccept.dfaState.prediction
-	} else {
+	}
+
 		// if no accept and EOF is first char, return EOF
 		if t == TokenEOF && input.Index() == l.startIndex {
 			return TokenEOF
 		}
+
 		panic(NewLexerNoViableAltException(l.recog, input, l.startIndex, reach))
-	}
 }
 
 // Given a starting configuration set, figure out all ATN configurations
 // we can reach upon input {@code t}. Parameter {@code reach} is a return

@@ -371,9 +373,9 @@ func (l *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerA
 func (l *LexerATNSimulator) getReachableTarget(trans Transition, t int) ATNState {
 	if trans.Matches(t, 0, 0xFFFE) {
 		return trans.getTarget()
-	} else {
-		return nil
 	}
+
+	return nil
 }
 
 func (l *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *OrderedATNConfigSet {

@@ -422,11 +424,11 @@ func (l *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, co
 		if config.context == nil || config.context.isEmpty() {
 			configs.Add(config, nil)
 			return true
-		} else {
+		}
+
 			configs.Add(NewLexerATNConfig2(config, config.state, BasePredictionContextEMPTY), nil)
 			currentAltReachedAcceptState = true
-		}
 	}
 	if config.context != nil && !config.context.isEmpty() {
 		for i := 0; i < config.context.length(); i++ {
 			if config.context.getReturnState(i) != BasePredictionContextEMPTY_RETURN_STATE {

@@ -686,7 +688,7 @@ func (l *LexerATNSimulator) GetTokenName(tt int) string {
 	}
 	if tt == -1 {
 		return "EOF"
-	} else {
+	}
+
 		return "'" + string(tt) + "'"
-	}
 }

@@ -513,9 +513,9 @@ func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) {
 func (p *BaseParser) GetPrecedence() int {
 	if len(p._precedenceStack) == 0 {
 		return -1
-	} else {
-		return p._precedenceStack[len(p._precedenceStack)-1]
 	}
+
+	return p._precedenceStack[len(p._precedenceStack)-1]
 }
 
 func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, ruleIndex, precedence int) {

@@ -626,9 +626,9 @@ func (p *BaseParser) IsExpectedToken(symbol int) bool {
 	}
 	if following.contains(TokenEpsilon) && symbol == TokenEOF {
 		return true
-	} else {
-		return false
 	}
+
+	return false
 }
 
 // Computes the set of input symbols which could follow the current parser

@@ -652,9 +652,9 @@ func (p *BaseParser) GetRuleIndex(ruleName string) int {
 	var ruleIndex, ok = p.GetRuleIndexMap()[ruleName]
 	if ok {
 		return ruleIndex
-	} else {
-		return -1
 	}
+
+	return -1
 }
 
 // Return List<String> of the rule names in your parser instance

@@ -216,9 +216,9 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
 			alt := p.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configs, outerContext)
 			if alt != ATNInvalidAltNumber {
 				return alt
-			} else {
-				panic(e)
 			}
+
+			panic(e)
 		}
 		if D.requiresFullContext && p.predictionMode != PredictionModeSLL {
 			// IF PREDS, MIGHT RESOLVE TO SINGLE ALT => SLL (or syntax error)

@@ -295,9 +295,9 @@ func (p *ParserATNSimulator) getExistingTargetState(previousD *DFAState, t int)
 	var edges = previousD.edges
 	if edges == nil {
 		return nil
-	} else {
-		return edges[t+1]
 	}
+
+	return edges[t+1]
 }
 
 // Compute a target state for an edge in the DFA, and attempt to add the

@@ -408,9 +408,9 @@ func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 AT
 			var alt = p.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext)
 			if alt != ATNInvalidAltNumber {
 				return alt
-			} else {
-				panic(e)
 			}
+
+			panic(e)
 		}
 		var altSubSets = PredictionModegetConflictingAltSubsets(reach)
 		if ParserATNSimulatorDebug {

@@ -610,9 +610,9 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt
 	}
 	if len(reach.GetItems()) == 0 {
 		return nil
-	} else {
-		return reach
 	}
+
+	return reach
 }
 
 //

@@ -777,9 +777,9 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf
 func (p *ParserATNSimulator) getReachableTarget(trans Transition, ttype int) ATNState {
 	if trans.Matches(ttype, 0, p.atn.maxTokenType) {
 		return trans.getTarget()
-	} else {
-		return nil
 	}
+
+	return nil
 }
 
 func (p *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs ATNConfigSet, nalts int) []SemanticContext {

@@ -904,9 +904,9 @@ func (p *ParserATNSimulator) GetAltThatFinishedDecisionEntryRule(configs ATNConf
 	}
 	if alts.length() == 0 {
 		return ATNInvalidAltNumber
-	} else {
-		return alts.first()
 	}
+
+	return alts.first()
 }
 
 // Walk the list of configurations and split them according to

@@ -1123,9 +1123,9 @@ func (p *ParserATNSimulator) closure_(config ATNConfig, configs ATNConfigSet, cl
 func (p *ParserATNSimulator) getRuleName(index int) string {
 	if p.parser != nil && index >= 0 {
 		return p.parser.GetRuleNames()[index]
-	} else {
-		return "<rule " + fmt.Sprint(index) + ">"
 	}
+
+	return "<rule " + fmt.Sprint(index) + ">"
 }
 
 func (p *ParserATNSimulator) getEpsilonTarget(config ATNConfig, t Transition, collectPredicates, inContext, fullCtx, treatEofAsEpsilon bool) ATNConfig {

@@ -79,7 +79,8 @@ func (prc *BaseParserRuleContext) CopyFrom(ctx *BaseParserRuleContext) {
 func (prc *BaseParserRuleContext) GetText() string {
 	if prc.GetChildCount() == 0 {
 		return ""
-	} else {
+	}
+
 		var s string
 		for _, child := range prc.children {
 			s += child.(ParseTree).GetText()

@@ -87,7 +88,6 @@ func (prc *BaseParserRuleContext) GetText() string {
 
 		return s
-	}
 }
 
 // Double dispatch methods for listeners
 func (prc *BaseParserRuleContext) EnterRule(listener ParseTreeListener) {

@@ -148,28 +148,29 @@ func (prc *BaseParserRuleContext) AddErrorNode(badToken Token) *ErrorNodeImpl {
 func (prc *BaseParserRuleContext) GetChild(i int) Tree {
 	if prc.children != nil && len(prc.children) >= i {
 		return prc.children[i]
-	} else {
-		return nil
 	}
+
+	return nil
 }
 
 func (prc *BaseParserRuleContext) GetChildOfType(i int, childType reflect.Type) RuleContext {
 	if childType == nil {
 		return prc.GetChild(i).(RuleContext)
-	} else {
+	}
+
 		for j := 0; j < len(prc.children); j++ {
 			var child = prc.children[j]
 			if reflect.TypeOf(child) == childType {
 				if i == 0 {
 					return child.(RuleContext)
-				} else {
+				}
+
 					i -= 1
-				}
 			}
 		}
+
 		return nil
-	}
 }
 
 func (prc *BaseParserRuleContext) ToStringTree(ruleNames []string, recog Recognizer) string {
 	return TreesStringTree(prc, ruleNames, recog)

@@ -207,9 +208,9 @@ func (prc *BaseParserRuleContext) GetToken(ttype int, i int) TerminalNode {
 		if c2.GetSymbol().GetTokenType() == ttype {
 			if i == 0 {
 				return c2
-			} else {
-				i -= 1
 			}
+
+			i -= 1
 		}
 	}
 }

@@ -219,8 +220,10 @@ func (prc *BaseParserRuleContext) GetToken(ttype int, i int) TerminalNode {
 func (prc *BaseParserRuleContext) GetTokens(ttype int) []TerminalNode {
 	if prc.children == nil {
 		return make([]TerminalNode, 0)
-	} else {
+	}
+
 		var tokens = make([]TerminalNode, 0)
+
 		for j := 0; j < len(prc.children); j++ {
 			var child = prc.children[j]
 			if tchild, ok := child.(TerminalNode); ok {

@@ -229,9 +232,9 @@ func (prc *BaseParserRuleContext) GetTokens(ttype int) []TerminalNode {
 				}
 			}
 		}
+
 		return tokens
-	}
 }
 
 func (prc *BaseParserRuleContext) GetPayload() interface{} {
 	return prc

@@ -284,17 +287,17 @@ func (prc *BaseParserRuleContext) GetTypedRuleContexts(ctxType reflect.Type) []R
 func (prc *BaseParserRuleContext) GetChildCount() int {
 	if prc.children == nil {
 		return 0
-	} else {
-		return len(prc.children)
 	}
+
+	return len(prc.children)
 }
 
 func (prc *BaseParserRuleContext) GetSourceInterval() *Interval {
 	if prc.start == nil || prc.stop == nil {
 		return TreeInvalidInterval
-	} else {
-		return NewInterval(prc.start.GetTokenIndex(), prc.stop.GetTokenIndex())
 	}
+
+	return NewInterval(prc.start.GetTokenIndex(), prc.stop.GetTokenIndex())
 }
 
 //need to manage circular dependencies, so export now

@@ -154,9 +154,9 @@ func SingletonBasePredictionContextCreate(parent PredictionContext, returnState
 	if returnState == BasePredictionContextEMPTY_RETURN_STATE && parent == nil {
 		// someone can pass in the bits of an array ctx that mean $
 		return BasePredictionContextEMPTY
-	} else {
-		return NewBaseSingletonPredictionContext(parent, returnState)
 	}
+
+	return NewBaseSingletonPredictionContext(parent, returnState)
 }
 
 func (b *BaseSingletonPredictionContext) length() int {

@@ -182,7 +182,7 @@ func (b *BaseSingletonPredictionContext) equals(other PredictionContext) bool {
 		return false
 	} else if b.Hash() != other.Hash() {
 		return false // can't be same if hash is different
-	} else {
+	}
 
 		otherP := other.(*BaseSingletonPredictionContext)
 

@@ -190,11 +190,10 @@ func (b *BaseSingletonPredictionContext) equals(other PredictionContext) bool {
 		return false
 	} else if b.parentCtx == nil {
 		return otherP.parentCtx == nil
-	} else {
+	}
+
 		return b.parentCtx.equals(otherP.parentCtx)
-	}
-	}
 }
 
 func (b *BaseSingletonPredictionContext) Hash() string {
 	return b.cachedHashString

@@ -212,13 +211,13 @@ func (b *BaseSingletonPredictionContext) String() string {
 	if len(up) == 0 {
 		if b.returnState == BasePredictionContextEMPTY_RETURN_STATE {
 			return "$"
-		} else {
+		}
+
 			return strconv.Itoa(b.returnState)
-		}
 	} else {
 
 		return strconv.Itoa(b.returnState) + " " + up
 	}
 }
 
 var BasePredictionContextEMPTY = NewEmptyPredictionContext()

@@ -321,7 +320,8 @@ func (a *ArrayPredictionContext) equals(other PredictionContext) bool {
 func (a *ArrayPredictionContext) String() string {
 	if a.isEmpty() {
 		return "[]"
-	} else {
+	}
+
 		var s = "["
 		for i := 0; i < len(a.returnStates); i++ {
 			if i > 0 {

@@ -338,9 +338,9 @@ func (a *ArrayPredictionContext) String() string {
 			s = s + "nil"
 		}
 	}
+
 	return s + "]"
-	}
 }
 
 // Convert a {@link RuleContext} tree to a {@link BasePredictionContext} graph.
 // Return {@link //EMPTY} if {@code outerContext} is empty or nil.

@@ -477,7 +477,8 @@ func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool,
 			mergeCache.set(a.Hash(), b.Hash(), spc)
 		}
 		return spc
-	} else { // a != b payloads differ
+	}
+	// a != b payloads differ
 		// see if we can collapse parents due to $+x parents if local ctx
 		var singleParent PredictionContext = nil
 		if a == b || (a.parentCtx != nil && a.parentCtx == b.parentCtx) { // ax +

@@ -515,7 +516,6 @@ func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool,
 		}
 		return a_
-	}
 }
 
 //
 // Handle case where at least one of {@code a} or {@code b} is

@@ -457,9 +457,9 @@ func PredictionModegetUniqueAlt(altsets []*BitSet) int {
 	var all = PredictionModeGetAlts(altsets)
 	if all.length() == 1 {
 		return all.minValue()
-	} else {
-		return ATNInvalidAltNumber
 	}
+
+	return ATNInvalidAltNumber
 }
 
 // Gets the complete set of represented alternatives for a collection of

@@ -31,9 +31,9 @@ func SemanticContextandContext(a, b SemanticContext) SemanticContext {
 	var result = NewAND(a, b)
 	if len(result.opnds) == 1 {
 		return result.opnds[0]
-	} else {
-		return result
 	}
+
+	return result
 }
 
 func SemanticContextorContext(a, b SemanticContext) SemanticContext {

@@ -49,9 +49,9 @@ func SemanticContextorContext(a, b SemanticContext) SemanticContext {
 	var result = NewOR(a, b)
 	if len(result.opnds) == 1 {
 		return result.opnds[0]
-	} else {
-		return result
 	}
+
+	return result
 }
 
 type Predicate struct {

@@ -128,9 +128,9 @@ func (p *PrecedencePredicate) evaluate(parser Recognizer, outerContext RuleConte
 func (p *PrecedencePredicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
 	if parser.Precpred(outerContext, p.precedence) {
 		return SemanticContextNone
-	} else {
-		return nil
 	}
+
+	return nil
 }
 
 func (p *PrecedencePredicate) compareTo(other *PrecedencePredicate) int {

@@ -300,9 +300,9 @@ func (a *AND) String() string {
 
 	if len(s) > 3 {
 		return s[0:3]
-	} else {
-		return s
 	}
+
+	return s
 }
 
 //

@@ -435,7 +435,7 @@ func (o *OR) String() string {
 
 	if len(s) > 3 {
 		return s[0:3]
-	} else {
+	}
+
 		return s
-	}
 }

@@ -175,9 +175,8 @@ func (c *CommonToken) GetText() string {
 	var n = input.Size()
 	if c.start < n && c.stop < n {
 		return input.GetTextFromInterval(NewInterval(c.start, c.stop))
-	} else {
-		return "<EOF>"
 	}
+	return "<EOF>"
 }
 
 func (c *CommonToken) SetText(text string) {

@@ -162,9 +162,9 @@ func (t *TerminalNodeImpl) GetText() string {
 func (t *TerminalNodeImpl) String() string {
 	if t.symbol.GetTokenType() == TokenEOF {
 		return "<EOF>"
-	} else {
-		return t.symbol.GetText()
 	}
+
+	return t.symbol.GetText()
 }
 
 func (t *TerminalNodeImpl) ToStringTree(s []string, r Recognizer) string {