Will Faught 2016-05-20 18:08:47 -07:00
parent baaf1fe72f
commit 21fa2d1310
16 changed files with 49 additions and 56 deletions

View File

@@ -2,7 +2,7 @@ package antlr
 import (
     "fmt"
     // "reflect"
     "strconv"
 )
@@ -162,17 +162,17 @@ func (this *BaseATNConfig) equals(o interface{}) bool {
     }
     var b bool
-    if this.context==nil {
-        b = other.context==nil
+    if this.context == nil {
+        b = other.context == nil
     } else {
         b = this.context.equals(other.context)
     }
     return this.state.GetStateNumber() == other.state.GetStateNumber() &&
-        this.alt==other.alt &&
+        this.alt == other.alt &&
         this.semanticContext.equals(other.semanticContext) &&
-        this.precedenceFilterSuppressed==other.precedenceFilterSuppressed &&
-        b;
+        this.precedenceFilterSuppressed == other.precedenceFilterSuppressed &&
+        b
 }
 func (this *BaseATNConfig) shortHash() string {

View File

@@ -71,9 +71,9 @@ func (this *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
     // s0.edges is never nil for a precedence DFA
     // s0.edges is never null for a precedence DFA
-    if (precedence >= len(this.s0.edges)) {
+    if precedence >= len(this.s0.edges) {
         // enlarge the slice
-        this.s0.edges = append( this.s0.edges, make([]*DFAState, precedence + 1 - len(this.s0.edges))...)
+        this.s0.edges = append(this.s0.edges, make([]*DFAState, precedence+1-len(this.s0.edges))...)
     }
     this.s0.edges[precedence] = startState
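The append(s, make([]T, n)...) call above is the usual Go idiom for padding a slice with zero values so that a given index becomes addressable. A minimal standalone sketch of that idiom (names and element type are illustrative, not the runtime's):

package main

import "fmt"

// growTo pads edges with zero values so that index i is valid, mirroring the
// setPrecedenceStartState logic above (element type simplified for the sketch).
func growTo(edges []int, i int) []int {
	if i >= len(edges) {
		edges = append(edges, make([]int, i+1-len(edges))...)
	}
	return edges
}

func main() {
	var edges []int
	edges = growTo(edges, 3)
	edges[3] = 42
	fmt.Println(edges) // [0 0 0 42]
}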

View File

@@ -2,8 +2,8 @@ package antlr
 import (
     "fmt"
-    "strconv"
     "os"
+    "strconv"
 )
 // Provides an empty default implementation of {@link ANTLRErrorListener}. The
@@ -74,7 +74,7 @@ var ConsoleErrorListenerINSTANCE = NewConsoleErrorListener()
 // </pre>
 //
 func (this *ConsoleErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
-    fmt.Fprintln(os.Stderr, "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column) + " " + msg)
+    fmt.Fprintln(os.Stderr, "line "+strconv.Itoa(line)+":"+strconv.Itoa(column)+" "+msg)
 }
 type ProxyErrorListener struct {
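The concatenation above follows the JS runtime line for line; a functionally equivalent formulation (hypothetical, not what this commit does) would use a format string instead:

package main

import (
	"fmt"
	"os"
)

func main() {
	// Same output shape as the Fprintln/strconv.Itoa concatenation above.
	line, column, msg := 3, 14, "mismatched input"
	fmt.Fprintf(os.Stderr, "line %d:%d %s\n", line, column, msg)
}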

View File

@@ -543,7 +543,7 @@ func (this *DefaultErrorStrategy) getMissingSymbol(recognizer Parser) Token {
         tokenText = "<missing EOF>"
     } else {
         ln := recognizer.GetLiteralNames()
         if expectedTokenType > 0 && expectedTokenType < len(ln) {
             tokenText = "<missing " + recognizer.GetLiteralNames()[expectedTokenType] + ">"
         } else {
             tokenText = "<missing undefined>" // TODO matches the JS impl

View File

@@ -1,7 +1,5 @@
 package antlr
-import ()
 // The root of the ANTLR exception hierarchy. In general, ANTLR tracks just
 // 3 kinds of errors: prediction errors, failed predicate errors, and
 // misMatched input errors. In each case, the parser knows where it is

View File

@@ -94,11 +94,11 @@ func (is *InputStream) GetText(start int, stop int) string {
 }
 func (is *InputStream) GetTextFromTokens(start, stop Token) string {
-    if ( start!=nil && stop !=nil ) {
-        return is.GetTextFromInterval(NewInterval(start.GetTokenIndex(), stop.GetTokenIndex()));
+    if start != nil && stop != nil {
+        return is.GetTextFromInterval(NewInterval(start.GetTokenIndex(), stop.GetTokenIndex()))
     }
-    return "";
+    return ""
 }
 func (is *InputStream) GetTextFromInterval(i *Interval) string {

View File

@@ -282,7 +282,7 @@ func (is *IntervalSet) toIndexString() string {
 func (is *IntervalSet) toTokenString(literalNames []string, symbolicNames []string) string {
     var names = make([]string, 0)
-    for _,v := range is.intervals {
+    for _, v := range is.intervals {
         for j := v.start; j < v.stop; j++ {
             names = append(names, is.elementName(literalNames, symbolicNames, j))
         }

View File

@@ -25,11 +25,11 @@ type Lexer interface {
 type BaseLexer struct {
     *BaseRecognizer
     Interpreter *LexerATNSimulator
     TokenStartCharIndex int
     TokenStartLine int
     TokenStartColumn int
     ActionType int
     _input CharStream
     _factory TokenFactory
@@ -41,7 +41,6 @@ type BaseLexer struct {
     _modeStack IntStack
     _mode int
     _text string
 }
 func NewBaseLexer(input CharStream) *BaseLexer {

View File

@@ -52,7 +52,7 @@ type LexerATNSimulator struct {
     recog Lexer
     predictionMode int
     DecisionToDFA []*DFA
     mergeCache DoubleDict
     startIndex int
     line int
@@ -207,9 +207,9 @@ func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
     // that already has lots of edges out of it. e.g., .* in comments.
     // print("Target for:" + str(s) + " and:" + str(t))
     var target = this.getExistingTargetState(s, t)
     // if PortDebug {
     //     fmt.Println(target)
     // }
     if target == nil {
         target = this.computeTargetState(input, s, t)
         // print("Computed:" + str(target))

View File

@@ -199,7 +199,7 @@ func (p *BaseParser) GetParserRuleContext() ParserRuleContext {
     return p._ctx
 }
 func (p *BaseParser) SetParserRuleContext(v ParserRuleContext) {
     p._ctx = v
 }
@@ -698,8 +698,8 @@ func (p *BaseParser) GetDFAStrings() string {
 func (p *BaseParser) DumpDFA() {
     var seenOne = false
     for _, dfa := range p.Interpreter.DecisionToDFA {
-        if ( len(dfa.GetStates()) > 0) {
-            if (seenOne) {
+        if len(dfa.GetStates()) > 0 {
+            if seenOne {
                 fmt.Println()
             }
             fmt.Println("Decision " + strconv.Itoa(dfa.decision) + ":")

View File

@@ -14,7 +14,7 @@ type ParserATNSimulator struct {
     _input TokenStream
     _startIndex int
     _dfa *DFA
     DecisionToDFA []*DFA
     mergeCache *DoubleDict
     _outerContext ParserRuleContext
 }

View File

@@ -3,7 +3,7 @@ package antlr
 import (
     "reflect"
     "strconv"
     // "fmt"
 )
 type ParserRuleContext interface {
@@ -32,7 +32,7 @@ type BaseParserRuleContext struct {
     start, stop Token
     exception RecognitionException
     children []Tree
 }
 func NewBaseParserRuleContext(parent ParserRuleContext, invokingStateNumber int) *BaseParserRuleContext {
@@ -233,29 +233,30 @@ func (prc *BaseParserRuleContext) GetTokens(ttype int) []TerminalNode {
     }
 }
-func (prc *BaseParserRuleContext) GetPayload() interface{}{
+func (prc *BaseParserRuleContext) GetPayload() interface{} {
     return prc
 }
 func (prc *BaseParserRuleContext) getChild(ctxType reflect.Type, i int) RuleContext {
-    if ( prc.children==nil || i < 0 || i >= len(prc.children) ) {
+    if prc.children == nil || i < 0 || i >= len(prc.children) {
         return nil
     }
     var j int = -1 // what element have we found with ctxType?
-    for _,o := range prc.children {
+    for _, o := range prc.children {
         childType := reflect.TypeOf(o)
-        if ( childType.Implements(ctxType) ) {
+        if childType.Implements(ctxType) {
             j++
-            if ( j == i ) {
+            if j == i {
                 return o.(RuleContext)
             }
         }
     }
     return nil
 }
 // Go lacks generics, so it's not possible for us to return the child with the correct type, but we do
 // check for convertibility
@@ -270,7 +271,7 @@ func (prc *BaseParserRuleContext) GetTypedRuleContexts(ctxType reflect.Type) []R
     var contexts = make([]RuleContext, 0)
-    for _,child := range prc.children {
+    for _, child := range prc.children {
         childType := reflect.TypeOf(child)
         if childType.ConvertibleTo(ctxType) {
@@ -335,11 +336,8 @@ func (this *BaseParserRuleContext) String(ruleNames []string, stop RuleContext)
     return s
 }
 var RuleContextEmpty = NewBaseParserRuleContext(nil, -1)
 type InterpreterRuleContext interface {
     ParserRuleContext
 }
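The getChild and GetTypedRuleContexts code above filters children by reflect.Type because, as the comment notes, the Go of this era has no generics. A self-contained sketch of the same Implements check, using made-up stand-in types rather than the runtime's Tree and RuleContext:

package main

import (
	"fmt"
	"reflect"
)

type Named interface{ Name() string }

type leaf struct{}

type node struct{ name string }

func (n node) Name() string { return n.name }

// nthOfType mirrors getChild above: scan a heterogeneous slice and return the
// i-th element whose dynamic type implements the given interface type.
func nthOfType(children []interface{}, ifaceType reflect.Type, i int) interface{} {
	if children == nil || i < 0 || i >= len(children) {
		return nil
	}
	j := -1
	for _, o := range children {
		if reflect.TypeOf(o).Implements(ifaceType) {
			j++
			if j == i {
				return o
			}
		}
	}
	return nil
}

func main() {
	namedType := reflect.TypeOf((*Named)(nil)).Elem() // reflect.Type of the interface itself
	children := []interface{}{leaf{}, node{"a"}, leaf{}, node{"b"}}
	fmt.Println(nthOfType(children, namedType, 1)) // {b}
}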

View File

@@ -110,7 +110,7 @@ func (this *BaseRecognizer) SetState(v int) {
 // <p>Used for XPath and tree pattern compilation.</p>
 //
 func (this *BaseRecognizer) GetRuleIndexMap() map[string]int {
     panic("Method not defined!")
     // var ruleNames = this.GetRuleNames()
     // if (ruleNames==nil) {

View File

@@ -34,11 +34,9 @@ type RuleContext interface {
 }
 type BaseRuleContext struct {
     parentCtx RuleContext
     invokingState int
     RuleIndex int
 }
 func NewBaseRuleContext(parent RuleContext, invokingState int) *BaseRuleContext {

View File

@@ -54,12 +54,12 @@ type ParseTreeVisitor interface {
     VisitErrorNode(node ErrorNode) interface{}
 }
-type BaseParseTreeVisitor struct {}
+type BaseParseTreeVisitor struct{}
 func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return nil }
 func (v *BaseParseTreeVisitor) VisitChildren(node RuleNode) interface{} { return nil }
 func (v *BaseParseTreeVisitor) VisitTerminal(node TerminalNode) interface{} { return nil }
 func (v *BaseParseTreeVisitor) VisitErrorNode(node ErrorNode) interface{} { return nil }
 // TODO
 //func (this ParseTreeVisitor) Visit(ctx) {
@@ -89,11 +89,12 @@ type ParseTreeListener interface {
     ExitEveryRule(ctx ParserRuleContext)
 }
-type BaseParseTreeListener struct {}
-func (l *BaseParseTreeListener) VisitTerminal(node TerminalNode){}
-func (l *BaseParseTreeListener) VisitErrorNode(node ErrorNode){}
-func (l *BaseParseTreeListener) EnterEveryRule(ctx ParserRuleContext){}
-func (l *BaseParseTreeListener) ExitEveryRule(ctx ParserRuleContext){}
+type BaseParseTreeListener struct{}
+
+func (l *BaseParseTreeListener) VisitTerminal(node TerminalNode) {}
+func (l *BaseParseTreeListener) VisitErrorNode(node ErrorNode) {}
+func (l *BaseParseTreeListener) EnterEveryRule(ctx ParserRuleContext) {}
+func (l *BaseParseTreeListener) ExitEveryRule(ctx ParserRuleContext) {}
 type TerminalNodeImpl struct {
     parentCtx RuleContext
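The empty methods on BaseParseTreeListener above exist so that generated and hand-written listeners can embed the base type and override only the callbacks they need. A small sketch of that embedding pattern, using simplified stand-in types rather than the runtime's real ParseTreeListener:

package main

import "fmt"

type Node struct{ Text string }

// Listener and Base stand in for ParseTreeListener and BaseParseTreeListener;
// the real runtime methods take ParserRuleContext and TerminalNode values.
type Listener interface {
	EnterEveryRule(n Node)
	ExitEveryRule(n Node)
}

type Base struct{}

func (Base) EnterEveryRule(Node) {}
func (Base) ExitEveryRule(Node)  {}

// myListener embeds Base and overrides only the hook it cares about.
type myListener struct{ Base }

func (myListener) EnterEveryRule(n Node) { fmt.Println("enter", n.Text) }

func main() {
	var l Listener = myListener{}
	l.EnterEveryRule(Node{"expr"}) // enter expr
	l.ExitEveryRule(Node{"expr"})  // inherited no-op from Base
}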

View File

@@ -114,7 +114,6 @@ func hashCode(s string) string {
     return fmt.Sprint(h.Sum32())
 }
 func (this *Set) length() int {
     return len(this.data)
 }
@@ -264,10 +263,10 @@ func (this *BitSet) String() string {
     vals := this.values()
     valsS := make([]string, len(vals))
-    for i,val := range vals {
+    for i, val := range vals {
         valsS[i] = strconv.Itoa(val)
     }
-    return "{" + strings.Join(valsS, ", ") + "}";
+    return "{" + strings.Join(valsS, ", ") + "}"
 }
 type AltDict struct {
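The BitSet.String hunk above uses the standard Go idiom for rendering an int slice as a delimited string. A standalone sketch of just that idiom:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	// Same shape as BitSet.String above: ints -> strings -> "{1, 2, 3}".
	vals := []int{1, 2, 3}
	valsS := make([]string, len(vals))
	for i, val := range vals {
		valsS[i] = strconv.Itoa(val)
	}
	fmt.Println("{" + strings.Join(valsS, ", ") + "}") // {1, 2, 3}
}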