diff --git a/runtime/Go/src/antlr4/FileStream.go b/runtime/Go/src/antlr4/FileStream.go
index 3ee7e7d7c..c60e97774 100644
--- a/runtime/Go/src/antlr4/FileStream.go
+++ b/runtime/Go/src/antlr4/FileStream.go
@@ -1,22 +1,30 @@
 package antlr4
 
+import (
+	"bytes"
+	"os"
+	"io"
+)
+
 //
 // This is an InputStream that is loaded from a file all at once
 // when you construct the object.
 //
 type FileStream struct {
+	InputStream
 	filename string
 }
 
-func FileStream(fileName) {
-	var data = fs.readFileSync(fileName, "utf8")
+func NewFileStream(fileName string) {
 
-	InputStream.call(this, data)
+	buf := bytes.NewBuffer(nil)
+
+	f, _ := os.Open(fileName) // Error handling elided for brevity.
+	io.Copy(buf, f)           // Error handling elided for brevity.
+	f.Close()
 
-	fs.fileName = fileName
-	return fs
 }
diff --git a/runtime/Go/src/antlr4/InputStream.go b/runtime/Go/src/antlr4/InputStream.go
index 4ef944477..5aee36941 100644
--- a/runtime/Go/src/antlr4/InputStream.go
+++ b/runtime/Go/src/antlr4/InputStream.go
@@ -4,12 +4,13 @@ import (
 	"math"
 )
 
-// Vacuum all input from a string and then treat it like a buffer.
+// Vacuums all input from a string and then treat it like a buffer.
 
 type InputStream struct {
 	name string
 	strdata string
 	index int
+	data []rune
 	size int
 }
 
@@ -19,19 +20,17 @@ func NewInputStream(data string) *InputStream {
 	is.name = ""
 	is.strdata = data
-	_loadString(is)
+	loadString(is)
 	return is
-
 }
 
-func _loadString(stream) {
+func loadString(stream *InputStream) {
+
 	stream.index = 0
-	stream.data = []
-	for i := 0; i < stream.strdata.length; i++ {
-		stream.data.push(stream.strdata.charCodeAt(i))
-	}
-	stream.size = stream.data.length
+	stream.data = []rune(stream.strdata)
+	stream.size = len(stream.data)
+
 }
 
 // Reset the stream so that it's in the same state it was
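Note (not part of the patch): the NewFileStream body above reads the file into a buffer but never initializes the embedded InputStream, sets filename, or returns a value. A minimal sketch of where this constructor seems to be heading, reusing NewInputStream from InputStream.go; the error-returning signature and the use of ioutil.ReadFile are assumptions, not something this change introduces:

// Sketch only. FileStream, InputStream, filename and NewInputStream come from
// the files above; the (*FileStream, error) signature is an assumption.
func NewFileStream(fileName string) (*FileStream, error) {
	data, err := ioutil.ReadFile(fileName) // requires "io/ioutil"
	if err != nil {
		return nil, err
	}
	fs := new(FileStream)
	fs.InputStream = *NewInputStream(string(data)) // load the content like a plain InputStream
	fs.filename = fileName
	return fs, nil
}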
diff --git a/runtime/Go/src/antlr4/Parser.go b/runtime/Go/src/antlr4/Parser.go
index ba5afb485..a1c0b6eea 100644
--- a/runtime/Go/src/antlr4/Parser.go
+++ b/runtime/Go/src/antlr4/Parser.go
@@ -18,32 +18,28 @@ func NewTraceListener(parser *Parser) *TraceListener {
 	return tl
 }
 
-//TraceListener.prototype = Object.create(ParseTreeListener)
-//TraceListener.prototype.constructor = TraceListener
-
-func (this *TraceListener) enterEveryRule(ctx) {
+func (this *TraceListener) enterEveryRule(ctx *ParserRuleContext) {
 	fmt.Println("enter " + this.parser.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text)
 }
 
-func (this *TraceListener) visitTerminal( node) {
+func (this *TraceListener) visitTerminal( node *tree.TerminalNode ) {
 	fmt.Println("consume " + node.symbol + " rule " + this.parser.ruleNames[this.parser._ctx.ruleIndex])
 }
 
-func (this *TraceListener) exitEveryRule(ctx) {
+func (this *TraceListener) exitEveryRule(ctx *ParserRuleContext) {
 	fmt.Println("exit " + this.parser.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text)
 }
 
 type Parser struct {
 	Recognizer
 
-	_input *Lexer
+	_input *TokenStream
 	_errHandler *error.ErrorStrategy
 	_precedenceStack IntStack
-	_ctx RuleContext
+	_ctx *ParserRuleContext
 	buildParseTrees bool
 	_tracer bool
 	_parseListeners []tree.ParseTreeListener
 	_syntaxErrors int
-
 }
 
 // p.is all the parsing support code essentially most of it is error
@@ -163,7 +159,7 @@ func (p *Parser) matchWildcard() {
 		p._errHandler.reportMatch(p.
 		p.consume()
 	} else {
-		t = p._errHandler.recoverInline(p.
+		t = p._errHandler.recoverInline(p)
 		if (p._buildParseTrees && t.tokenIndex == -1) {
 			// we must have conjured up a Newtoken during single token
 			// insertion
@@ -292,10 +288,9 @@ func (p *Parser) getATNWithBypassAlts() {
 	}
 	var result = p.bypassAltsAtnCache[serializedAtn]
 	if (result == nil) {
-		var deserializationOptions = NewATNDeserializationOptions()
+		var deserializationOptions = atn.NewATNDeserializationOptions()
 		deserializationOptions.generateRuleBypassTransitions = true
-		result = NewATNDeserializer(deserializationOptions)
-			.deserialize(serializedAtn)
+		result = atn.NewATNDeserializer(deserializationOptions).deserialize(serializedAtn)
 		p.bypassAltsAtnCache[serializedAtn] = result
 	}
 	return result
@@ -312,8 +307,6 @@ func (p *Parser) getATNWithBypassAlts() {
 //	String id = m.get("ID")
 //
 
-//var Lexer = require('./Lexer').Lexer
-
 func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer) {
 
 	if (lexer == nil) {
@@ -331,20 +324,20 @@ func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer
 	return m.compile(pattern, patternRuleIndex)
 }
 
-func (p *Parser) getInputStream() {
+func (p *Parser) getInputStream() *TokenStream {
 	return p.getTokenStream()
 }
 
-func (p *Parser) setInputStream(input) {
+func (p *Parser) setInputStream(input *TokenStream) {
 	p.setTokenStream(input)
 }
 
-func (p *Parser) getTokenStream() {
+func (p *Parser) getTokenStream() *TokenStream {
 	return p._input
 }
 
 // Set the token stream and reset the parser.//
-func (p *Parser) setTokenStream(input) {
+func (p *Parser) setTokenStream(input *TokenStream) {
 	p._input = nil
 	p.reset()
 	p._input = input
@@ -393,7 +386,7 @@ func (p *Parser) notifyErrorListeners(msg, offendingToken, err) {
 //
 func (p *Parser) consume() {
 	var o = p.getCurrentToken()
-	if (o.type != TokenEOF) {
+	if (o.tokenType != TokenEOF) {
 		p.getInputStream().consume()
 	}
 	var hasListener = p._parseListeners != nil && p._parseListeners.length > 0
@@ -644,7 +637,7 @@ func (p *Parser) getDFAStrings() {
 // For debugging and other purposes.//
 func (p *Parser) dumpDFA() {
 	var seenOne = false
-	for i := 0 i < p._interp.decisionToDFA.length i++) {
+	for i := 0; i < p._interp.decisionToDFA.length; i++) {
 		var dfa = p._interp.decisionToDFA[i]
 		if (dfa.states.length > 0) {
 			if (seenOne) {
@@ -664,15 +657,15 @@ func (p *Parser) dumpDFA() {
 "                 }\r\n" +
 */
 
-func (p *Parser) getSourceName() {
+func (p *Parser) getSourceName() string {
 	return p._input.sourceName
 }
 
 // During a parse is sometimes useful to listen in on the rule entry and exit
 // events as well as token matches. p.is for quick and dirty debugging.
 //
-func (p *Parser) setTrace(trace bool) {
-	if (!trace) {
+func (p *Parser) setTrace(trace *TraceListener) {
+	if (trace == nil) {
 		p.removeParseListener(p._tracer)
 		p._tracer = nil
 	} else {
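Note (not part of the patch): with setTrace taking a *TraceListener instead of a bool, turning tracing on and off would look roughly as below. This is a usage sketch based only on the signatures above; the enableTracing helper is hypothetical.

// Usage sketch only.
func enableTracing(p *Parser, on bool) {
	if on {
		p.setTrace(NewTraceListener(p)) // install a tracer for this parser
	} else {
		p.setTrace(nil) // nil removes p._tracer and disables tracing
	}
}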
diff --git a/runtime/Go/src/antlr4/Recognizer.go b/runtime/Go/src/antlr4/Recognizer.go
index 38d5816c2..b637e34cd 100644
--- a/runtime/Go/src/antlr4/Recognizer.go
+++ b/runtime/Go/src/antlr4/Recognizer.go
@@ -39,7 +39,7 @@ func (this *Recognizer) addErrorListener(listener *tree.ParseTreeListener) {
 func (this *Recognizer) removeErrorListeners() {
 	this._listeners = make([]tree.ParseTreeListener, 1)
 }
-//
+
 //func (this *Recognizer) getTokenTypeMap() {
 //	var tokenNames = this.getTokenNames()
 //	if (tokenNames==nil) {
@@ -65,6 +65,9 @@ func (this *Recognizer) getRuleIndexMap() {
 	}
 	var result = ruleIndexMapCache[ruleNames]
 	if(result==nil) {
+
+
+
 		result = ruleNames.reduce(function(o, k, i) { o[k] = i })
 		ruleIndexMapCache[ruleNames] = result
 	}
@@ -80,7 +83,6 @@ func (this *Recognizer) getRuleIndexMap() {
 //	}
 //}
 
-
 // What is the error header, normally line/character position information?//
 func (this *Recognizer) getErrorHeader(e error) string {
 	panic("Method not defined!")
diff --git a/runtime/Go/src/antlr4/Token.go b/runtime/Go/src/antlr4/Token.go
index f4dfdb399..e69312768 100644
--- a/runtime/Go/src/antlr4/Token.go
+++ b/runtime/Go/src/antlr4/Token.go
@@ -60,16 +60,13 @@ const (
 // should be obtained from the input along with the start and stop indexes
 // of the token.
 
-//
-//
-//Object.defineProperty(Token.prototype, "text", {
-//	get : function() {
-//		return this._text
-//	},
-//	set : function(text) {
-//		this._text = text
-//	}
-//})
+func (this *Token) text() string{
+	return this._text
+}
+
+func (this *Token) setText(s string) {
+	this._text = s
+}
 
 func (this *Token) getTokenSource() *TokenSource {
 	return this.source.tokenSource
diff --git a/runtime/Go/src/antlr4/tree/Tree.go b/runtime/Go/src/antlr4/tree/Tree.go
index 5acefa2b5..27814cb22 100644
--- a/runtime/Go/src/antlr4/tree/Tree.go
+++ b/runtime/Go/src/antlr4/tree/Tree.go
@@ -100,9 +100,6 @@ type ParseTreeListener struct {
 }
 
 func NewParseTreeListener() *ParseTreeListener {
-}
-
-func NewParseTreeListener() ParseTreeListener {
 	return new(ParseTreeListener)
 }
@@ -120,7 +117,7 @@ func (this *ParseTreeListener) exitEveryRule(node) {
 
 type TerminalNodeImpl struct {
 	TerminalNode
-	parentCtx
+	parentCtx *antlr4.RuleContext
 	symbol
 }
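Note (not part of the patch): the getRuleIndexMap hunk in Recognizer.go above still carries the JavaScript ruleNames.reduce(...) call, which will not compile in Go. A minimal sketch of the equivalent map construction, assuming ruleNames is a []string; the ruleIndexMap helper and the map[string]int type are assumptions, not something this change introduces:

// Sketch only: Go equivalent of the JavaScript reduce() left in getRuleIndexMap.
func ruleIndexMap(ruleNames []string) map[string]int {
	result := make(map[string]int, len(ruleNames))
	for i, name := range ruleNames {
		result[name] = i // map each rule name to its index
	}
	return result
}

Caching the result per rule-name set, as ruleIndexMapCache[ruleNames] attempts above, would also need a different key, since a []string cannot be used as a Go map key the way the JavaScript array is.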