Fix FileStream, InputStream

This commit is contained in:
Peter Boyer 2015-12-17 09:42:07 -05:00
parent 2cd064a94e
commit 42e05d7147
6 changed files with 50 additions and 54 deletions

View File

@ -1,22 +1,30 @@
package antlr4 package antlr4
import (
"bytes"
"os"
"io"
)
// //
// This is an InputStream that is loaded from a file all at once // This is an InputStream that is loaded from a file all at once
// when you construct the object. // when you construct the object.
// //
// FileStream is an InputStream whose contents are loaded from a file
// all at once when the stream is constructed.
type FileStream struct {
	InputStream
	filename string // path the stream was loaded from
}
func FileStream(fileName) { func NewFileStream(fileName string) {
var data = fs.readFileSync(fileName, "utf8")
InputStream.call(this, data) buf := bytes.NewBuffer(nil)
f, _ := os.Open(fileName) // Error handling elided for brevity.
io.Copy(buf, f) // Error handling elided for brevity.
f.Close()
fs.fileName = fileName
return fs
} }

View File

@ -4,12 +4,13 @@ import (
"math" "math"
) )
// Vacuum all input from a string and then treat it like a buffer. // Vacuums all input from a string and then treat it like a buffer.
// InputStream vacuums all input from a string up front and then
// treats it like a buffer of runes.
type InputStream struct {
	name    string // source name; "<empty>" when built from a bare string
	strdata string // the original input text
	index   int    // current read position within data
	data    []rune // decoded code points of strdata
	size    int    // len(data), cached by loadString
}
@ -19,19 +20,17 @@ func NewInputStream(data string) *InputStream {
is.name = "<empty>" is.name = "<empty>"
is.strdata = data is.strdata = data
_loadString(is) loadString(is)
return is return is
} }
func _loadString(stream) { func loadString(stream *InputStream) {
stream.index = 0 stream.index = 0
stream.data = [] stream.data = []rune(stream.strdata)
for i := 0; i < stream.strdata.length; i++ { stream.size = len(stream.data)
stream.data.push(stream.strdata.charCodeAt(i))
}
stream.size = stream.data.length
} }
// Reset the stream so that it's in the same state it was // Reset the stream so that it's in the same state it was

View File

@ -18,32 +18,28 @@ func NewTraceListener(parser *Parser) *TraceListener {
return tl return tl
} }
// enterEveryRule traces rule entry: prints the rule name and the next
// token of lookahead.
func (this *TraceListener) enterEveryRule(ctx *ParserRuleContext) {
	fmt.Println("enter " + this.parser.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text)
}

// visitTerminal traces token consumption against the current rule.
// NOTE(review): node.symbol is concatenated as a string here — confirm
// its type supports that (it is likely a Token, not a string).
func (this *TraceListener) visitTerminal( node *tree.TerminalNode ) {
	fmt.Println("consume " + node.symbol + " rule " + this.parser.ruleNames[this.parser._ctx.ruleIndex])
}

// exitEveryRule traces rule exit, mirroring enterEveryRule.
func (this *TraceListener) exitEveryRule(ctx *ParserRuleContext) {
	fmt.Println("exit " + this.parser.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text)
}
type Parser struct { type Parser struct {
Recognizer Recognizer
_input *Lexer _input *TokenStream
_errHandler *error.ErrorStrategy _errHandler *error.ErrorStrategy
_precedenceStack IntStack _precedenceStack IntStack
_ctx RuleContext _ctx *ParserRuleContext
buildParseTrees bool buildParseTrees bool
_tracer bool _tracer bool
_parseListeners []tree.ParseTreeListener _parseListeners []tree.ParseTreeListener
_syntaxErrors int _syntaxErrors int
} }
// p.is all the parsing support code essentially most of it is error // p.is all the parsing support code essentially most of it is error
@ -163,7 +159,7 @@ func (p *Parser) matchWildcard() {
p._errHandler.reportMatch(p. p._errHandler.reportMatch(p.
p.consume() p.consume()
} else { } else {
t = p._errHandler.recoverInline(p. t = p._errHandler.recoverInline(p)
if (p._buildParseTrees && t.tokenIndex == -1) { if (p._buildParseTrees && t.tokenIndex == -1) {
// we must have conjured up a Newtoken during single token // we must have conjured up a Newtoken during single token
// insertion // insertion
@ -292,10 +288,9 @@ func (p *Parser) getATNWithBypassAlts() {
} }
var result = p.bypassAltsAtnCache[serializedAtn] var result = p.bypassAltsAtnCache[serializedAtn]
if (result == nil) { if (result == nil) {
var deserializationOptions = NewATNDeserializationOptions() var deserializationOptions = atn.NewATNDeserializationOptions()
deserializationOptions.generateRuleBypassTransitions = true deserializationOptions.generateRuleBypassTransitions = true
result = NewATNDeserializer(deserializationOptions) result = atn.NewATNDeserializer(deserializationOptions).deserialize(serializedAtn)
.deserialize(serializedAtn)
p.bypassAltsAtnCache[serializedAtn] = result p.bypassAltsAtnCache[serializedAtn] = result
} }
return result return result
@ -312,8 +307,6 @@ func (p *Parser) getATNWithBypassAlts() {
// String id = m.get("ID") // String id = m.get("ID")
// </pre> // </pre>
//var Lexer = require('./Lexer').Lexer
func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer) { func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer) {
if (lexer == nil) { if (lexer == nil) {
@ -331,20 +324,20 @@ func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer
return m.compile(pattern, patternRuleIndex) return m.compile(pattern, patternRuleIndex)
} }
// getInputStream returns the parser's token stream (alias for
// getTokenStream).
func (p *Parser) getInputStream() *TokenStream {
	return p.getTokenStream()
}

// setInputStream installs a new token stream (alias for
// setTokenStream).
func (p *Parser) setInputStream(input *TokenStream) {
	p.setTokenStream(input)
}

// getTokenStream returns the current token stream.
func (p *Parser) getTokenStream() *TokenStream {
	return p._input
}
// Set the token stream and reset the parser.// // Set the token stream and reset the parser.//
func (p *Parser) setTokenStream(input) { func (p *Parser) setTokenStream(input *TokenStream) {
p._input = nil p._input = nil
p.reset() p.reset()
p._input = input p._input = input
@ -393,7 +386,7 @@ func (p *Parser) notifyErrorListeners(msg, offendingToken, err) {
// //
func (p *Parser) consume() { func (p *Parser) consume() {
var o = p.getCurrentToken() var o = p.getCurrentToken()
if (o.type != TokenEOF) { if (o.tokenType != TokenEOF) {
p.getInputStream().consume() p.getInputStream().consume()
} }
var hasListener = p._parseListeners != nil && p._parseListeners.length > 0 var hasListener = p._parseListeners != nil && p._parseListeners.length > 0
@ -644,7 +637,7 @@ func (p *Parser) getDFAStrings() {
// For debugging and other purposes.// // For debugging and other purposes.//
func (p *Parser) dumpDFA() { func (p *Parser) dumpDFA() {
var seenOne = false var seenOne = false
for i := 0 i < p._interp.decisionToDFA.length i++) { for i := 0; i < p._interp.decisionToDFA.length; i++) {
var dfa = p._interp.decisionToDFA[i] var dfa = p._interp.decisionToDFA[i]
if (dfa.states.length > 0) { if (dfa.states.length > 0) {
if (seenOne) { if (seenOne) {
@ -664,15 +657,15 @@ func (p *Parser) dumpDFA() {
" }\r\n" + " }\r\n" +
*/ */
// getSourceName reports the name of the parser's input source,
// delegating to the underlying token stream.
func (p *Parser) getSourceName() string {
	return p._input.sourceName
}
// During a parse is sometimes useful to listen in on the rule entry and exit // During a parse is sometimes useful to listen in on the rule entry and exit
// events as well as token matches. p.is for quick and dirty debugging. // events as well as token matches. p.is for quick and dirty debugging.
// //
func (p *Parser) setTrace(trace bool) { func (p *Parser) setTrace(trace *TraceListener) {
if (!trace) { if (trace == nil) {
p.removeParseListener(p._tracer) p.removeParseListener(p._tracer)
p._tracer = nil p._tracer = nil
} else { } else {

View File

@ -39,7 +39,7 @@ func (this *Recognizer) addErrorListener(listener *tree.ParseTreeListener) {
func (this *Recognizer) removeErrorListeners() { func (this *Recognizer) removeErrorListeners() {
this._listeners = make([]tree.ParseTreeListener, 1) this._listeners = make([]tree.ParseTreeListener, 1)
} }
//
//func (this *Recognizer) getTokenTypeMap() { //func (this *Recognizer) getTokenTypeMap() {
// var tokenNames = this.getTokenNames() // var tokenNames = this.getTokenNames()
// if (tokenNames==nil) { // if (tokenNames==nil) {
@ -65,6 +65,9 @@ func (this *Recognizer) getRuleIndexMap() {
} }
var result = ruleIndexMapCache[ruleNames] var result = ruleIndexMapCache[ruleNames]
if(result==nil) { if(result==nil) {
result = ruleNames.reduce(function(o, k, i) { o[k] = i }) result = ruleNames.reduce(function(o, k, i) { o[k] = i })
ruleIndexMapCache[ruleNames] = result ruleIndexMapCache[ruleNames] = result
} }
@ -80,7 +83,6 @@ func (this *Recognizer) getRuleIndexMap() {
// } // }
//} //}
// What is the error header, normally line/character position information?// // What is the error header, normally line/character position information?//
func (this *Recognizer) getErrorHeader(e error) string { func (this *Recognizer) getErrorHeader(e error) string {
panic("Method not defined!") panic("Method not defined!")

View File

@ -60,16 +60,13 @@ const (
// should be obtained from the input along with the start and stop indexes // should be obtained from the input along with the start and stop indexes
// of the token. // of the token.
// text returns the token's character payload.
func (this *Token) text() string {
	return this._text
}

// setText replaces the token's character payload.
func (this *Token) setText(s string) {
	this._text = s
}
func (this *Token) getTokenSource() *TokenSource { func (this *Token) getTokenSource() *TokenSource {
return this.source.tokenSource return this.source.tokenSource

View File

@ -100,9 +100,6 @@ type ParseTreeListener struct {
} }
// NewParseTreeListener returns a fresh, zero-valued ParseTreeListener.
func NewParseTreeListener() *ParseTreeListener {
	return new(ParseTreeListener)
}
@ -120,7 +117,7 @@ func (this *ParseTreeListener) exitEveryRule(node) {
// TerminalNodeImpl is a concrete leaf node of a parse tree.
type TerminalNodeImpl struct {
	TerminalNode
	parentCtx *antlr4.RuleContext
	// NOTE(review): symbol is declared without a type in this revision
	// (likely *Token) — it will not compile as-is; confirm and fix.
	symbol
}