Fix FileStream, InputStream

This commit is contained in:
Peter Boyer 2015-12-17 09:42:07 -05:00
parent 2cd064a94e
commit 42e05d7147
6 changed files with 50 additions and 54 deletions

View File

@ -1,22 +1,30 @@
package antlr4
import (
"bytes"
"os"
"io"
)
//
// This is an InputStream that is loaded from a file all at once
// when you construct the object.
//
type FileStream struct {
	InputStream            // in-memory character stream holding the file's entire contents
	filename string        // path of the file the stream was loaded from
}
func FileStream(fileName) {
var data = fs.readFileSync(fileName, "utf8")
func NewFileStream(fileName string) {
InputStream.call(this, data)
buf := bytes.NewBuffer(nil)
f, _ := os.Open(fileName) // Error handling elided for brevity.
io.Copy(buf, f) // Error handling elided for brevity.
f.Close()
fs.fileName = fileName
return fs
}

View File

@ -4,12 +4,13 @@ import (
"math"
)
// Vacuums all input from a string and then treats it like a buffer.
type InputStream struct {
	name    string // source name; "<empty>" for anonymous streams
	strdata string // original string the stream was constructed from
	index   int    // current read position within data
	data    []rune // strdata decoded to code points, so indexing is per character
	size    int    // cached len(data)
}
@ -19,19 +20,17 @@ func NewInputStream(data string) *InputStream {
is.name = "<empty>"
is.strdata = data
_loadString(is)
loadString(is)
return is
}
func _loadString(stream) {
func loadString(stream *InputStream) {
stream.index = 0
stream.data = []
for i := 0; i < stream.strdata.length; i++ {
stream.data.push(stream.strdata.charCodeAt(i))
}
stream.size = stream.data.length
stream.data = []rune(stream.strdata)
stream.size = len(stream.data)
}
// Reset the stream so that it's in the same state it was

View File

@ -18,32 +18,28 @@ func NewTraceListener(parser *Parser) *TraceListener {
return tl
}
//TraceListener.prototype = Object.create(ParseTreeListener)
//TraceListener.prototype.constructor = TraceListener
func (this *TraceListener) enterEveryRule(ctx) {
func (this *TraceListener) enterEveryRule(ctx *ParserRuleContext) {
fmt.Println("enter " + this.parser.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text)
}
func (this *TraceListener) visitTerminal( node) {
func (this *TraceListener) visitTerminal( node *tree.TerminalNode ) {
fmt.Println("consume " + node.symbol + " rule " + this.parser.ruleNames[this.parser._ctx.ruleIndex])
}
func (this *TraceListener) exitEveryRule(ctx) {
func (this *TraceListener) exitEveryRule(ctx *ParserRuleContext) {
fmt.Println("exit " + this.parser.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text)
}
// Parser is a recognizer driven by a TokenStream, with support for parse
// listeners, tracing, error handling and parse-tree construction.
type Parser struct {
	Recognizer

	_input           *TokenStream           // token source being parsed
	_errHandler      *error.ErrorStrategy   // strategy for reporting/recovering from errors
	_precedenceStack IntStack               // precedence levels for left-recursive rules
	_ctx             *ParserRuleContext     // currently executing rule context
	buildParseTrees  bool                   // whether parse trees are constructed during parsing
	// setTrace assigns nil or a *TraceListener to this field, so it cannot
	// be a bool as the garbled diff left it.
	_tracer          *TraceListener
	_parseListeners  []tree.ParseTreeListener // listeners notified of rule entry/exit
	_syntaxErrors    int                      // number of syntax errors seen so far
}
// p.is all the parsing support code essentially most of it is error
@ -163,7 +159,7 @@ func (p *Parser) matchWildcard() {
p._errHandler.reportMatch(p.
p.consume()
} else {
t = p._errHandler.recoverInline(p.
t = p._errHandler.recoverInline(p)
if (p._buildParseTrees && t.tokenIndex == -1) {
// we must have conjured up a Newtoken during single token
// insertion
@ -292,10 +288,9 @@ func (p *Parser) getATNWithBypassAlts() {
}
var result = p.bypassAltsAtnCache[serializedAtn]
if (result == nil) {
var deserializationOptions = NewATNDeserializationOptions()
var deserializationOptions = atn.NewATNDeserializationOptions()
deserializationOptions.generateRuleBypassTransitions = true
result = NewATNDeserializer(deserializationOptions)
.deserialize(serializedAtn)
result = atn.NewATNDeserializer(deserializationOptions).deserialize(serializedAtn)
p.bypassAltsAtnCache[serializedAtn] = result
}
return result
@ -312,8 +307,6 @@ func (p *Parser) getATNWithBypassAlts() {
// String id = m.get("ID")
// </pre>
//var Lexer = require('./Lexer').Lexer
func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer) {
if (lexer == nil) {
@ -331,20 +324,20 @@ func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer
return m.compile(pattern, patternRuleIndex)
}
func (p *Parser) getInputStream() {
func (p *Parser) getInputStream() *TokenStream {
return p.getTokenStream()
}
func (p *Parser) setInputStream(input) {
func (p *Parser) setInputStream(input *TokenStream) {
p.setTokenStream(input)
}
func (p *Parser) getTokenStream() {
func (p *Parser) getTokenStream() *TokenStream {
return p._input
}
// Set the token stream and reset the parser.//
func (p *Parser) setTokenStream(input) {
func (p *Parser) setTokenStream(input *TokenStream) {
p._input = nil
p.reset()
p._input = input
@ -393,7 +386,7 @@ func (p *Parser) notifyErrorListeners(msg, offendingToken, err) {
//
func (p *Parser) consume() {
var o = p.getCurrentToken()
if (o.type != TokenEOF) {
if (o.tokenType != TokenEOF) {
p.getInputStream().consume()
}
var hasListener = p._parseListeners != nil && p._parseListeners.length > 0
@ -644,7 +637,7 @@ func (p *Parser) getDFAStrings() {
// For debugging and other purposes.//
func (p *Parser) dumpDFA() {
var seenOne = false
for i := 0 i < p._interp.decisionToDFA.length i++) {
for i := 0; i < p._interp.decisionToDFA.length; i++) {
var dfa = p._interp.decisionToDFA[i]
if (dfa.states.length > 0) {
if (seenOne) {
@ -664,15 +657,15 @@ func (p *Parser) dumpDFA() {
" }\r\n" +
*/
func (p *Parser) getSourceName() {
func (p *Parser) getSourceName() string {
return p._input.sourceName
}
// During a parse is sometimes useful to listen in on the rule entry and exit
// events as well as token matches. p.is for quick and dirty debugging.
//
func (p *Parser) setTrace(trace bool) {
if (!trace) {
func (p *Parser) setTrace(trace *TraceListener) {
if (trace == nil) {
p.removeParseListener(p._tracer)
p._tracer = nil
} else {

View File

@ -39,7 +39,7 @@ func (this *Recognizer) addErrorListener(listener *tree.ParseTreeListener) {
// removeErrorListeners discards every registered error listener. The
// original made a length-1 slice, which left a single nil listener in the
// list; an empty slice is what "remove all" means.
func (this *Recognizer) removeErrorListeners() {
	this._listeners = make([]tree.ParseTreeListener, 0)
}
//
//func (this *Recognizer) getTokenTypeMap() {
// var tokenNames = this.getTokenNames()
// if (tokenNames==nil) {
@ -65,6 +65,9 @@ func (this *Recognizer) getRuleIndexMap() {
}
var result = ruleIndexMapCache[ruleNames]
if(result==nil) {
result = ruleNames.reduce(function(o, k, i) { o[k] = i })
ruleIndexMapCache[ruleNames] = result
}
@ -80,7 +83,6 @@ func (this *Recognizer) getRuleIndexMap() {
// }
//}
// What is the error header, normally line/character position information?//
func (this *Recognizer) getErrorHeader(e error) string {
panic("Method not defined!")

View File

@ -60,16 +60,13 @@ const (
// should be obtained from the input along with the start and stop indexes
// of the token.
//
//
//Object.defineProperty(Token.prototype, "text", {
// get : function() {
// return this._text
// },
// set : function(text) {
// this._text = text
// }
//})
// text returns the token's character payload.
func (this *Token) text() string {
	return this._text
}

// setText overrides the token's character payload.
func (this *Token) setText(s string) {
	this._text = s
}
func (this *Token) getTokenSource() *TokenSource {
return this.source.tokenSource

View File

@ -100,9 +100,6 @@ type ParseTreeListener struct {
}
func NewParseTreeListener() *ParseTreeListener {
}
func NewParseTreeListener() ParseTreeListener {
return new(ParseTreeListener)
}
@ -120,7 +117,7 @@ func (this *ParseTreeListener) exitEveryRule(node) {
// TerminalNodeImpl is the concrete TerminalNode stored in parse trees.
type TerminalNodeImpl struct {
	TerminalNode

	parentCtx *antlr4.RuleContext // enclosing rule context, if any
	// NOTE(review): the diff left symbol untyped; *Token matches its use as
	// the node's underlying token — confirm against the TerminalNode API.
	symbol *Token
}