forked from jasder/antlr
Move files into one directory
parent 4ea8768751
commit 5d0b4dfa09
@@ -1,49 +1,5 @@
 package atn

-//var Token = require('./../Token').Token
-//var ATN = require('./ATN').ATN
-//var ATNType = require('./ATNType').ATNType
-//var ATNStates = require('./ATNState')
-var ATNState = ATNStates.ATNState
-var BasicState = ATNStates.BasicState
-var DecisionState = ATNStates.DecisionState
-var BlockStartState = ATNStates.BlockStartState
-var BlockEndState = ATNStates.BlockEndState
-var LoopEndState = ATNStates.LoopEndState
-var RuleStartState = ATNStates.RuleStartState
-var RuleStopState = ATNStates.RuleStopState
-var TokensStartState = ATNStates.TokensStartState
-var PlusLoopbackState = ATNStates.PlusLoopbackState
-var StarLoopbackState = ATNStates.StarLoopbackState
-var StarLoopEntryState = ATNStates.StarLoopEntryState
-var PlusBlockStartState = ATNStates.PlusBlockStartState
-var StarBlockStartState = ATNStates.StarBlockStartState
-var BasicBlockStartState = ATNStates.BasicBlockStartState
-//var Transitions = require('./Transition')
-var Transition = Transitions.Transition
-var AtomTransition = Transitions.AtomTransition
-var SetTransition = Transitions.SetTransition
-var NotSetTransition = Transitions.NotSetTransition
-var RuleTransition = Transitions.RuleTransition
-var RangeTransition = Transitions.RangeTransition
-var ActionTransition = Transitions.ActionTransition
-var EpsilonTransition = Transitions.EpsilonTransition
-var WildcardTransition = Transitions.WildcardTransition
-var PredicateTransition = Transitions.PredicateTransition
-var PrecedencePredicateTransition = Transitions.PrecedencePredicateTransition
-//var IntervalSet = require('./../IntervalSet').IntervalSet
-//var Interval = require('./../IntervalSet').Interval
-//var ATNDeserializationOptions = require('./ATNDeserializationOptions').ATNDeserializationOptions
-//var LexerActions = require('./LexerAction')
-var LexerActionType = LexerActions.LexerActionType
-var LexerSkipAction = LexerActions.LexerSkipAction
-var LexerChannelAction = LexerActions.LexerChannelAction
-var LexerCustomAction = LexerActions.LexerCustomAction
-var LexerMoreAction = LexerActions.LexerMoreAction
-var LexerTypeAction = LexerActions.LexerTypeAction
-var LexerPushModeAction = LexerActions.LexerPushModeAction
-var LexerPopModeAction = LexerActions.LexerPopModeAction
-var LexerModeAction = LexerActions.LexerModeAction

 // This is the earliest supported serialized UUID.
 // stick to serialized version for now, we don't need a UUID instance
 var BASE_SERIALIZED_UUID = "AADB8D7E-AEEF-4415-AD2B-8204D6CF042E"
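With every runtime file moved into one directory, these files share a single Go package, so the JS-style require/re-export block above becomes dead weight: a name declared in any file of package atn is directly visible in every other file. A minimal sketch of why the roughly 44 deleted lines are unnecessary (file names and bodies hypothetical):

    // state.go
    package atn

    type ATNState struct {
    	stateNumber int
    }

    // deserializer.go -- same package, so ATNState needs no import
    // and no `var ATNState = ...` re-binding:
    package atn

    func newDeserializedState() *ATNState {
    	return &ATNState{stateNumber: -1}
    }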
@@ -44,20 +44,20 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
 //
 var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)

-func (this *CommonTokenFactory) create(source *TokenSource, ttype, text, channel, start, stop, line, column int) *CommonToken {
+func (this *CommonTokenFactory) create(source *TokenFactorySourcePair, ttype, text, channel, start, stop, line, column int) *CommonToken {
 	var t = NewCommonToken(source, ttype, channel, start, stop)
 	t.line = line
 	t.column = column
 	if (text !=nil) {
 		t.text = text
 	} else if (this.copyText && source[1] !=nil) {
-		t.text = source[1].getText(start,stop)
+		t.text = source.inputStream.getText(start,stop)
 	}
 	return t
 }

 func (this *CommonTokenFactory) createThin(ttype int, text string) *CommonToken {
-	var t = NewCommonToken(nil, ttype)
+	var t = NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
 	t.text = text
 	return t
 }
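The create signature swaps *TokenSource for *TokenFactorySourcePair, and the JS tuple access source[1] gives way to the named field source.inputStream. The pair type is not shown in this hunk; judging from TokenFactorySourcePair{lexer, input} in the lexer changes below, it presumably looks something like this sketch (field types assumed):

    // Assumed shape: bundles the token source (the lexer) with the
    // character stream it reads from, so the factory can fetch text.
    type TokenFactorySourcePair struct {
    	tokenSource TokenSource
    	inputStream *InputStream
    }

Note that `if (text !=nil)` only makes sense while text is a pointer-like type, yet the parameter list declares it int; that is one of several JS-isms this port has yet to clean up.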
@@ -51,12 +51,18 @@ func NewLexer(input *InputStream) *Lexer {
 	lexer := new(Lexer)

 	lexer.initRecognizer()
+	lexer.initLexer(input)

-	lexer._input = input
-	lexer._factory = CommonTokenFactoryDEFAULT
-	lexer._tokenFactorySourcePair = TokenFactorySourcePair{lexer, input}
+	return lexer
+}

-	lexer._interp = nil // child classes must populate l
+func (l *Lexer) initLexer(input *InputStream){
+
+	l._input = input
+	l._factory = CommonTokenFactoryDEFAULT
+	l._tokenFactorySourcePair = TokenFactorySourcePair{l, input}
+
+	l._interp = nil // child classes must populate l

 	// The goal of all lexer rules/methods is to create a token object.
 	// l is an instance variable as multiple rules may collaborate to
@@ -65,43 +65,36 @@ func NewLexer(input *InputStream) *Lexer {
 	// emissions, then set l to the last token to be matched or
 	// something nonnil so that the auto token emit mechanism will not
 	// emit another token.
-	lexer._token = nil
+	l._token = nil

 	// What character index in the stream did the current token start at?
 	// Needed, for example, to get the text for current token. Set at
 	// the start of nextToken.
-	lexer._tokenStartCharIndex = -1
+	l._tokenStartCharIndex = -1

 	// The line on which the first character of the token resides///
-	lexer._tokenStartLine = -1
+	l._tokenStartLine = -1

 	// The character position of first character within the line///
-	lexer._tokenStartColumn = -1
+	l._tokenStartColumn = -1

 	// Once we see EOF on char stream, next token will be EOF.
 	// If you have DONE : EOF then you see DONE EOF.
-	lexer._hitEOF = false
+	l._hitEOF = false

 	// The channel number for the current token///
-	lexer._channel = TokenDefaultChannel
+	l._channel = TokenDefaultChannel

 	// The token type for the current token///
-	lexer._type = TokenInvalidType
+	l._type = TokenInvalidType

-	lexer._modeStack = make([]int,0)
-	lexer._mode = LexerDefaultMode
+	l._modeStack = make([]int,0)
+	l._mode = LexerDefaultMode

 	// You can set the text for the current token to override what is in
 	// the input char buffer. Use setText() or can set l instance var.
 	// /
-	lexer._text = nil
-
-	return lexer
-}
-
-func InitLexer(lexer Lexer){
-
+	l._text = nil

 }
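Taken together with the previous hunk, the JS constructor is being split into the Go pattern used throughout this port: New* allocates and delegates to an init method that a generated lexer embedding Lexer can also call on itself. A self-contained sketch of the pattern (fields trimmed, names hypothetical):

    package antlr4

    type InputStream struct{ /* elided */ }

    const LexerDefaultMode = 0

    type Lexer struct {
    	_input *InputStream
    	_mode  int
    }

    func NewLexer(input *InputStream) *Lexer {
    	lexer := new(Lexer)
    	lexer.initLexer(input)
    	return lexer
    }

    // Separate from NewLexer so an embedding type (a generated lexer)
    // can run the same field initialization on itself.
    func (l *Lexer) initLexer(input *InputStream) {
    	l._input = input
    	l._mode = LexerDefaultMode
    }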
@@ -284,15 +283,13 @@ func (l *Lexer) emitEOF() int {
 }

 func (l *Lexer) getType() {
 	return l._type
 }

-Object.defineProperty(Lexer.prototype, "type", {
-	get : function() {
-		return l.type
-	},
-	set : function(type) {
-		l._type = type
-	}
-})
+func (l *Lexer) setType(t int) {
+	l._type = t
+}

 // What is the index of the current character of lookahead?///
 func (l *Lexer) getCharIndex() {
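Go has no property syntax, so the Object.defineProperty accessor pair collapses into explicit getter and setter methods. As committed, getType is missing its return type and would not compile; a buildable version is one token away:

    func (l *Lexer) getType() int { // `int` is absent in the hunk above
    	return l._type
    }

    func (l *Lexer) setType(t int) {
    	l._type = t
    }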
@@ -301,18 +298,17 @@ func (l *Lexer) getCharIndex() {

 // Return the text matched so far for the current token or any text override.
 //Set the complete text of l token it wipes any previous changes to the text.
-//Object.defineProperty(Lexer.prototype, "text", {
-//	get : function() {
-//		if (l._text != nil) {
-//			return l._text
-//		} else {
-//			return l._interp.getText(l._input)
-//		}
-//	},
-//	set : function(text) {
-//		l._text = text
-//	}
-//})
+func (l *Lexer) text() string {
+	if (l._text != nil) {
+		return l._text
+	} else {
+		return l._interp.getText(l._input)
+	}
+}
+
+func (l *Lexer) setText(text string) {
+	l._text = text
+}

 // Return a list of all Token objects in input char stream.
 // Forces load of all tokens. Does not include EOF token.
@@ -1,5 +1,8 @@
 package atn

-import "antlr4/dfa"
+import (
+	"antlr4/dfa"
+	"fmt"
+)

 // When we hit an accept state in either the DFA or the ATN, we
 // have to notify the character stream to start buffering characters
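The fmt import feeds the debug fmt.Println added to closure two hunks down. A single import can use either spelling; once a second package arrives, the factored block is the conventional form:

    // before: one import, either form is fine
    import "antlr4/dfa"

    // after: grouped, the idiomatic shape for two or more imports
    import (
    	"antlr4/dfa"
    	"fmt"
    )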
@@ -343,8 +346,7 @@ func (this *LexerATNSimulator) computeStartState(input, p) {
 //
 // @return {@code true} if an accept state is reached, otherwise
 // {@code false}.
-func (this *LexerATNSimulator) closure(input, config, configs,
-		currentAltReachedAcceptState, speculative, treatEofAsEpsilon) {
+func (this *LexerATNSimulator) closure(input, config, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon) {
 	var cfg = nil
 	if (this.debug) {
 		fmt.Println("closure(" + config.toString(this.recog, true) + ")")
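The signature is merely reflowed onto one line here; it is still untyped JS-style pseudocode, and `var cfg = nil` cannot compile because nil needs a concrete type. A typed rendering might look like the sketch below; the parameter and config types are assumptions drawn from the wider ANTLR runtime, not from this diff:

    func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig,
    	configs *ATNConfigSet, currentAltReachedAcceptState, speculative,
    	treatEofAsEpsilon bool) bool {
    	var cfg *LexerATNConfig // typed nil replaces `var cfg = nil`
    	_ = cfg
    	if this.debug {
    		fmt.Println("closure(" + config.toString(this.recog, true) + ")")
    	}
    	// ...
    	return currentAltReachedAcceptState
    }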
@@ -1,4 +1,5 @@
 package antlr4

 import (
 	"antlr4/atn"
+	"fmt"
@@ -666,12 +666,10 @@ type <struct.name> struct {
 }

 func New<struct.name>(parser *antlr4.Parser, parent *antlr4.ParserRuleContext, invokingState int<struct.ctorAttrs:{a | , <a.name>}>) <struct.name> {
-	// if( invokingState==nil ) {
-	//	invokingState = -1
-	//}
-	<superClass>.call(this, parent, invokingState) // TODO

 	var p = new(<struct.name>)
+	p.initParserRuleContext( parent, invokingState )

 	p.parser = parser
 	p.ruleIndex = <parser.name>RULE_<struct.derivedFromName>
 	<attrs:{a | <a>}; separator="\n">
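For orientation: this file is a StringTemplate for the Go code generator, so <struct.name> and <attrs:...> are template holes, not Go. For a hypothetical rule expr in a parser named Calc, the rewritten constructor would expand to roughly:

    func NewExprContext(parser *antlr4.Parser, parent *antlr4.ParserRuleContext, invokingState int) ExprContext {
    	var p = new(ExprContext)
    	p.initParserRuleContext(parent, invokingState)

    	p.parser = parser
    	p.ruleIndex = CalcRULE_expr
    	return *p
    }

The superclass constructor emulated in JS via <superClass>.call(this, ...) becomes the p.initParserRuleContext(...) call, matching the initLexer pattern above.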
@@ -705,6 +703,8 @@ func New<struct.name>(parser *antlr4.Parser, ctx *antlr4.ParserRuleContext) <struct.name> {
 	var p = new(<struct.name>)

-	<currentRule.name; format="cap">Context.call(this, parser)

 	<attrs:{a | <a>;}; separator="\n">
-	<currentRule.name; format="cap">Context.prototype.copyFrom.call(this, ctx)
@@ -822,7 +822,7 @@ type <lexer.name> struct {
 	EOF string
 }

-func New<lexer.name>(input *antlr4.TokenStream) <lexer.name> {
+func New<lexer.name>(input *antlr4.TokenStream) *<lexer.name> {

 	// TODO could be package level variables
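Returning *<lexer.name> instead of <lexer.name> is more than style: a value return copies the generated struct, and methods declared with pointer receivers are not in the copy's method set. With a hypothetical generated name:

    // before: func NewCalcLexer(input *antlr4.TokenStream) CalcLexer  (returns a copy)
    // after:
    func NewCalcLexer(input *antlr4.TokenStream) *CalcLexer {
    	lex := new(CalcLexer)
    	// ...initialization as in the next hunk...
    	return lex
    }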
@@ -837,7 +837,7 @@ func New<lexer.name>(input *antlr4.TokenStream) <lexer.name> {

 	lex := new(<lexer.name>)

-	antlr4.InitLexer(lex, input);
+	antlr4.initLexer(lex, input);

 	lex._interp = atn.NewLexerATNSimulator(lex, atn, decisionToDFA, antlr4.NewPredictionContextCache())
 	lex.modeNames = [...]string{ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> }
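One caveat in this hunk: the call site changes from antlr4.InitLexer to antlr4.initLexer, but a lower-case identifier is unexported in Go, so a generated lexer living outside package antlr4 cannot reach it. If generated code keeps its own package, the runtime needs an exported entry point, for example (a sketch, assuming the initLexer method from the Lexer changes above):

    // In package antlr4: exported wrapper over the unexported init method.
    func InitLexer(lex *Lexer, input *InputStream) {
    	lex.initLexer(input)
    }

The trailing semicolon on the call is legal Go, though gofmt would strip it.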