new -> New

Peter Boyer 2015-12-15 15:59:04 -05:00
parent f12eaef24f
commit fc75fbfab0
29 changed files with 222 additions and 224 deletions
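The change is mechanical: the JavaScript runtime's `new X(...)` constructor calls are rewritten to Go-style exported constructor functions `NewX(...)` across the ported files (the same textual replacement also lands in several comments). A minimal sketch of the pattern, using a hypothetical Interval type for illustration only; the field names and signature below are assumptions, not the runtime's actual definition:

package antlr

// Interval here only illustrates the pattern; the runtime's own type and
// fields may differ.
type Interval struct {
	start int
	stop  int
}

// NewInterval stands in for the JavaScript `new Interval(start, stop)` call:
// Go has no `new X(args)` constructor syntax, so the port exposes an
// exported factory function per type instead.
func NewInterval(start, stop int) *Interval {
	i := new(Interval) // plain allocation of a zero-valued struct
	i.start = start
	i.stop = stop
	return i
}

// Call sites then read NewInterval(0, len(tokens)-1) instead of
// new Interval(0, len(tokens)-1).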

View File

@@ -338,7 +338,7 @@ func (bt *BufferedTokenStream) getText(interval) string {
 bt.lazyInit()
 bt.fill()
 if (interval == undefined || interval == nil) {
-interval = new Interval(0, len(bt.tokens) - 1)
+interval = NewInterval(0, len(bt.tokens) - 1)
 }
 var start = interval.start
 if (start instanceof Token) {

View File

@@ -45,7 +45,7 @@ func NewCommonTokenFactory(copyText bool) CommonTokenFactory {
 var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
 func (this *CommonTokenFactory) create(source, type, text, channel, start, stop, line, column) {
-var t = new CommonToken(source, type, channel, start, stop)
+var t = NewCommonToken(source, type, channel, start, stop)
 t.line = line
 t.column = column
 if (text !=nil) {
@@ -57,7 +57,7 @@ func (this *CommonTokenFactory) create(source, type, text, channel, start, stop,
 }
 func (this *CommonTokenFactory) createThin(type, text) {
-var t = new CommonToken(nil, type)
+var t = NewCommonToken(nil, type)
 t.text = text
 return t
 }

View File

@@ -86,7 +86,7 @@ func (i *IntervalSet) addInterval(v Interval) {
 }
 // overlapping range -> adjust and reduce
 else if (v.start <= i.stop) {
-i.intervals[k] = new Interval(Math.min(i.start, v.start), Math.max(i.stop, v.stop))
+i.intervals[k] = NewInterval(Math.min(i.start, v.start), Math.max(i.stop, v.stop))
 i.reduce(k)
 return
 }
@@ -167,8 +167,8 @@ func (i *IntervalSet) removeRange(v Interval) {
 }
 // check for including range, split it
 else if(v.start>i.start && v.stop<i.stop) {
-i.intervals[k] = new Interval(i.start, v.start)
-var x = new Interval(v.stop, i.stop)
+i.intervals[k] = NewInterval(i.start, v.start)
+var x = NewInterval(v.stop, i.stop)
 i.intervals.splice(k, 0, x)
 return
 }
@@ -179,11 +179,11 @@ func (i *IntervalSet) removeRange(v Interval) {
 }
 // check for lower boundary
 else if(v.start<i.stop) {
-i.intervals[k] = new Interval(i.start, v.start)
+i.intervals[k] = NewInterval(i.start, v.start)
 }
 // check for upper boundary
 else if(v.stop<i.stop) {
-i.intervals[k] = new Interval(v.stop, i.stop)
+i.intervals[k] = NewInterval(v.stop, i.stop)
 }
 k += 1
 }

View File

@@ -26,7 +26,9 @@ type LL1Analyzer struct {
 }
 func NewLL1Analyzer (atn) *LL1Analyzer {
-this.atn = atn
+la = new(LL1Analyzer)
+la.atn = atn
+return la
 }
 //* Special value added to the lookahead sets to indicate that we hit
@@ -34,7 +36,6 @@ func NewLL1Analyzer (atn) *LL1Analyzer {
 ///
 LL1Analyzer.HIT_PRED = Token.INVALID_TYPE
-//*
 // Calculates the SLL(1) expected lookahead set for each outgoing transition
 // of an {@link ATNState}. The returned array has one element for each
@@ -45,19 +46,19 @@ LL1Analyzer.HIT_PRED = Token.INVALID_TYPE
 // @param s the ATN state
 // @return the expected symbols for each outgoing transition of {@code s}.
 ///
-LL1func (this *Analyzer) getDecisionLookahead(s) {
+func (la *LL1Analyzer) getDecisionLookahead(s) {
 if (s == nil) {
 return nil
 }
 var count = s.transitions.length
 var look = []
 for(var alt=0 alt< count alt++) {
-look[alt] = new IntervalSet()
-var lookBusy = new Set()
+look[alt] = NewIntervalSet()
+var lookBusy = NewSet()
 var seeThruPreds = false // fail to get lookahead upon pred
-this._LOOK(s.transition(alt).target, nil, PredictionContext.EMPTY,
-look[alt], lookBusy, new BitSet(), seeThruPreds, false)
-// Wipe out lookahead for this alternative if we found nothing
+la._LOOK(s.transition(alt).target, nil, PredictionContext.EMPTY,
+look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
+// Wipe out lookahead for la alternative if we found nothing
 // or we had a predicate when we !seeThruPreds
 if (look[alt].length==0 || look[alt].contains(LL1Analyzer.HIT_PRED)) {
 look[alt] = nil
@@ -84,12 +85,12 @@ LL1func (this *Analyzer) getDecisionLookahead(s) {
 // @return The set of tokens that can follow {@code s} in the ATN in the
 // specified {@code ctx}.
 ///
-LL1func (this *Analyzer) LOOK(s, stopState, ctx) {
-var r = new IntervalSet()
+func (la *LL1Analyzer) LOOK(s, stopState, ctx) {
+var r = NewIntervalSet()
 var seeThruPreds = true // ignore preds get all lookahead
 ctx = ctx || nil
 var lookContext = ctx!=nil ? predictionContextFromRuleContext(s.atn, ctx) : nil
-this._LOOK(s, stopState, lookContext, r, new Set(), new BitSet(), seeThruPreds, true)
+la._LOOK(s, stopState, lookContext, r, NewSet(), NewBitSet(), seeThruPreds, true)
 return r
 }
@@ -111,10 +112,10 @@ LL1func (this *Analyzer) LOOK(s, stopState, ctx) {
 // @param look The result lookahead set.
 // @param lookBusy A set used for preventing epsilon closures in the ATN
 // from causing a stack overflow. Outside code should pass
-// {@code new Set<ATNConfig>} for this argument.
+// {@code NewSet<ATNConfig>} for la argument.
 // @param calledRuleStack A set used for preventing left recursion in the
 // ATN from causing a stack overflow. Outside code should pass
-// {@code new BitSet()} for this argument.
+// {@code NewBitSet()} for la argument.
 // @param seeThruPreds {@code true} to true semantic predicates as
 // implicitly {@code true} and "see through them", otherwise {@code false}
 // to treat semantic predicates as opaque and add {@link //HIT_PRED} to the
@@ -123,8 +124,8 @@ LL1func (this *Analyzer) LOOK(s, stopState, ctx) {
 // outermost context is reached. This parameter has no effect if {@code ctx}
 // is {@code nil}.
 ///
-LL1func (this *Analyzer) _LOOK(s, stopState , ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF) {
-var c = new ATNConfig({state:s, alt:0}, ctx)
+func (la *LL1Analyzer) _LOOK(s, stopState , ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF) {
+var c = NewATNConfig({state:s, alt:0}, ctx)
 if (lookBusy.contains(c)) {
 return
 }
@@ -149,11 +150,11 @@ LL1func (this *Analyzer) _LOOK(s, stopState , ctx, look, lookBusy, calledRuleSta
 if (ctx != PredictionContext.EMPTY) {
 // run thru all possible stack tops in ctx
 for(var i=0 i<ctx.length i++) {
-var returnState = this.atn.states[ctx.getReturnState(i)]
+var returnState = la.atn.states[ctx.getReturnState(i)]
 var removed = calledRuleStack.contains(returnState.ruleIndex)
 try {
 calledRuleStack.remove(returnState.ruleIndex)
-this._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+la._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
 } finally {
 if (removed) {
 calledRuleStack.add(returnState.ruleIndex)
@@ -163,7 +164,7 @@ LL1func (this *Analyzer) _LOOK(s, stopState , ctx, look, lookBusy, calledRuleSta
 return
 }
 }
-for(var j=0 j<s.transitions.length j++) {
+for j :=0; j<s.transitions.length; j++ {
 var t = s.transitions[j]
 if (t.constructor == RuleTransition) {
 if (calledRuleStack.contains(t.target.ruleIndex)) {
@@ -172,31 +173,28 @@ LL1func (this *Analyzer) _LOOK(s, stopState , ctx, look, lookBusy, calledRuleSta
 var newContext = SingletonPredictionContext.create(ctx, t.followState.stateNumber)
 try {
 calledRuleStack.add(t.target.ruleIndex)
-this._LOOK(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+la._LOOK(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
 } finally {
 calledRuleStack.remove(t.target.ruleIndex)
 }
 } else if (t instanceof AbstractPredicateTransition ) {
 if (seeThruPreds) {
-this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+la._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
 } else {
 look.addOne(LL1Analyzer.HIT_PRED)
 }
 } else if( t.isEpsilon) {
-this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+la._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
 } else if (t.constructor == WildcardTransition) {
-look.addRange( Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType )
+look.addRange( Token.MIN_USER_TOKEN_TYPE, la.atn.maxTokenType )
 } else {
 var set = t.label
 if (set != nil) {
 if (t instanceof NotSetTransition) {
-set = set.complement(Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType)
+set = set.complement(Token.MIN_USER_TOKEN_TYPE, la.atn.maxTokenType)
 }
 look.addSet(set)
 }
 }
 }
 }
-exports.LL1Analyzer = LL1Analyzer
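Besides the constructor rename, this file shows two other recurring moves in the port: the JavaScript `this` receiver becomes a named Go method receiver (`la`), and the CommonJS `exports.LL1Analyzer = ...` line is dropped because Go exports through capitalized identifiers. A rough sketch of the resulting shape, with the ATN type stubbed and only the field this hunk touches:

package antlr

// ATN is stubbed here; the real type lives elsewhere in the runtime.
type ATN struct{}

// LL1Analyzer is reduced to the single field the hunk above assigns.
type LL1Analyzer struct {
	atn *ATN
}

// NewLL1Analyzer mirrors the rewritten constructor: allocate, assign, return.
func NewLL1Analyzer(atn *ATN) *LL1Analyzer {
	la := new(LL1Analyzer)
	la.atn = atn
	return la
}

// Methods hang off the named receiver, which is why calls written as
// this._LOOK(...) in the JavaScript source become la._LOOK(...) here.
func (la *LL1Analyzer) getATN() *ATN {
	return la.atn
}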

View File

@@ -257,7 +257,7 @@ func (l *Lexer) emitToken(token) {
 // outermost lexical rule. The token object should point into the
 // char buffer start..stop. If there is a text override in 'text',
 // use that to set the token's text. Override l method to emit
-// custom Token objects or provide a new factory.
+// custom Token objects or provide a Newfactory.
 // /
 func (l *Lexer) emit() {
 var t = l._factory.create(l._tokenFactorySourcePair, l._type,

View File

@@ -106,7 +106,7 @@ func (p.*Parser) match(ttype) {
 } else {
 t = p._errHandler.recoverInline(p.
 if (p.buildParseTrees && t.tokenIndex == -1) {
-// we must have conjured up a new token during single token
+// we must have conjured up a Newtoken during single token
 // insertion
 // if it's not the current symbol
 p._ctx.addErrorNode(t)
@@ -138,7 +138,7 @@ func (p.*Parser) matchWildcard() {
 } else {
 t = p._errHandler.recoverInline(p.
 if (p._buildParseTrees && t.tokenIndex == -1) {
-// we must have conjured up a new token during single token
+// we must have conjured up a Newtoken during single token
 // insertion
 // if it's not the current symbol
 p._ctx.addErrorNode(t)
@@ -244,7 +244,7 @@ func (p.*Parser) getTokenFactory() {
 return p._input.tokenSource._factory
 }
-// Tell our token source and error strategy about a new way to create tokens.//
+// Tell our token source and error strategy about a Newway to create tokens.//
 func (p.*Parser) setTokenFactory(factory) {
 p._input.tokenSource._factory = factory
 }
@@ -262,9 +262,9 @@ func (p.*Parser) getATNWithBypassAlts() {
 }
 var result = p.bypassAltsAtnCache[serializedAtn]
 if (result == nil) {
-var deserializationOptions = new ATNDeserializationOptions()
+var deserializationOptions = NewATNDeserializationOptions()
 deserializationOptions.generateRuleBypassTransitions = true
-result = new ATNDeserializer(deserializationOptions)
+result = NewATNDeserializer(deserializationOptions)
 .deserialize(serializedAtn)
 p.bypassAltsAtnCache[serializedAtn] = result
 }
@@ -297,7 +297,7 @@ func (p.*Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer) {
 if (lexer == nil) {
 throw "Parser can't discover a lexer to use"
 }
-var m = new ParseTreePatternMatcher(lexer, p.
+var m = NewParseTreePatternMatcher(lexer, p.
 return m.compile(pattern, patternRuleIndex)
 }
@@ -417,7 +417,7 @@ func (p.*Parser) exitRule() {
 }
 func (p.*Parser) enterOuterAlt(localctx, altNum) {
-// if we have new localctx, make sure we replace existing ctx
+// if we have Newlocalctx, make sure we replace existing ctx
 // that is previous child of parse tree
 if (p.buildParseTrees && p._ctx != localctx) {
 if (p._ctx.parentCtx != nil) {
@@ -649,7 +649,7 @@ func (p.*Parser) setTrace(trace) {
 if (p._tracer != nil) {
 p.removeParseListener(p._tracer)
 }
-p._tracer = new TraceListener(p.
+p._tracer = NewTraceListener(p.
 p.addParseListener(p._tracer)
 }
 }

View File

@@ -90,14 +90,14 @@ func (this *ParserRuleContext) removeLastChild() {
 }
 func (this *ParserRuleContext) addTokenNode(token) {
-var node = new TerminalNodeImpl(token)
+var node = NewTerminalNodeImpl(token)
 this.addChild(node)
 node.parentCtx = this
 return node
 }
 func (this *ParserRuleContext) addErrorNode(badToken) {
-var node = new ErrorNodeImpl(badToken)
+var node = NewErrorNodeImpl(badToken)
 this.addChild(node)
 node.parentCtx = this
 return node
@@ -187,11 +187,11 @@ func (this *ParserRuleContext) getSourceInterval() {
 if( this.start == nil || this.stop == nil) {
 return INVALID_INTERVAL
 } else {
-return new Interval(this.start.tokenIndex, this.stop.tokenIndex)
+return NewInterval(this.start.tokenIndex, this.stop.tokenIndex)
 }
 }
-RuleContext.EMPTY = new ParserRuleContext()
+RuleContext.EMPTY = NewParserRuleContext()
 func InterpreterRuleContext(parent, invokingStateNumber, ruleIndex) {
 ParserRuleContext.call(parent, invokingStateNumber)

View File

@@ -76,7 +76,7 @@ type PredictionContextCache struct {
 }
 // Add a context to the cache and return it. If the context already exists,
-// return that one instead and do not add a new context to the cache.
+// return that one instead and do not add a Newcontext to the cache.
 // Protect shared cache from unsafe thread access.
 //
 func (this *PredictionContextCache) add(ctx) {
@@ -117,7 +117,7 @@ SingletonPredictionContext.create = function(parent, returnState) {
 // someone can pass in the bits of an array ctx that mean $
 return PredictionContext.EMPTY
 } else {
-return new SingletonPredictionContext(parent, returnState)
+return NewSingletonPredictionContext(parent, returnState)
 }
 }
@@ -197,7 +197,7 @@ func (this *EmptyPredictionContext) toString() {
 return "$"
 }
-PredictionContext.EMPTY = new EmptyPredictionContext()
+PredictionContext.EMPTY = NewEmptyPredictionContext()
 func ArrayPredictionContext(parents, returnStates) {
 // Parent can be nil only if full ctx mode and we make an array
@@ -321,10 +321,10 @@ func merge(a, b, rootIsWildcard, mergeCache) {
 }
 // convert singleton so both are arrays to normalize
 if (a instanceof SingletonPredictionContext) {
-a = new ArrayPredictionContext([a.getParent()], [a.returnState])
+a = NewArrayPredictionContext([a.getParent()], [a.returnState])
 }
 if (b instanceof SingletonPredictionContext) {
-b = new ArrayPredictionContext([b.getParent()], [b.returnState])
+b = NewArrayPredictionContext([b.getParent()], [b.returnState])
 }
 return mergeArrays(a, b, rootIsWildcard, mergeCache)
 }
@@ -337,7 +337,7 @@ func merge(a, b, rootIsWildcard, mergeCache) {
 // type="image/svg+xml"/></p>
 //
 // <p>Same stack top, parents differ merge parents giving array node, then
-// remainders of those graphs. A new root node is created to point to the
+// remainders of those graphs. A Newroot node is created to point to the
 // merged parents.<br>
 // <embed src="images/SingletonMerge_SameRootDiffPar.svg"
 // type="image/svg+xml"/></p>
@@ -392,7 +392,7 @@ func mergeSingletons(a, b, rootIsWildcard, mergeCache) {
 // else: ax + ay = a'[x,y]
 // merge parents x and y, giving array node with x,y then remainders
 // of those graphs. dup a, a' points at merged array
-// new joined parent so create new singleton pointing to it, a'
+// Newjoined parent so create Newsingleton pointing to it, a'
 var spc = SingletonPredictionContext.create(parent, a.returnState)
 if (mergeCache != nil) {
 mergeCache.set(a, b, spc)
@@ -414,7 +414,7 @@ func mergeSingletons(a, b, rootIsWildcard, mergeCache) {
 payloads[1] = a.returnState
 }
 var parents = [ singleParent, singleParent ]
-var apc = new ArrayPredictionContext(parents, payloads)
+var apc = NewArrayPredictionContext(parents, payloads)
 if (mergeCache != nil) {
 mergeCache.set(a, b, apc)
 }
@@ -430,7 +430,7 @@ func mergeSingletons(a, b, rootIsWildcard, mergeCache) {
 payloads[1] = a.returnState
 parents = [ b.parentCtx, a.parentCtx ]
 }
-var a_ = new ArrayPredictionContext(parents, payloads)
+var a_ = NewArrayPredictionContext(parents, payloads)
 if (mergeCache != nil) {
 mergeCache.set(a, b, a_)
 }
@@ -491,11 +491,11 @@ func mergeRoot(a, b, rootIsWildcard) {
 var payloads = [ b.returnState,
 PredictionContext.EMPTY_RETURN_STATE ]
 var parents = [ b.parentCtx, nil ]
-return new ArrayPredictionContext(parents, payloads)
+return NewArrayPredictionContext(parents, payloads)
 } else if (b == PredictionContext.EMPTY) { // x + $ = [$,x] ($ is always first if present)
 var payloads = [ a.returnState, PredictionContext.EMPTY_RETURN_STATE ]
 var parents = [ a.parentCtx, nil ]
-return new ArrayPredictionContext(parents, payloads)
+return NewArrayPredictionContext(parents, payloads)
 }
 }
 return nil
@@ -601,7 +601,7 @@ func mergeArrays(a, b, rootIsWildcard, mergeCache) {
 mergedReturnStates = mergedReturnStates.slice(0, k)
 }
-var M = new ArrayPredictionContext(mergedParents, mergedReturnStates)
+var M = NewArrayPredictionContext(mergedParents, mergedReturnStates)
 // if we created same array as a or b, return that instead
 // TODO: track whether this is possible above during merge sort for speed
@@ -683,7 +683,7 @@ func getCachedPredictionContext(context, contextCache, visited) {
 updated = SingletonPredictionContext.create(parents[0], context
 .getReturnState(0))
 } else {
-updated = new ArrayPredictionContext(parents, context.returnStates)
+updated = NewArrayPredictionContext(parents, context.returnStates)
 }
 contextCache.add(updated)
 visited[updated] = updated

View File

@@ -84,7 +84,7 @@ func (this *Recognizer) getErrorHeader(e) {
 // to use t.toString() (which, for CommonToken, dumps everything about
 // the token). This is better than forcing you to override a method in
 // your token objects because you don't have to go modify your lexer
-// so that it creates a new Java type.
+// so that it creates a NewJava type.
 //
 // @deprecated This method is not called by the ANTLR 4 Runtime. Specific
 // implementations of {@link ANTLRErrorStrategy} may provide a similar
@@ -108,7 +108,7 @@ func (this *Recognizer) getTokenErrorDisplay(t) {
 }
 func (this *Recognizer) getErrorListenerDispatch() {
-return new ProxyErrorListener(this._listeners)
+return NewProxyErrorListener(this._listeners)
 }
 // subclass needs to override these if there are sempreds or actions

View File

@@ -87,7 +87,7 @@ CommonToken.prototype.constructor = CommonToken
 // {@link //source} for tokens that do not have a source.
 CommonToken.EMPTY_SOURCE = [ nil, nil ]
-// Constructs a new {@link CommonToken} as a copy of another {@link Token}.
+// Constructs a New{@link CommonToken} as a copy of another {@link Token}.
 //
 // <p>
 // If {@code oldToken} is also a {@link CommonToken} instance, the newly
@@ -100,7 +100,7 @@ CommonToken.EMPTY_SOURCE = [ nil, nil ]
 // @param oldToken The token to copy.
 //
 func (this *CommonToken) clone() {
-var t = new CommonToken(this.source, this.type, this.channel, this.start,
+var t = NewCommonToken(this.source, this.type, this.channel, this.start,
 this.stop)
 t.tokenIndex = this.tokenIndex
 t.line = this.line

View File

@@ -55,7 +55,7 @@ func NewATN(grammarType , maxTokenType) ATN {
 // the rule surrounding {@code s}. In other words, the set will be
 // restricted to tokens reachable staying within {@code s}'s rule.
 func (this *ATN) nextTokensInContext(s, ctx) {
-var anal = new LL1Analyzer(this)
+var anal = NewLL1Analyzer(this)
 return anal.LOOK(s, nil, ctx)
 }
@@ -134,7 +134,7 @@ func (this *ATN) getExpectedTokens( stateNumber, ctx ) {
 if (!following.contains(Token.EPSILON)) {
 return following
 }
-var expected = new IntervalSet()
+var expected = NewIntervalSet()
 expected.addSet(following)
 expected.removeOne(Token.EPSILON)
 while (ctx != nil && ctx.invokingState >= 0 && following.contains(Token.EPSILON)) {

View File

@@ -40,7 +40,7 @@ func ATNConfigSet(fullCtx) {
 // use a hash table that lets us specify the equals/hashcode operation.
 // All configs but hashed by (s, i, _, pi) not including context. Wiped out
 // when we go readonly as this set becomes a DFA state.
-this.configLookup = new Set(hashATNConfig, equalATNConfigs)
+this.configLookup = NewSet(hashATNConfig, equalATNConfigs)
 // Indicates that this configuration set is part of a full context
 // LL prediction. It will be used to determine how to merge $. With SLL
 // it's a wildcard whereas it is not for LL context merge.
@@ -71,7 +71,7 @@ func ATNConfigSet(fullCtx) {
 return this
 }
-// Adding a new config means merging contexts with existing configs for
+// Adding a Newconfig means merging contexts with existing configs for
 // {@code (s, i, pi, _)}, where {@code s} is the
 // {@link ATNConfig//state}, {@code i} is the {@link ATNConfig//alt}, and
 // {@code pi} is the {@link ATNConfig//semanticContext}. We use
@@ -103,7 +103,7 @@ func (this *ATNConfigSet) add(config, mergeCache) {
 var rootIsWildcard = !this.fullCtx
 var merged = merge(existing.context, config.context, rootIsWildcard, mergeCache)
 // no need to check for existing.context, config.context in cache
-// since only way to create new graphs is "call rule" and here. We
+// since only way to create Newgraphs is "call rule" and here. We
 // cache at both places.
 existing.reachesIntoOuterContext = Math.max( existing.reachesIntoOuterContext, config.reachesIntoOuterContext)
 // make sure to preserve the precedence filter suppression during the merge
@@ -115,7 +115,7 @@ func (this *ATNConfigSet) add(config, mergeCache) {
 }
 func (this *ATNConfigSet) getStates() {
-var states = new Set()
+var states = NewSet()
 for (var i = 0 i < this.configs.length i++) {
 states.add(this.configs[i].state)
 }
@@ -222,7 +222,7 @@ func (this *ATNConfigSet) clear() {
 }
 this.configs = []
 this.cachedHashString = "-1"
-this.configLookup = new Set()
+this.configLookup = NewSet()
 }
 func (this *ATNConfigSet) setReadonly(readOnly) {
@@ -242,7 +242,7 @@ func (this *ATNConfigSet) toString() {
 type OrderedATNConfigSet struct {
 ATNConfigSet.call(this)
-this.configLookup = new Set()
+this.configLookup = NewSet()
 return this
 }
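The `configLookup` field relies on a Set built with explicit hash and equality functions, since ATN configs need value equality that Go's built-in map keys do not give directly. A sketch of what such a set could look like; the type and function signatures below are assumptions for illustration, not the port's actual implementation:

package antlr

// Set is a sketch of a hash set parameterized by caller-supplied hash and
// equality functions; each bucket holds the values that share a hash string.
type Set struct {
	data     map[string][]interface{}
	hashFn   func(interface{}) string
	equalsFn func(a, b interface{}) bool
}

// NewSet follows the same constructor-function pattern as the rest of the
// port: it replaces the JavaScript `new Set(hashFn, equalsFn)` call.
func NewSet(hashFn func(interface{}) string, equalsFn func(a, b interface{}) bool) *Set {
	return &Set{
		data:     make(map[string][]interface{}),
		hashFn:   hashFn,
		equalsFn: equalsFn,
	}
}

// add inserts v unless an equal value is already present in its bucket, and
// returns the value that ends up in the set.
func (s *Set) add(v interface{}) interface{} {
	key := s.hashFn(v)
	for _, existing := range s.data[key] {
		if s.equalsFn(existing, v) {
			return existing
		}
	}
	s.data[key] = append(s.data[key], v)
	return v
}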

View File

@@ -11,7 +11,7 @@ func ATNDeserializationOptions(copyFrom) {
 return this
 }
-ATNDeserializationOptions.defaultOptions = new ATNDeserializationOptions()
+ATNDeserializationOptions.defaultOptions = NewATNDeserializationOptions()
 ATNDeserializationOptions.defaultOptions.readOnly = true
 // func __setattr__(self, key, value):

View File

@@ -149,7 +149,7 @@ func (this *ATNDeserializer) checkUUID() {
 func (this *ATNDeserializer) readATN() {
 var grammarType = this.readInt()
 var maxTokenType = this.readInt()
-return new ATN(grammarType, maxTokenType)
+return NewATN(grammarType, maxTokenType)
 }
 func (this *ATNDeserializer) readStates(atn) {
@@ -245,7 +245,7 @@ func (this *ATNDeserializer) readSets(atn) {
 var sets = []
 var m = this.readInt()
 for (var i=0 i<m i++) {
-var iset = new IntervalSet()
+var iset = NewIntervalSet()
 sets.push(iset)
 var n = this.readInt()
 var containsEof = this.readInt()
@@ -290,7 +290,7 @@ func (this *ATNDeserializer) readEdges(atn, sets) {
 }
 }
-trans = new EpsilonTransition(t.followState, outermostPrecedenceReturn)
+trans = NewEpsilonTransition(t.followState, outermostPrecedenceReturn)
 atn.ruleToStopState[t.target.ruleIndex].addTransition(trans)
 }
 }
@@ -370,11 +370,11 @@ func (this *ATNDeserializer) generateRuleBypassTransitions(atn) {
 func (this *ATNDeserializer) generateRuleBypassTransition(atn, idx) {
 var i, state
-var bypassStart = new BasicBlockStartState()
+var bypassStart = NewBasicBlockStartState()
 bypassStart.ruleIndex = idx
 atn.addState(bypassStart)
-var bypassStop = new BlockEndState()
+var bypassStop = NewBlockEndState()
 bypassStop.ruleIndex = idx
 atn.addState(bypassStop)
@@ -427,14 +427,14 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn, idx) {
 bypassStart.addTransition(ruleToStartState.transitions[count-1])
 ruleToStartState.transitions = ruleToStartState.transitions.slice(-1)
 }
-// link the new states
-atn.ruleToStartState[idx].addTransition(new EpsilonTransition(bypassStart))
-bypassStop.addTransition(new EpsilonTransition(endState))
-var matchState = new BasicState()
+// link the Newstates
+atn.ruleToStartState[idx].addTransition(NewEpsilonTransition(bypassStart))
+bypassStop.addTransition(NewEpsilonTransition(endState))
+var matchState = NewBasicState()
 atn.addState(matchState)
-matchState.addTransition(new AtomTransition(bypassStop, atn.ruleToTokenType[idx]))
-bypassStart.addTransition(new EpsilonTransition(matchState))
+matchState.addTransition(NewAtomTransition(bypassStop, atn.ruleToTokenType[idx]))
+bypassStart.addTransition(NewEpsilonTransition(matchState))
 }
 func (this *ATNDeserializer) stateIsEndStateFor(state, idx) {
@@ -586,25 +586,25 @@ ATNDeserializer.prototype.edgeFactory = function(atn, type, src, trg, arg1, arg2
 var target = atn.states[trg]
 switch(type) {
 case Transition.EPSILON:
-return new EpsilonTransition(target)
+return NewEpsilonTransition(target)
 case Transition.RANGE:
-return arg3 != 0 ? new RangeTransition(target, Token.EOF, arg2) : new RangeTransition(target, arg1, arg2)
+return arg3 != 0 ? NewRangeTransition(target, Token.EOF, arg2) : NewRangeTransition(target, arg1, arg2)
 case Transition.RULE:
-return new RuleTransition(atn.states[arg1], arg2, arg3, target)
+return NewRuleTransition(atn.states[arg1], arg2, arg3, target)
 case Transition.PREDICATE:
-return new PredicateTransition(target, arg1, arg2, arg3 != 0)
+return NewPredicateTransition(target, arg1, arg2, arg3 != 0)
 case Transition.PRECEDENCE:
-return new PrecedencePredicateTransition(target, arg1)
+return NewPrecedencePredicateTransition(target, arg1)
 case Transition.ATOM:
-return arg3 != 0 ? new AtomTransition(target, Token.EOF) : new AtomTransition(target, arg1)
+return arg3 != 0 ? NewAtomTransition(target, Token.EOF) : NewAtomTransition(target, arg1)
 case Transition.ACTION:
-return new ActionTransition(target, arg1, arg2, arg3 != 0)
+return NewActionTransition(target, arg1, arg2, arg3 != 0)
 case Transition.SET:
-return new SetTransition(target, sets[arg1])
+return NewSetTransition(target, sets[arg1])
 case Transition.NOT_SET:
-return new NotSetTransition(target, sets[arg1])
+return NewNotSetTransition(target, sets[arg1])
 case Transition.WILDCARD:
-return new WildcardTransition(target)
+return NewWildcardTransition(target)
 default:
 throw "The specified transition type: " + type + " is not valid."
 }
@@ -614,18 +614,18 @@ func (this *ATNDeserializer) stateFactory(type, ruleIndex) {
 if (this.stateFactories == nil) {
 var sf = []
 sf[ATNState.INVALID_TYPE] = nil
-sf[ATNState.BASIC] = function() { return new BasicState() }
-sf[ATNState.RULE_START] = function() { return new RuleStartState() }
-sf[ATNState.BLOCK_START] = function() { return new BasicBlockStartState() }
-sf[ATNState.PLUS_BLOCK_START] = function() { return new PlusBlockStartState() }
-sf[ATNState.STAR_BLOCK_START] = function() { return new StarBlockStartState() }
-sf[ATNState.TOKEN_START] = function() { return new TokensStartState() }
-sf[ATNState.RULE_STOP] = function() { return new RuleStopState() }
-sf[ATNState.BLOCK_END] = function() { return new BlockEndState() }
-sf[ATNState.STAR_LOOP_BACK] = function() { return new StarLoopbackState() }
-sf[ATNState.STAR_LOOP_ENTRY] = function() { return new StarLoopEntryState() }
-sf[ATNState.PLUS_LOOP_BACK] = function() { return new PlusLoopbackState() }
-sf[ATNState.LOOP_END] = function() { return new LoopEndState() }
+sf[ATNState.BASIC] = function() { return NewBasicState() }
+sf[ATNState.RULE_START] = function() { return NewRuleStartState() }
+sf[ATNState.BLOCK_START] = function() { return NewBasicBlockStartState() }
+sf[ATNState.PLUS_BLOCK_START] = function() { return NewPlusBlockStartState() }
+sf[ATNState.STAR_BLOCK_START] = function() { return NewStarBlockStartState() }
+sf[ATNState.TOKEN_START] = function() { return NewTokensStartState() }
+sf[ATNState.RULE_STOP] = function() { return NewRuleStopState() }
+sf[ATNState.BLOCK_END] = function() { return NewBlockEndState() }
+sf[ATNState.STAR_LOOP_BACK] = function() { return NewStarLoopbackState() }
+sf[ATNState.STAR_LOOP_ENTRY] = function() { return NewStarLoopEntryState() }
+sf[ATNState.PLUS_LOOP_BACK] = function() { return NewPlusLoopbackState() }
+sf[ATNState.LOOP_END] = function() { return NewLoopEndState() }
 this.stateFactories = sf
 }
 if (type>this.stateFactories.length || this.stateFactories[type] == nil) {
@@ -642,14 +642,14 @@ func (this *ATNDeserializer) stateFactory(type, ruleIndex) {
 ATNDeserializer.prototype.lexerActionFactory = function(type, data1, data2) {
 if (this.actionFactories == nil) {
 var af = []
-af[LexerActionType.CHANNEL] = function(data1, data2) { return new LexerChannelAction(data1) }
-af[LexerActionType.CUSTOM] = function(data1, data2) { return new LexerCustomAction(data1, data2) }
-af[LexerActionType.MODE] = function(data1, data2) { return new LexerModeAction(data1) }
+af[LexerActionType.CHANNEL] = function(data1, data2) { return NewLexerChannelAction(data1) }
+af[LexerActionType.CUSTOM] = function(data1, data2) { return NewLexerCustomAction(data1, data2) }
+af[LexerActionType.MODE] = function(data1, data2) { return NewLexerModeAction(data1) }
 af[LexerActionType.MORE] = function(data1, data2) { return LexerMoreAction.INSTANCE }
 af[LexerActionType.POP_MODE] = function(data1, data2) { return LexerPopModeAction.INSTANCE }
-af[LexerActionType.PUSH_MODE] = function(data1, data2) { return new LexerPushModeAction(data1) }
+af[LexerActionType.PUSH_MODE] = function(data1, data2) { return NewLexerPushModeAction(data1) }
 af[LexerActionType.SKIP] = function(data1, data2) { return LexerSkipAction.INSTANCE }
-af[LexerActionType.TYPE] = function(data1, data2) { return new LexerTypeAction(data1) }
+af[LexerActionType.TYPE] = function(data1, data2) { return NewLexerTypeAction(data1) }
 this.actionFactories = af
 }
 if (type>this.actionFactories.length || this.actionFactories[type] == nil) {
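Both factory tables above are just arrays of constructor closures indexed by the serialized type code, so the `new X()` to `NewX()` rename touches every entry. A compact sketch of the same lookup idea in Go, with stub state types standing in for the runtime's real ones:

package antlr

// Stub state types; the runtime defines the real ones elsewhere.
type atnState interface{ stateType() int }

type BasicState struct{}
type RuleStartState struct{}

func (*BasicState) stateType() int     { return 1 }
func (*RuleStartState) stateType() int { return 2 }

func NewBasicState() *BasicState         { return new(BasicState) }
func NewRuleStartState() *RuleStartState { return new(RuleStartState) }

// stateFactories maps a serialized type code to a closure that builds the
// matching state, mirroring the sf[...] table in the hunk above.
var stateFactories = map[int]func() atnState{
	1: func() atnState { return NewBasicState() },
	2: func() atnState { return NewRuleStartState() },
}

// stateFactory looks the code up and invokes the closure; unknown codes
// return nil here, where the original throws an error.
func stateFactory(code int) atnState {
	if f, ok := stateFactories[code]; ok {
		return f()
	}
	return nil
}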

View File

@@ -32,7 +32,7 @@ func ATNSimulator(atn, sharedContextCache) {
 }
 // Must distinguish between missing edge and edge we know leads nowhere///
-ATNSimulator.ERROR = new DFAState(0x7FFFFFFF, new ATNConfigSet())
+ATNSimulator.ERROR = NewDFAState(0x7FFFFFFF, NewATNConfigSet())
 func (this *ATNSimulator) getCachedContext(context) {

View File

@@ -64,7 +64,7 @@ func LexerATNSimulator(recog, atn, decisionToDFA, sharedContextCache) {
 this.mode = Lexer.DEFAULT_MODE
 // Used during DFA/ATN exec to record the most recent accept configuration
 // info
-this.prevAccept = new SimState()
+this.prevAccept = NewSimState()
 // done
 return this
 }
@@ -194,7 +194,7 @@ LexerATNSimulator.prototype.execATN = function(input, ds0) {
 }
 }
 t = input.LA(1)
-s = target // flip current DFA target becomes new src/from state
+s = target // flip current DFA target becomes Newsrc/from state
 }
 return this.failOrAccept(this.prevAccept, input, s.configs, t)
 }
@@ -234,7 +234,7 @@ func (this *LexerATNSimulator) getExistingTargetState(s, t) {
 // {@code t}. If {@code t} does not lead to a valid DFA state, this method
 // returns {@link //ERROR}.
 func (this *LexerATNSimulator) computeTargetState(input, s, t) {
-var reach = new OrderedATNConfigSet()
+var reach = NewOrderedATNConfigSet()
 // if we don't find an existing DFA state
 // Fill reach starting from closure, following t transitions
 this.getReachableConfigSet(input, s.configs, reach, t)
@@ -263,7 +263,7 @@ func (this *LexerATNSimulator) failOrAccept(prevAccept, input, reach, t) {
 if (t == Token.EOF && input.index == this.startIndex) {
 return Token.EOF
 }
-throw new LexerNoViableAltException(this.recog, input, this.startIndex, reach)
+throw NewLexerNoViableAltException(this.recog, input, this.startIndex, reach)
 }
 }
@@ -294,7 +294,7 @@ func (this *LexerATNSimulator) getReachableConfigSet(input, closure,
 lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.index - this.startIndex)
 }
 var treatEofAsEpsilon = (t == Token.EOF)
-var config = new LexerATNConfig({state:target, lexerActionExecutor:lexerActionExecutor}, cfg)
+var config = NewLexerATNConfig({state:target, lexerActionExecutor:lexerActionExecutor}, cfg)
 if (this.closure(input, config, reach,
 currentAltReachedAcceptState, true, treatEofAsEpsilon)) {
 // any remaining configs for this alt have a lower priority
@@ -330,10 +330,10 @@ func (this *LexerATNSimulator) getReachableTarget(trans, t) {
 func (this *LexerATNSimulator) computeStartState(input, p) {
 var initialContext = PredictionContext.EMPTY
-var configs = new OrderedATNConfigSet()
+var configs = NewOrderedATNConfigSet()
 for (var i = 0 i < p.transitions.length i++) {
 var target = p.transitions[i].target
-var cfg = new LexerATNConfig({state:target, alt:i+1, context:initialContext}, nil)
+var cfg = NewLexerATNConfig({state:target, alt:i+1, context:initialContext}, nil)
 this.closure(input, cfg, configs, false, false, false)
 }
 return configs
@@ -366,7 +366,7 @@ func (this *LexerATNSimulator) closure(input, config, configs,
 configs.add(config)
 return true
 } else {
-configs.add(new LexerATNConfig({ state:config.state, context:PredictionContext.EMPTY}, config))
+configs.add(NewLexerATNConfig({ state:config.state, context:PredictionContext.EMPTY}, config))
 currentAltReachedAcceptState = true
 }
 }
@@ -375,7 +375,7 @@ func (this *LexerATNSimulator) closure(input, config, configs,
 if (config.context.getReturnState(i) != PredictionContext.EMPTY_RETURN_STATE) {
 var newContext = config.context.getParent(i) // "pop" return state
 var returnState = this.atn.states[config.context.getReturnState(i)]
-cfg = new LexerATNConfig({ state:returnState, context:newContext }, config)
+cfg = NewLexerATNConfig({ state:returnState, context:newContext }, config)
 currentAltReachedAcceptState = this.closure(input, cfg,
 configs, currentAltReachedAcceptState, speculative,
 treatEofAsEpsilon)
@@ -407,7 +407,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input, config, trans,
 var cfg = nil
 if (trans.serializationType == Transition.RULE) {
 var newContext = SingletonPredictionContext.create(config.context, trans.followState.stateNumber)
-cfg = new LexerATNConfig( { state:trans.target, context:newContext}, config)
+cfg = NewLexerATNConfig( { state:trans.target, context:newContext}, config)
 } else if (trans.serializationType == Transition.PRECEDENCE) {
 throw "Precedence predicates are not supported in lexers."
 } else if (trans.serializationType == Transition.PREDICATE) {
@@ -434,7 +434,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input, config, trans,
 }
 configs.hasSemanticContext = true
 if (this.evaluatePredicate(input, trans.ruleIndex, trans.predIndex, speculative)) {
-cfg = new LexerATNConfig({ state:trans.target}, config)
+cfg = NewLexerATNConfig({ state:trans.target}, config)
 }
 } else if (trans.serializationType == Transition.ACTION) {
 if (config.context == nil || config.context.hasEmptyPath()) {
@@ -452,19 +452,19 @@ func (this *LexerATNSimulator) getEpsilonTarget(input, config, trans,
 // the split operation.
 var lexerActionExecutor = LexerActionExecutor.append(config.lexerActionExecutor,
 this.atn.lexerActions[trans.actionIndex])
-cfg = new LexerATNConfig({ state:trans.target, lexerActionExecutor:lexerActionExecutor }, config)
+cfg = NewLexerATNConfig({ state:trans.target, lexerActionExecutor:lexerActionExecutor }, config)
 } else {
 // ignore actions in referenced rules
-cfg = new LexerATNConfig( { state:trans.target}, config)
+cfg = NewLexerATNConfig( { state:trans.target}, config)
 }
 } else if (trans.serializationType == Transition.EPSILON) {
-cfg = new LexerATNConfig({ state:trans.target}, config)
+cfg = NewLexerATNConfig({ state:trans.target}, config)
 } else if (trans.serializationType == Transition.ATOM ||
 trans.serializationType == Transition.RANGE ||
 trans.serializationType == Transition.SET) {
 if (treatEofAsEpsilon) {
 if (trans.matches(Token.EOF, 0, 0xFFFF)) {
-cfg = new LexerATNConfig( { state:trans.target }, config)
+cfg = NewLexerATNConfig( { state:trans.target }, config)
 }
 }
 }
@@ -567,12 +567,12 @@ func (this *LexerATNSimulator) addDFAEdge(from_, tk, to, cfgs) {
 return to
 }
-// Add a new DFA state if there isn't one with this set of
+// Add a NewDFA state if there isn't one with this set of
 // configurations already. This method also detects the first
 // configuration containing an ATN rule stop state. Later, when
 // traversing the DFA, we will know which rule to accept.
 func (this *LexerATNSimulator) addDFAState(configs) {
-var proposed = new DFAState(nil, configs)
+var proposed = NewDFAState(nil, configs)
 var firstConfigWithRuleStopState = nil
 for (var i = 0 i < configs.items.length i++) {
 var cfg = configs.items[i]

View File

@@ -42,7 +42,7 @@ LexerSkipAction.prototype = Object.create(LexerAction.prototype)
 LexerSkipAction.prototype.constructor = LexerSkipAction
 // Provides a singleton instance of this parameterless lexer action.
-LexerSkipAction.INSTANCE = new LexerSkipAction()
+LexerSkipAction.INSTANCE = NewLexerSkipAction()
 func (this *LexerSkipAction) execute(lexer) {
 lexer.skip()
@@ -134,7 +134,7 @@ type LexerPopModeAction struct {
 LexerPopModeAction.prototype = Object.create(LexerAction.prototype)
 LexerPopModeAction.prototype.constructor = LexerPopModeAction
-LexerPopModeAction.INSTANCE = new LexerPopModeAction()
+LexerPopModeAction.INSTANCE = NewLexerPopModeAction()
 // <p>This action is implemented by calling {@link Lexer//popMode}.</p>
 func (this *LexerPopModeAction) execute(lexer) {
@@ -157,7 +157,7 @@ type LexerMoreAction struct {
 LexerMoreAction.prototype = Object.create(LexerAction.prototype)
 LexerMoreAction.prototype.constructor = LexerMoreAction
-LexerMoreAction.INSTANCE = new LexerMoreAction()
+LexerMoreAction.INSTANCE = NewLexerMoreAction()
 // <p>This action is implemented by calling {@link Lexer//popMode}.</p>
 func (this *LexerMoreAction) execute(lexer) {
@@ -255,7 +255,7 @@ func (this *LexerCustomAction) equals(other) {
 // Implements the {@code channel} lexer action by calling
 // {@link Lexer//setChannel} with the assigned channel.
-// Constructs a new {@code channel} action with the specified channel value.
+// Constructs a New{@code channel} action with the specified channel value.
 // @param channel The channel value to pass to {@link Lexer//setChannel}.
 func LexerChannelAction(channel) {
 LexerAction.call(this, LexerActionType.CHANNEL)
@@ -299,7 +299,7 @@ func (this *LexerChannelAction) toString() {
 // lexer actions, see {@link LexerActionExecutor//append} and
 // {@link LexerActionExecutor//fixOffsetBeforeMatch}.</p>
-// Constructs a new indexed custom action by associating a character offset
+// Constructs a Newindexed custom action by associating a character offset
 // with a {@link LexerAction}.
 //
 // <p>Note: This class is only required for lexer actions for which

View File

@@ -33,10 +33,10 @@ func LexerActionExecutor(lexerActions) {
 // of {@code lexerActionExecutor} and {@code lexerAction}.
 LexerActionExecutor.append = function(lexerActionExecutor, lexerAction) {
 if (lexerActionExecutor == nil) {
-return new LexerActionExecutor([ lexerAction ])
+return NewLexerActionExecutor([ lexerAction ])
 }
 var lexerActions = lexerActionExecutor.lexerActions.concat([ lexerAction ])
-return new LexerActionExecutor(lexerActions)
+return NewLexerActionExecutor(lexerActions)
 }
 // Creates a {@link LexerActionExecutor} which encodes the current offset
@@ -75,14 +75,14 @@ func (this *LexerActionExecutor) fixOffsetBeforeMatch(offset) {
 if (updatedLexerActions == nil) {
 updatedLexerActions = this.lexerActions.concat([])
 }
-updatedLexerActions[i] = new LexerIndexedCustomAction(offset,
+updatedLexerActions[i] = NewLexerIndexedCustomAction(offset,
 this.lexerActions[i])
 }
 }
 if (updatedLexerActions == nil) {
 return this
 } else {
-return new LexerActionExecutor(updatedLexerActions)
+return NewLexerActionExecutor(updatedLexerActions)
 }
 }
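`append` and `fixOffsetBeforeMatch` both build a fresh executor instead of mutating the existing one, which maps naturally onto Go slice copies plus the NewLexerActionExecutor constructor. A hedged sketch of the append half; LexerAction is stubbed and the standalone helper name is invented for illustration:

package antlr

// LexerAction is stubbed; the runtime defines the concrete action types.
type LexerAction interface{}

type LexerActionExecutor struct {
	lexerActions []LexerAction
}

func NewLexerActionExecutor(lexerActions []LexerAction) *LexerActionExecutor {
	return &LexerActionExecutor{lexerActions: lexerActions}
}

// appendLexerAction mirrors LexerActionExecutor.append above: a nil executor
// yields a fresh single-action executor; otherwise the action is appended to
// a copy of the existing slice so the original executor is left untouched.
func appendLexerAction(executor *LexerActionExecutor, action LexerAction) *LexerActionExecutor {
	if executor == nil {
		return NewLexerActionExecutor([]LexerAction{action})
	}
	actions := append([]LexerAction{}, executor.lexerActions...)
	actions = append(actions, action)
	return NewLexerActionExecutor(actions)
}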

View File

@ -155,7 +155,7 @@ package atn
// //
// <p> // <p>
// The {@link ParserATNSimulator} locks on the {@link //decisionToDFA} field when // The {@link ParserATNSimulator} locks on the {@link //decisionToDFA} field when
// it adds a new DFA object to that array. {@link //addDFAEdge} // it adds a NewDFA object to that array. {@link //addDFAEdge}
// locks on the DFA for the current decision when setting the // locks on the DFA for the current decision when setting the
// {@link DFAState//edges} field. {@link //addDFAState} locks on // {@link DFAState//edges} field. {@link //addDFAState} locks on
// the DFA for the current decision when looking up a DFA state to see if it // the DFA for the current decision when looking up a DFA state to see if it
@ -190,7 +190,7 @@ package atn
// //
// <pre> // <pre>
// parser.{@link Parser//getInterpreter() getInterpreter()}.{@link //setPredictionMode setPredictionMode}{@code (}{@link PredictionMode//SLL}{@code )} // parser.{@link Parser//getInterpreter() getInterpreter()}.{@link //setPredictionMode setPredictionMode}{@code (}{@link PredictionMode//SLL}{@code )}
// parser.{@link Parser//setErrorHandler setErrorHandler}(new {@link BailErrorStrategy}()) // parser.{@link Parser//setErrorHandler setErrorHandler}(New{@link BailErrorStrategy}())
// </pre> // </pre>
// //
// <p> // <p>
@ -351,10 +351,10 @@ func (this *ParserATNSimulator) adaptivePredict(input, decision, outerContext) {
// than simply setting DFA.s0. // than simply setting DFA.s0.
// //
s0_closure = this.applyPrecedenceFilter(s0_closure) s0_closure = this.applyPrecedenceFilter(s0_closure)
s0 = this.addDFAState(dfa, new DFAState(nil, s0_closure)) s0 = this.addDFAState(dfa, NewDFAState(nil, s0_closure))
dfa.setPrecedenceStartState(this.parser.getPrecedence(), s0) dfa.setPrecedenceStartState(this.parser.getPrecedence(), s0)
} else { } else {
s0 = this.addDFAState(dfa, new DFAState(nil, s0_closure)) s0 = this.addDFAState(dfa, NewDFAState(nil, s0_closure))
dfa.s0 = s0 dfa.s0 = s0
} }
} }
@ -382,7 +382,7 @@ func (this *ParserATNSimulator) adaptivePredict(input, decision, outerContext) {
// putting it on the work list? // putting it on the work list?
// We also have some key operations to do: // We also have some key operations to do:
// add an edge from previous DFA state to potentially new DFA state, D, // add an edge from previous DFA state to potentially new DFA state, D,
// upon current symbol but only if adding to work list, which means in all // upon current symbol but only if adding to work list, which means in all
// cases except no viable alternative (and possibly non-greedy decisions?) // cases except no viable alternative (and possibly non-greedy decisions?)
// collecting predicates and adding semantic context to DFA accept states // collecting predicates and adding semantic context to DFA accept states
@ -532,8 +532,8 @@ func (this *ParserATNSimulator) computeTargetState(dfa, previousD, t) {
this.addDFAEdge(dfa, previousD, t, ATNSimulator.ERROR) this.addDFAEdge(dfa, previousD, t, ATNSimulator.ERROR)
return ATNSimulator.ERROR return ATNSimulator.ERROR
} }
// create new target state we'll add to DFA after it's complete // create new target state we'll add to DFA after it's complete
var D = new DFAState(nil, reach) var D = NewDFAState(nil, reach)
var predictedAlt = this.getUniqueAlt(reach) var predictedAlt = this.getUniqueAlt(reach)
@ -705,9 +705,9 @@ func (this *ParserATNSimulator) computeReachSet(closure, t, fullCtx) {
console.log("in computeReachSet, starting closure: " + closure) console.log("in computeReachSet, starting closure: " + closure)
} }
if( this.mergeCache==nil) { if( this.mergeCache==nil) {
this.mergeCache = new DoubleDict() this.mergeCache = NewDoubleDict()
} }
var intermediate = new ATNConfigSet(fullCtx) var intermediate = NewATNConfigSet(fullCtx)
// Configurations already in a rule stop state indicate reaching the end // Configurations already in a rule stop state indicate reaching the end
// of the decision rule (local context) or end of the start rule (full // of the decision rule (local context) or end of the start rule (full
@ -743,7 +743,7 @@ func (this *ParserATNSimulator) computeReachSet(closure, t, fullCtx) {
var trans = c.state.transitions[j] var trans = c.state.transitions[j]
var target = this.getReachableTarget(trans, t) var target = this.getReachableTarget(trans, t)
if (target!=nil) { if (target!=nil) {
var cfg = new ATNConfig({state:target}, c) var cfg = NewATNConfig({state:target}, c)
intermediate.add(cfg, this.mergeCache) intermediate.add(cfg, this.mergeCache)
if(this.debug) { if(this.debug) {
console.log("added " + cfg + " to intermediate") console.log("added " + cfg + " to intermediate")
@ -780,8 +780,8 @@ func (this *ParserATNSimulator) computeReachSet(closure, t, fullCtx) {
// operation on the intermediate set to compute its initial value. // operation on the intermediate set to compute its initial value.
// //
if (reach==nil) { if (reach==nil) {
reach = new ATNConfigSet(fullCtx) reach = NewATNConfigSet(fullCtx)
var closureBusy = new Set() var closureBusy = NewSet()
var treatEofAsEpsilon = t == Token.EOF var treatEofAsEpsilon = t == Token.EOF
for (var k=0 k<intermediate.items.lengthk++) { for (var k=0 k<intermediate.items.lengthk++) {
this.closure(intermediate.items[k], reach, closureBusy, false, fullCtx, treatEofAsEpsilon) this.closure(intermediate.items[k], reach, closureBusy, false, fullCtx, treatEofAsEpsilon)
@ -843,14 +843,14 @@ func (this *ParserATNSimulator) computeReachSet(closure, t, fullCtx) {
// {@code configs}. // {@code configs}.
// //
// @return {@code configs} if all configurations in {@code configs} are in a // @return {@code configs} if all configurations in {@code configs} are in a
// rule stop state, otherwise return a new configuration set containing only // rule stop state, otherwise return a new configuration set containing only
// the configurations from {@code configs} which are in a rule stop state // the configurations from {@code configs} which are in a rule stop state
// //
func (this *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs, lookToEndOfRule) { func (this *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs, lookToEndOfRule) {
if (PredictionMode.allConfigsInRuleStopStates(configs)) { if (PredictionMode.allConfigsInRuleStopStates(configs)) {
return configs return configs
} }
var result = new ATNConfigSet(configs.fullCtx) var result = NewATNConfigSet(configs.fullCtx)
for(var i=0 i<configs.items.lengthi++) { for(var i=0 i<configs.items.lengthi++) {
var config = configs.items[i] var config = configs.items[i]
if (config.state instanceof RuleStopState) { if (config.state instanceof RuleStopState) {
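The return-the-input-unless-something-must-change idea described in the comment above, reduced to a Go sketch over a plain slice; the element type and predicate are generic placeholders rather than real ATN configurations.

// filterOrReuse returns its input untouched when every element already
// satisfies the predicate, and otherwise returns a filtered copy.
func filterOrReuse(configs []int, pred func(int) bool) []int {
    all := true
    for _, c := range configs {
        if !pred(c) {
            all = false
            break
        }
    }
    if all {
        return configs
    }
    out := make([]int, 0, len(configs))
    for _, c := range configs {
        if pred(c) {
            out = append(out, c)
        }
    }
    return out
}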
@ -861,7 +861,7 @@ func (this *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs, look
var nextTokens = this.atn.nextTokens(config.state) var nextTokens = this.atn.nextTokens(config.state)
if (nextTokens.contains(Token.EPSILON)) { if (nextTokens.contains(Token.EPSILON)) {
var endOfRuleState = this.atn.ruleToStopState[config.state.ruleIndex] var endOfRuleState = this.atn.ruleToStopState[config.state.ruleIndex]
result.add(new ATNConfig({state:endOfRuleState}, config), this.mergeCache) result.add(NewATNConfig({state:endOfRuleState}, config), this.mergeCache)
} }
} }
} }
@ -871,11 +871,11 @@ func (this *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs, look
func (this *ParserATNSimulator) computeStartState(p, ctx, fullCtx) { func (this *ParserATNSimulator) computeStartState(p, ctx, fullCtx) {
// always at least the implicit call to start rule // always at least the implicit call to start rule
var initialContext = predictionContextFromRuleContext(this.atn, ctx) var initialContext = predictionContextFromRuleContext(this.atn, ctx)
var configs = new ATNConfigSet(fullCtx) var configs = NewATNConfigSet(fullCtx)
for(var i=0i<p.transitions.lengthi++) { for(var i=0i<p.transitions.lengthi++) {
var target = p.transitions[i].target var target = p.transitions[i].target
var c = new ATNConfig({ state:target, alt:i+1, context:initialContext }, nil) var c = NewATNConfig({ state:target, alt:i+1, context:initialContext }, nil)
var closureBusy = new Set() var closureBusy = NewSet()
this.closure(c, configs, closureBusy, true, fullCtx, false) this.closure(c, configs, closureBusy, true, fullCtx, false)
} }
return configs return configs
@ -940,7 +940,7 @@ func (this *ParserATNSimulator) computeStartState(p, ctx, fullCtx) {
func (this *ParserATNSimulator) applyPrecedenceFilter(configs) { func (this *ParserATNSimulator) applyPrecedenceFilter(configs) {
var config var config
var statesFromAlt1 = [] var statesFromAlt1 = []
var configSet = new ATNConfigSet(configs.fullCtx) var configSet = NewATNConfigSet(configs.fullCtx)
for(var i=0 i<configs.items.length i++) { for(var i=0 i<configs.items.length i++) {
config = configs.items[i] config = configs.items[i]
// handle alt 1 first // handle alt 1 first
@ -954,7 +954,7 @@ func (this *ParserATNSimulator) applyPrecedenceFilter(configs) {
} }
statesFromAlt1[config.state.stateNumber] = config.context statesFromAlt1[config.state.stateNumber] = config.context
if (updatedContext != config.semanticContext) { if (updatedContext != config.semanticContext) {
configSet.add(new ATNConfig({semanticContext:updatedContext}, config), this.mergeCache) configSet.add(NewATNConfig({semanticContext:updatedContext}, config), this.mergeCache)
} else { } else {
configSet.add(config, this.mergeCache) configSet.add(config, this.mergeCache)
} }
@ -1034,7 +1034,7 @@ func (this *ParserATNSimulator) getPredicatePredictions(ambigAlts, altToPred) {
var pred = altToPred[i] var pred = altToPred[i]
// unpredicated is indicated by SemanticContext.NONE // unpredicated is indicated by SemanticContext.NONE
if( ambigAlts!=nil && ambigAlts.contains( i )) { if( ambigAlts!=nil && ambigAlts.contains( i )) {
pairs.push(new PredPrediction(pred, i)) pairs.push(NewPredPrediction(pred, i))
} }
if (pred != SemanticContext.NONE) { if (pred != SemanticContext.NONE) {
containsPredicate = true containsPredicate = true
@ -1130,14 +1130,14 @@ func (this *ParserATNSimulator) getAltThatFinishedDecisionEntryRule(configs) {
// those that have preds evaluating to true/false. If no pred, assume // those that have preds evaluating to true/false. If no pred, assume
// true pred and include in succeeded set. Returns Pair of sets. // true pred and include in succeeded set. Returns Pair of sets.
// //
// Create a new set so as not to alter the incoming parameter. // Create a new set so as not to alter the incoming parameter.
// //
// Assumption: the input stream has been restored to the starting point // Assumption: the input stream has been restored to the starting point
// prediction, which is where predicates need to evaluate. // prediction, which is where predicates need to evaluate.
// //
func (this *ParserATNSimulator) splitAccordingToSemanticValidity( configs, outerContext) { func (this *ParserATNSimulator) splitAccordingToSemanticValidity( configs, outerContext) {
var succeeded = new ATNConfigSet(configs.fullCtx) var succeeded = NewATNConfigSet(configs.fullCtx)
var failed = new ATNConfigSet(configs.fullCtx) var failed = NewATNConfigSet(configs.fullCtx)
for(var i=0i<configs.items.length i++) { for(var i=0i<configs.items.length i++) {
var c = configs.items[i] var c = configs.items[i]
if (c.semanticContext != SemanticContext.NONE) { if (c.semanticContext != SemanticContext.NONE) {
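A hedged Go sketch of the split described in the comment above: configurations without a predicate are assumed to succeed, and the rest go to whichever set their predicate's result selects. The config type and its predicate field are illustrative assumptions, not the runtime's types.

type predicatedConfig struct {
    pred func() bool // nil means "no predicate"
}

// splitBySemanticValidity partitions configs into (succeeded, failed),
// treating a missing predicate as an implicit {true}? predicate.
func splitBySemanticValidity(configs []predicatedConfig) (succeeded, failed []predicatedConfig) {
    for _, c := range configs {
        if c.pred == nil || c.pred() {
            succeeded = append(succeeded, c)
        } else {
            failed = append(failed, c)
        }
    }
    return succeeded, failed
}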
@ -1161,7 +1161,7 @@ func (this *ParserATNSimulator) splitAccordingToSemanticValidity( configs, outer
// includes pairs with nil predicates. // includes pairs with nil predicates.
// //
func (this *ParserATNSimulator) evalSemanticContext(predPredictions, outerContext, complete) { func (this *ParserATNSimulator) evalSemanticContext(predPredictions, outerContext, complete) {
var predictions = new BitSet() var predictions = NewBitSet()
for(var i=0i<predPredictions.lengthi++) { for(var i=0i<predPredictions.lengthi++) {
var pair = predPredictions[i] var pair = predPredictions[i]
if (pair.pred == SemanticContext.NONE) { if (pair.pred == SemanticContext.NONE) {
@ -1217,7 +1217,7 @@ func (this *ParserATNSimulator) closureCheckingStopState(config, configs, closur
for ( var i =0 i<config.context.length i++) { for ( var i =0 i<config.context.length i++) {
if (config.context.getReturnState(i) == PredictionContext.EMPTY_RETURN_STATE) { if (config.context.getReturnState(i) == PredictionContext.EMPTY_RETURN_STATE) {
if (fullCtx) { if (fullCtx) {
configs.add(new ATNConfig({state:config.state, context:PredictionContext.EMPTY}, config), this.mergeCache) configs.add(NewATNConfig({state:config.state, context:PredictionContext.EMPTY}, config), this.mergeCache)
continue continue
} else { } else {
// we have no context info, just chase follow links (if greedy) // we have no context info, just chase follow links (if greedy)
@ -1232,7 +1232,7 @@ func (this *ParserATNSimulator) closureCheckingStopState(config, configs, closur
returnState = this.atn.states[config.context.getReturnState(i)] returnState = this.atn.states[config.context.getReturnState(i)]
newContext = config.context.getParent(i) // "pop" return state newContext = config.context.getParent(i) // "pop" return state
var parms = {state:returnState, alt:config.alt, context:newContext, semanticContext:config.semanticContext} var parms = {state:returnState, alt:config.alt, context:newContext, semanticContext:config.semanticContext}
c = new ATNConfig(parms, nil) c = NewATNConfig(parms, nil)
// While we have context to pop back from, we may have // While we have context to pop back from, we may have
// gotten that context AFTER having fallen off a rule. // gotten that context AFTER having fallen off a rule.
// Make sure we track that we are now out of context. // Make sure we track that we are now out of context.
@ -1327,7 +1327,7 @@ func (this *ParserATNSimulator) getEpsilonTarget(config, t, collectPredicates, i
case Transition.ACTION: case Transition.ACTION:
return this.actionTransition(config, t) return this.actionTransition(config, t)
case Transition.EPSILON: case Transition.EPSILON:
return new ATNConfig({state:t.target}, config) return NewATNConfig({state:t.target}, config)
case Transition.ATOM: case Transition.ATOM:
case Transition.RANGE: case Transition.RANGE:
case Transition.SET: case Transition.SET:
@ -1335,7 +1335,7 @@ func (this *ParserATNSimulator) getEpsilonTarget(config, t, collectPredicates, i
// transition is traversed // transition is traversed
if (treatEofAsEpsilon) { if (treatEofAsEpsilon) {
if (t.matches(Token.EOF, 0, 1)) { if (t.matches(Token.EOF, 0, 1)) {
return new ATNConfig({state: t.target}, config) return NewATNConfig({state: t.target}, config)
} }
} }
return nil return nil
@ -1348,7 +1348,7 @@ func (this *ParserATNSimulator) actionTransition(config, t) {
if (this.debug) { if (this.debug) {
console.log("ACTION edge " + t.ruleIndex + ":" + t.actionIndex) console.log("ACTION edge " + t.ruleIndex + ":" + t.actionIndex)
} }
return new ATNConfig({state:t.target}, config) return NewATNConfig({state:t.target}, config)
} }
func (this *ParserATNSimulator) precedenceTransition(config, pt, collectPredicates, inContext, fullCtx) { func (this *ParserATNSimulator) precedenceTransition(config, pt, collectPredicates, inContext, fullCtx) {
@ -1371,14 +1371,14 @@ func (this *ParserATNSimulator) precedenceTransition(config, pt, collectPredica
var predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext) var predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext)
this._input.seek(currentPosition) this._input.seek(currentPosition)
if (predSucceeds) { if (predSucceeds) {
c = new ATNConfig({state:pt.target}, config) // no pred context c = NewATNConfig({state:pt.target}, config) // no pred context
} }
} else { } else {
newSemCtx = SemanticContext.andContext(config.semanticContext, pt.getPredicate()) newSemCtx = SemanticContext.andContext(config.semanticContext, pt.getPredicate())
c = new ATNConfig({state:pt.target, semanticContext:newSemCtx}, config) c = NewATNConfig({state:pt.target, semanticContext:newSemCtx}, config)
} }
} else { } else {
c = new ATNConfig({state:pt.target}, config) c = NewATNConfig({state:pt.target}, config)
} }
if (this.debug) { if (this.debug) {
console.log("config from pred transition=" + c) console.log("config from pred transition=" + c)
@ -1406,14 +1406,14 @@ func (this *ParserATNSimulator) predTransition(config, pt, collectPredicates, in
var predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext) var predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext)
this._input.seek(currentPosition) this._input.seek(currentPosition)
if (predSucceeds) { if (predSucceeds) {
c = new ATNConfig({state:pt.target}, config) // no pred context c = NewATNConfig({state:pt.target}, config) // no pred context
} }
} else { } else {
var newSemCtx = SemanticContext.andContext(config.semanticContext, pt.getPredicate()) var newSemCtx = SemanticContext.andContext(config.semanticContext, pt.getPredicate())
c = new ATNConfig({state:pt.target, semanticContext:newSemCtx}, config) c = NewATNConfig({state:pt.target, semanticContext:newSemCtx}, config)
} }
} else { } else {
c = new ATNConfig({state:pt.target}, config) c = NewATNConfig({state:pt.target}, config)
} }
if (this.debug) { if (this.debug) {
console.log("config from pred transition=" + c) console.log("config from pred transition=" + c)
@ -1427,7 +1427,7 @@ func (this *ParserATNSimulator) ruleTransition(config, t) {
} }
var returnState = t.followState var returnState = t.followState
var newContext = SingletonPredictionContext.create(config.context, returnState.stateNumber) var newContext = SingletonPredictionContext.create(config.context, returnState.stateNumber)
return new ATNConfig({state:t.target, context:newContext}, config ) return NewATNConfig({state:t.target, context:newContext}, config )
} }
func (this *ParserATNSimulator) getConflictingAlts(configs) { func (this *ParserATNSimulator) getConflictingAlts(configs) {
@ -1474,7 +1474,7 @@ func (this *ParserATNSimulator) getConflictingAlts(configs) {
func (this *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs) { func (this *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs) {
var conflictingAlts = nil var conflictingAlts = nil
if (configs.uniqueAlt!= ATN.INVALID_ALT_NUMBER) { if (configs.uniqueAlt!= ATN.INVALID_ALT_NUMBER) {
conflictingAlts = new BitSet() conflictingAlts = NewBitSet()
conflictingAlts.add(configs.uniqueAlt) conflictingAlts.add(configs.uniqueAlt)
} else { } else {
conflictingAlts = configs.conflictingAlts conflictingAlts = configs.conflictingAlts
@ -1525,7 +1525,7 @@ func (this *ParserATNSimulator) dumpDeadEndConfigs(nvae) {
} }
func (this *ParserATNSimulator) noViableAlt(input, outerContext, configs, startIndex) { func (this *ParserATNSimulator) noViableAlt(input, outerContext, configs, startIndex) {
return new NoViableAltException(this.parser, input, input.get(startIndex), input.LT(1), configs, outerContext) return NewNoViableAltException(this.parser, input, input.get(startIndex), input.LT(1), configs, outerContext)
} }
func (this *ParserATNSimulator) getUniqueAlt(configs) { func (this *ParserATNSimulator) getUniqueAlt(configs) {
@ -1614,14 +1614,14 @@ func (this *ParserATNSimulator) addDFAState(dfa, D) {
} }
dfa.states[hash] = D dfa.states[hash] = D
if (this.debug) { if (this.debug) {
console.log("adding new DFA state: " + D) console.log("adding NewDFA state: " + D)
} }
return D return D
} }
func (this *ParserATNSimulator) reportAttemptingFullContext(dfa, conflictingAlts, configs, startIndex, stopIndex) { func (this *ParserATNSimulator) reportAttemptingFullContext(dfa, conflictingAlts, configs, startIndex, stopIndex) {
if (this.debug || this.retry_debug) { if (this.debug || this.retry_debug) {
var interval = new Interval(startIndex, stopIndex + 1) var interval = NewInterval(startIndex, stopIndex + 1)
console.log("reportAttemptingFullContext decision=" + dfa.decision + ":" + configs + console.log("reportAttemptingFullContext decision=" + dfa.decision + ":" + configs +
", input=" + this.parser.getTokenStream().getText(interval)) ", input=" + this.parser.getTokenStream().getText(interval))
} }
@ -1632,7 +1632,7 @@ func (this *ParserATNSimulator) reportAttemptingFullContext(dfa, conflictingAlts
func (this *ParserATNSimulator) reportContextSensitivity(dfa, prediction, configs, startIndex, stopIndex) { func (this *ParserATNSimulator) reportContextSensitivity(dfa, prediction, configs, startIndex, stopIndex) {
if (this.debug || this.retry_debug) { if (this.debug || this.retry_debug) {
var interval = new Interval(startIndex, stopIndex + 1) var interval = NewInterval(startIndex, stopIndex + 1)
console.log("reportContextSensitivity decision=" + dfa.decision + ":" + configs + console.log("reportContextSensitivity decision=" + dfa.decision + ":" + configs +
", input=" + this.parser.getTokenStream().getText(interval)) ", input=" + this.parser.getTokenStream().getText(interval))
} }
@ -1645,7 +1645,7 @@ func (this *ParserATNSimulator) reportContextSensitivity(dfa, prediction, config
func (this *ParserATNSimulator) reportAmbiguity(dfa, D, startIndex, stopIndex, func (this *ParserATNSimulator) reportAmbiguity(dfa, D, startIndex, stopIndex,
exact, ambigAlts, configs ) { exact, ambigAlts, configs ) {
if (this.debug || this.retry_debug) { if (this.debug || this.retry_debug) {
var interval = new Interval(startIndex, stopIndex + 1) var interval = NewInterval(startIndex, stopIndex + 1)
console.log("reportAmbiguity " + ambigAlts + ":" + configs + console.log("reportAmbiguity " + ambigAlts + ":" + configs +
", input=" + this.parser.getTokenStream().getText(interval)) ", input=" + this.parser.getTokenStream().getText(interval))
} }

View File

@ -184,10 +184,10 @@ PredictionMode.hasSLLConflictTerminatingPrediction = function( mode, configs) {
// since we'll often fail over anyway. // since we'll often fail over anyway.
if (configs.hasSemanticContext) { if (configs.hasSemanticContext) {
// dup configs, tossing out semantic predicates // dup configs, tossing out semantic predicates
var dup = new ATNConfigSet() var dup = NewATNConfigSet()
for(var i=0i<configs.items.lengthi++) { for(var i=0i<configs.items.lengthi++) {
var c = configs.items[i] var c = configs.items[i]
c = new ATNConfig({semanticContext:SemanticContext.NONE}, c) c = NewATNConfig({semanticContext:SemanticContext.NONE}, c)
dup.add(c) dup.add(c)
} }
configs = dup configs = dup
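In Go, the "dup configs, tossing out semantic predicates" step above could look like the sketch below; atnConfig is a stand-in struct, and nil plays the role of SemanticContext.NONE, purely for illustration.

type atnConfig struct {
    state           int
    alt             int
    semanticContext interface{} // nil stands in for SemanticContext.NONE
}

// withoutPredicates copies every configuration, clearing its semantic context,
// so conflict detection can ignore predicates entirely.
func withoutPredicates(configs []atnConfig) []atnConfig {
    dup := make([]atnConfig, len(configs))
    for i, c := range configs {
        c.semanticContext = nil
        dup[i] = c
    }
    return dup
}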
@ -471,7 +471,7 @@ PredictionMode.getUniqueAlt = function(altsets) {
// @return the set of represented alternatives in {@code altsets} // @return the set of represented alternatives in {@code altsets}
// //
PredictionMode.getAlts = function(altsets) { PredictionMode.getAlts = function(altsets) {
var all = new BitSet() var all = NewBitSet()
altsets.map( function(alts) { all.or(alts) }) altsets.map( function(alts) { all.or(alts) })
return all return all
} }
@ -492,7 +492,7 @@ PredictionMode.getConflictingAltSubsets = function(configs) {
var key = "key_" + c.state.stateNumber + "/" + c.context var key = "key_" + c.state.stateNumber + "/" + c.context
var alts = configToAlts[key] || nil var alts = configToAlts[key] || nil
if (alts == nil) { if (alts == nil) {
alts = new BitSet() alts = NewBitSet()
configToAlts[key] = alts configToAlts[key] = alts
} }
alts.add(c.alt) alts.add(c.alt)
@ -516,11 +516,11 @@ PredictionMode.getConflictingAltSubsets = function(configs) {
// </pre> // </pre>
// //
PredictionMode.getStateToAltMap = function(configs) { PredictionMode.getStateToAltMap = function(configs) {
var m = new AltDict() var m = NewAltDict()
configs.items.map(function(c) { configs.items.map(function(c) {
var alts = m.get(c.state) var alts = m.get(c.state)
if (alts == nil) { if (alts == nil) {
alts = new BitSet() alts = NewBitSet()
m.put(c.state, alts) m.put(c.state, alts)
} }
alts.add(c.alt) alts.add(c.alt)
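A small Go sketch of the shape of getStateToAltMap: group the alternatives of every configuration under its state, using a map of maps since Go has no built-in set type. Field names are placeholders.

type altConfig struct{ state, alt int }

// stateToAlts maps each state number to the set of alternatives predicted
// from that state, with the inner map used as a set.
func stateToAlts(configs []altConfig) map[int]map[int]bool {
    m := make(map[int]map[int]bool)
    for _, c := range configs {
        alts, ok := m[c.state]
        if !ok {
            alts = make(map[int]bool)
            m[c.state] = alts
        }
        alts[c.alt] = true
    }
    return m
}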

View File

@ -43,7 +43,7 @@ func (this *SemanticContext) evaluate(parser, outerContext) {
// precedence predicates are evaluated.</li> // precedence predicates are evaluated.</li>
// <li>{@code this}: if the semantic context is not changed as a result of // <li>{@code this}: if the semantic context is not changed as a result of
// precedence predicate evaluation.</li> // precedence predicate evaluation.</li>
// <li>A non-{@code nil} {@link SemanticContext}: the new simplified // <li>A non-{@code nil} {@link SemanticContext}: the new simplified
// semantic context after precedence predicates are evaluated.</li> // semantic context after precedence predicates are evaluated.</li>
// </ul> // </ul>
// //
@ -58,7 +58,7 @@ SemanticContext.andContext = function(a, b) {
if (b == nil || b == SemanticContext.NONE) { if (b == nil || b == SemanticContext.NONE) {
return a return a
} }
var result = new AND(a, b) var result = NewAND(a, b)
if (result.opnds.length == 1) { if (result.opnds.length == 1) {
return result.opnds[0] return result.opnds[0]
} else { } else {
@ -76,7 +76,7 @@ SemanticContext.orContext = function(a, b) {
if (a == SemanticContext.NONE || b == SemanticContext.NONE) { if (a == SemanticContext.NONE || b == SemanticContext.NONE) {
return SemanticContext.NONE return SemanticContext.NONE
} }
var result = new OR(a, b) var result = NewOR(a, b)
if (result.opnds.length == 1) { if (result.opnds.length == 1) {
return result.opnds[0] return result.opnds[0]
} else { } else {
@ -98,7 +98,7 @@ Predicate.prototype.constructor = Predicate
//The default {@link SemanticContext}, which is semantically equivalent to //The default {@link SemanticContext}, which is semantically equivalent to
//a predicate of the form {@code {true}?}. //a predicate of the form {@code {true}?}.
// //
SemanticContext.NONE = new Predicate() SemanticContext.NONE = NewPredicate()
func (this *Predicate) evaluate(parser, outerContext) { func (this *Predicate) evaluate(parser, outerContext) {
@ -186,7 +186,7 @@ PrecedencePredicate.filterPrecedencePredicates = function(set) {
// //
func AND(a, b) { func AND(a, b) {
SemanticContext.call(this) SemanticContext.call(this)
var operands = new Set() var operands = NewSet()
if (a instanceof AND) { if (a instanceof AND) {
a.opnds.map(function(o) { a.opnds.map(function(o) {
operands.add(o) operands.add(o)
@ -291,7 +291,7 @@ func (this *AND) toString() {
// //
func OR(a, b) { func OR(a, b) {
SemanticContext.call(this) SemanticContext.call(this)
var operands = new Set() var operands = NewSet()
if (a instanceof OR) { if (a instanceof OR) {
a.opnds.map(function(o) { a.opnds.map(function(o) {
operands.add(o) operands.add(o)

View File

@ -82,7 +82,7 @@ AtomTransition.prototype = Object.create(Transition.prototype)
AtomTransition.prototype.constructor = AtomTransition AtomTransition.prototype.constructor = AtomTransition
func (this *AtomTransition) makeLabel() { func (this *AtomTransition) makeLabel() {
var s = new IntervalSet() var s = NewIntervalSet()
s.addOne(this.label_) s.addOne(this.label_)
return s return s
} }
@ -145,7 +145,7 @@ RangeTransition.prototype = Object.create(Transition.prototype)
RangeTransition.prototype.constructor = RangeTransition RangeTransition.prototype.constructor = RangeTransition
func (this *RangeTransition) makeLabel() { func (this *RangeTransition) makeLabel() {
var s = new IntervalSet() var s = NewIntervalSet()
s.addRange(this.start, this.stop) s.addRange(this.start, this.stop)
return s return s
} }
@ -184,7 +184,7 @@ func (this *PredicateTransition) matches(symbol, minVocabSymbol, maxVocabSymbol
} }
func (this *PredicateTransition) getPredicate() { func (this *PredicateTransition) getPredicate() {
return new Predicate(this.ruleIndex, this.predIndex, this.isCtxDependent) return NewPredicate(this.ruleIndex, this.predIndex, this.isCtxDependent)
} }
func (this *PredicateTransition) toString() { func (this *PredicateTransition) toString() {
@ -221,7 +221,7 @@ func SetTransition(target, set) {
if (set !=undefined && set !=nil) { if (set !=undefined && set !=nil) {
this.label = set this.label = set
} else { } else {
this.label = new IntervalSet() this.label = NewIntervalSet()
this.label.addOne(Token.INVALID_TYPE) this.label.addOne(Token.INVALID_TYPE)
} }
return this return this
@ -291,7 +291,7 @@ func (this *PrecedencePredicateTransition) matches(symbol, minVocabSymbol, maxV
} }
func (this *PrecedencePredicateTransition) getPredicate() { func (this *PrecedencePredicateTransition) getPredicate() {
return new PrecedencePredicate(this.precedence) return NewPrecedencePredicate(this.precedence)
} }
func (this *PrecedencePredicateTransition) toString() { func (this *PrecedencePredicateTransition) toString() {

View File

@ -24,7 +24,7 @@ func DFA(atnStartState, decision) {
this.decision = decision this.decision = decision
// A set of all DFA states. Use {@link Map} so we can get old state back // A set of all DFA states. Use {@link Map} so we can get old state back
// ({@link Set} only allows you to see if it's there). // ({@link Set} only allows you to see if it's there).
this._states = new DFAStatesSet() this._states = NewDFAStatesSet()
this.s0 = nil this.s0 = nil
// {@code true} if this DFA is for a precedence decision; otherwise, // {@code true} if this DFA is for a precedence decision; otherwise,
// {@code false}. This is the backing field for {@link //isPrecedenceDfa}, // {@code false}. This is the backing field for {@link //isPrecedenceDfa},
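The comment above is why the port wants a map rather than a plain set: given an equal state we need the originally stored object back. A Go sketch of that lookup-or-insert idea, keyed by an assumed precomputed hash string; the DFA state type is a placeholder.

type dfaState struct {
    hash  string // assumed precomputed key used for equality
    edges []*dfaState
}

// addState returns the already-stored state when an equal one exists,
// otherwise it stores and returns the new one.
func addState(states map[string]*dfaState, d *dfaState) *dfaState {
    if existing, ok := states[d.hash]; ok {
        return existing
    }
    states[d.hash] = d
    return d
}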
@ -95,9 +95,9 @@ func (this *DFA) setPrecedenceStartState(precedence, startState) {
func (this *DFA) setPrecedenceDfa(precedenceDfa) { func (this *DFA) setPrecedenceDfa(precedenceDfa) {
if (this.precedenceDfa!=precedenceDfa) { if (this.precedenceDfa!=precedenceDfa) {
this._states = new DFAStatesSet() this._states = NewDFAStatesSet()
if (precedenceDfa) { if (precedenceDfa) {
var precedenceState = new DFAState(new ATNConfigSet()) var precedenceState = NewDFAState(NewATNConfigSet())
precedenceState.edges = [] precedenceState.edges = []
precedenceState.isAcceptState = false precedenceState.isAcceptState = false
precedenceState.requiresFullContext = false precedenceState.requiresFullContext = false
@ -134,7 +134,7 @@ func (this *DFA) toString(literalNames, symbolicNames) {
if (this.s0 == nil) { if (this.s0 == nil) {
return "" return ""
} }
var serializer = new DFASerializer(this, literalNames, symbolicNames) var serializer = NewDFASerializer(this, literalNames, symbolicNames)
return serializer.toString() return serializer.toString()
} }
@ -142,7 +142,7 @@ func (this *DFA) toLexerString() {
if (this.s0 == nil) { if (this.s0 == nil) {
return "" return ""
} }
var serializer = new LexerDFASerializer(this) var serializer = NewLexerDFASerializer(this)
return serializer.toString() return serializer.toString()
} }

View File

@ -44,7 +44,7 @@ func DFAState(stateNumber, configs) {
stateNumber = -1 stateNumber = -1
} }
if (configs == nil) { if (configs == nil) {
configs = new ATNConfigSet() configs = NewATNConfigSet()
} }
this.stateNumber = stateNumber this.stateNumber = stateNumber
this.configs = configs this.configs = configs
@ -84,7 +84,7 @@ func DFAState(stateNumber, configs) {
// Get the set of all alts mentioned by all ATN configurations in this // Get the set of all alts mentioned by all ATN configurations in this
// DFA state. // DFA state.
func (this *DFAState) getAltSet() { func (this *DFAState) getAltSet() {
var alts = new Set() var alts = NewSet()
if (this.configs != nil) { if (this.configs != nil) {
for (var i = 0 i < this.configs.length i++) { for (var i = 0 i < this.configs.length i++) {
var c = this.configs[i] var c = this.configs[i]

View File

@ -44,7 +44,7 @@ func (this *DiagnosticErrorListener) reportAmbiguity(recognizer, dfa,
": ambigAlts=" + ": ambigAlts=" +
this.getConflictingAlts(ambigAlts, configs) + this.getConflictingAlts(ambigAlts, configs) +
", input='" + ", input='" +
recognizer.getTokenStream().getText(new Interval(startIndex, stopIndex)) + "'" recognizer.getTokenStream().getText(NewInterval(startIndex, stopIndex)) + "'"
recognizer.notifyErrorListeners(msg) recognizer.notifyErrorListeners(msg)
} }
@ -53,7 +53,7 @@ func (this *DiagnosticErrorListener) reportAttemptingFullContext(
var msg = "reportAttemptingFullContext d=" + var msg = "reportAttemptingFullContext d=" +
this.getDecisionDescription(recognizer, dfa) + this.getDecisionDescription(recognizer, dfa) +
", input='" + ", input='" +
recognizer.getTokenStream().getText(new Interval(startIndex, stopIndex)) + "'" recognizer.getTokenStream().getText(NewInterval(startIndex, stopIndex)) + "'"
recognizer.notifyErrorListeners(msg) recognizer.notifyErrorListeners(msg)
} }
@ -62,7 +62,7 @@ func (this *DiagnosticErrorListener) reportContextSensitivity(
var msg = "reportContextSensitivity d=" + var msg = "reportContextSensitivity d=" +
this.getDecisionDescription(recognizer, dfa) + this.getDecisionDescription(recognizer, dfa) +
", input='" + ", input='" +
recognizer.getTokenStream().getText(new Interval(startIndex, stopIndex)) + "'" recognizer.getTokenStream().getText(NewInterval(startIndex, stopIndex)) + "'"
recognizer.notifyErrorListeners(msg) recognizer.notifyErrorListeners(msg)
} }
@ -96,7 +96,7 @@ func (this *DiagnosticErrorListener) getConflictingAlts(reportedAlts, configs) {
if (reportedAlts != nil) { if (reportedAlts != nil) {
return reportedAlts return reportedAlts
} }
var result = new BitSet() var result = NewBitSet()
for (var i = 0 i < configs.items.length i++) { for (var i = 0 i < configs.items.length i++) {
result.add(configs.items[i].alt) result.add(configs.items[i].alt)
} }

View File

@ -31,7 +31,7 @@ ConsoleErrorListener.prototype.constructor = ConsoleErrorListener
// //
// Provides a default instance of {@link ConsoleErrorListener}. // Provides a default instance of {@link ConsoleErrorListener}.
// //
ConsoleErrorListener.INSTANCE = new ConsoleErrorListener() ConsoleErrorListener.INSTANCE = NewConsoleErrorListener()
// //
// {@inheritDoc} // {@inheritDoc}

View File

@ -234,13 +234,13 @@ func (this *DefaultErrorStrategy) sync(recognizer) {
if( this.singleTokenDeletion(recognizer) != nil) { if( this.singleTokenDeletion(recognizer) != nil) {
return return
} else { } else {
throw new InputMismatchException(recognizer) throw NewInputMismatchException(recognizer)
} }
break break
case ATNState.PLUS_LOOP_BACK: case ATNState.PLUS_LOOP_BACK:
case ATNState.STAR_LOOP_BACK: case ATNState.STAR_LOOP_BACK:
this.reportUnwantedToken(recognizer) this.reportUnwantedToken(recognizer)
var expecting = new IntervalSet() var expecting = NewIntervalSet()
expecting.addSet(recognizer.getExpectedTokens()) expecting.addSet(recognizer.getExpectedTokens())
var whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer)) var whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer))
this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule) this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
@ -265,7 +265,7 @@ func (this *DefaultErrorStrategy) reportNoViableAlternative(recognizer, e) {
if (e.startToken.type==Token.EOF) { if (e.startToken.type==Token.EOF) {
input = "<EOF>" input = "<EOF>"
} else { } else {
input = tokens.getText(new Interval(e.startToken, e.offendingToken)) input = tokens.getText(NewInterval(e.startToken, e.offendingToken))
} }
} else { } else {
input = "<unknown input>" input = "<unknown input>"
@ -424,7 +424,7 @@ func (this *DefaultErrorStrategy) recoverInline(recognizer) {
return this.getMissingSymbol(recognizer) return this.getMissingSymbol(recognizer)
} }
// even that didn't work; must throw the exception // even that didn't work; must throw the exception
throw new InputMismatchException(recognizer) throw NewInputMismatchException(recognizer)
} }
// //
@ -547,7 +547,7 @@ func (this *DefaultErrorStrategy) getExpectedTokens(recognizer) {
// to use t.toString() (which, for CommonToken, dumps everything about // to use t.toString() (which, for CommonToken, dumps everything about
// the token). This is better than forcing you to override a method in // the token). This is better than forcing you to override a method in
// your token objects because you don't have to go modify your lexer // your token objects because you don't have to go modify your lexer
// so that it creates a new Java type. // so that it creates a new Java type.
// //
func (this *DefaultErrorStrategy) getTokenErrorDisplay(t) { func (this *DefaultErrorStrategy) getTokenErrorDisplay(t) {
if (t == nil) { if (t == nil) {
@ -666,7 +666,7 @@ func (this *DefaultErrorStrategy) escapeWSAndQuote(s) {
func (this *DefaultErrorStrategy) getErrorRecoverySet(recognizer) { func (this *DefaultErrorStrategy) getErrorRecoverySet(recognizer) {
var atn = recognizer._interp.atn var atn = recognizer._interp.atn
var ctx = recognizer._ctx var ctx = recognizer._ctx
var recoverSet = new IntervalSet() var recoverSet = NewIntervalSet()
while (ctx != nil && ctx.invokingState>=0) { while (ctx != nil && ctx.invokingState>=0) {
// compute what follows who invoked us // compute what follows who invoked us
var invokingState = atn.states[ctx.invokingState] var invokingState = atn.states[ctx.invokingState]
@ -712,7 +712,7 @@ func (this *DefaultErrorStrategy) consumeUntil(recognizer, set) {
// </ul> // </ul>
// //
// <p> // <p>
// {@code myparser.setErrorHandler(new BailErrorStrategy())}</p> // {@code myparser.setErrorHandler(NewBailErrorStrategy())}</p>
// //
// @see Parser//setErrorHandler(ANTLRErrorStrategy) // @see Parser//setErrorHandler(ANTLRErrorStrategy)
// //
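Ported to Go, the bail-out behaviour described above amounts to turning the first recognition error into an immediately propagated error rather than attempting in-line recovery. A generic sketch under that assumption, not the runtime's ErrorStrategy interface:

// parseCancellation mirrors ParseCancellationException: it wraps the original
// recognition error so callers can tell a bailed-out parse from other failures.
type parseCancellation struct{ cause error }

func (e parseCancellation) Error() string { return "parse cancelled: " + e.cause.Error() }

// bailOnError aborts on the first error instead of attempting recovery.
func bailOnError(step func() error) error {
    if err := step(); err != nil {
        return parseCancellation{cause: err}
    }
    return nil
}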
@ -735,14 +735,14 @@ func (this *BailErrorStrategy) recover(recognizer, e) {
context.exception = e context.exception = e
context = context.parentCtx context = context.parentCtx
} }
throw new ParseCancellationException(e) throw NewParseCancellationException(e)
} }
// Make sure we don't attempt to recover inline if the parser // Make sure we don't attempt to recover inline if the parser
// successfully recovers, it won't throw an exception. // successfully recovers, it won't throw an exception.
// //
func (this *BailErrorStrategy) recoverInline(recognizer) { func (this *BailErrorStrategy) recoverInline(recognizer) {
this.recover(recognizer, new InputMismatchException(recognizer)) this.recover(recognizer, NewInputMismatchException(recognizer))
} }
// Make sure we don't attempt to recover from problems in subrules.// // Make sure we don't attempt to recover from problems in subrules.//

View File

@ -13,7 +13,7 @@ func RecognitionException(params) {
if (!!Error.captureStackTrace) { if (!!Error.captureStackTrace) {
Error.captureStackTrace(this, RecognitionException) Error.captureStackTrace(this, RecognitionException)
} else { } else {
var stack = new Error().stack var stack = NewError().stack
} }
this.message = params.message this.message = params.message
this.recognizer = params.recognizer this.recognizer = params.recognizer

View File

@ -6,7 +6,7 @@ package tree
var Token = require('./../Token').Token var Token = require('./../Token').Token
var Interval = require('./../IntervalSet').Interval var Interval = require('./../IntervalSet').Interval
var INVALID_INTERVAL = new Interval(-1, -2) var INVALID_INTERVAL = NewInterval(-1, -2)
var Utils = require('../Utils.js') var Utils = require('../Utils.js')
@ -128,7 +128,7 @@ func (this *TerminalNodeImpl) getSourceInterval() {
return INVALID_INTERVAL return INVALID_INTERVAL
} }
var tokenIndex = this.symbol.tokenIndex var tokenIndex = this.symbol.tokenIndex
return new Interval(tokenIndex, tokenIndex) return NewInterval(tokenIndex, tokenIndex)
} }
func (this *TerminalNodeImpl) getChildCount() { func (this *TerminalNodeImpl) getChildCount() {
@ -211,7 +211,7 @@ func (this *ParseTreeWalker) exitRule(listener, r) {
listener.exitEveryRule(ctx) listener.exitEveryRule(ctx)
} }
ParseTreeWalker.DEFAULT = new ParseTreeWalker() ParseTreeWalker.DEFAULT = NewParseTreeWalker()