Remove generics from all runtime code which did not use them to provide type safety
parent 3470978749
commit d220f90d3b
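What the change means for user code, shown as a minimal sketch against the post-commit runtime (a hypothetical listener, not part of this commit): listener and tree-node types lose their <Token>/<Symbol> parameters, and getSymbol()/getStart() return Token directly.

import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.tree.ErrorNode;
import org.antlr.v4.runtime.tree.ParseTreeListener;
import org.antlr.v4.runtime.tree.TerminalNode;

// Before this commit the same class would have implemented ParseTreeListener<Token>
// and received TerminalNode<Token>/ErrorNode<Token>/ParserRuleContext<Token> arguments.
public class PrintingListener implements ParseTreeListener {
    @Override
    public void visitTerminal(TerminalNode node) {
        Token t = node.getSymbol();               // getSymbol() now returns Token directly
        System.out.println("token: " + t.getText());
    }

    @Override
    public void visitErrorNode(ErrorNode node) {
        System.out.println("error: " + node.getSymbol().getText());
    }

    @Override
    public void enterEveryRule(ParserRuleContext ctx) {
        System.out.println("enter rule, first token: " + ctx.getStart().getText());
    }

    @Override
    public void exitEveryRule(ParserRuleContext ctx) { }
}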
@@ -43,7 +43,7 @@ public class BailErrorStrategy extends DefaultErrorStrategy {
 */
 @Override
 public void recover(Parser recognizer, RecognitionException e) {
-for (ParserRuleContext<?> context = recognizer.getContext(); context != null; context = context.getParent()) {
+for (ParserRuleContext context = recognizer.getContext(); context != null; context = context.getParent()) {
 context.exception = e;
 }

@@ -58,7 +58,7 @@ public class BailErrorStrategy extends DefaultErrorStrategy {
 throws RecognitionException
 {
 InputMismatchException e = new InputMismatchException(recognizer);
-for (ParserRuleContext<?> context = recognizer.getContext(); context != null; context = context.getParent()) {
+for (ParserRuleContext context = recognizer.getContext(); context != null; context = context.getParent()) {
 context.exception = e;
 }

@@ -51,7 +51,7 @@ import java.util.Set;
 * This is not a subclass of UnbufferedTokenStream because I don't want
 * to confuse small moving window of tokens it uses for the full buffer.
 */
-public class BufferedTokenStream<T extends Token> implements TokenStream {
+public class BufferedTokenStream implements TokenStream {
 protected TokenSource tokenSource;

 /** Record every single token pulled from the source so we can reproduce
@@ -59,7 +59,7 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 * as its moving window moves through the input. This list captures
 * everything so we can access complete input text.
 */
-protected List<T> tokens = new ArrayList<T>(100);
+protected List<Token> tokens = new ArrayList<Token>(100);

 /** Track the last mark() call result value for use in rewind(). */
 protected int lastMarker;
@@ -136,7 +136,7 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 /** add n elements to buffer */
 protected void fetch(int n) {
 for (int i=1; i<=n; i++) {
-T t = (T)tokenSource.nextToken();
+Token t = tokenSource.nextToken();
 if ( t instanceof WritableToken ) {
 ((WritableToken)t).setTokenIndex(tokens.size());
 }
@@ -146,7 +146,7 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 }

 @Override
-public T get(int i) {
+public Token get(int i) {
 if ( i < 0 || i >= tokens.size() ) {
 throw new IndexOutOfBoundsException("token index "+i+" out of range 0.."+(tokens.size()-1));
 }
@@ -154,13 +154,13 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 }

 /** Get all tokens from start..stop inclusively */
-public List<T> get(int start, int stop) {
+public List<Token> get(int start, int stop) {
 if ( start<0 || stop<0 ) return null;
 if ( p == -1 ) setup();
-List<T> subset = new ArrayList<T>();
+List<Token> subset = new ArrayList<Token>();
 if ( stop>=tokens.size() ) stop = tokens.size()-1;
 for (int i = start; i <= stop; i++) {
-T t = tokens.get(i);
+Token t = tokens.get(i);
 if ( t.getType()==Token.EOF ) break;
 subset.add(t);
 }
@@ -170,13 +170,13 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 @Override
 public int LA(int i) { return LT(i).getType(); }

-protected T LB(int k) {
+protected Token LB(int k) {
 if ( (p-k)<0 ) return null;
 return tokens.get(p-k);
 }

 @Override
-public T LT(int k) {
+public Token LT(int k) {
 if ( p == -1 ) setup();
 if ( k==0 ) return null;
 if ( k < 0 ) return LB(-k);
@@ -200,9 +200,9 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 p = -1;
 }

-public List<T> getTokens() { return tokens; }
+public List<Token> getTokens() { return tokens; }

-public List<T> getTokens(int start, int stop) {
+public List<Token> getTokens(int start, int stop) {
 return getTokens(start, stop, null);
 }

@@ -210,7 +210,7 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 * the token type BitSet. Return null if no tokens were found. This
 * method looks at both on and off channel tokens.
 */
-public List<T> getTokens(int start, int stop, Set<Integer> types) {
+public List<Token> getTokens(int start, int stop, Set<Integer> types) {
 if ( p == -1 ) setup();
 if ( start<0 || stop>=tokens.size() ||
 stop<0 || start>=tokens.size() )
@@ -221,9 +221,9 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 if ( start>stop ) return null;

 // list = tokens[start:stop]:{T t, t.getType() in types}
-List<T> filteredTokens = new ArrayList<T>();
+List<Token> filteredTokens = new ArrayList<Token>();
 for (int i=start; i<=stop; i++) {
-T t = tokens.get(i);
+Token t = tokens.get(i);
 if ( types==null || types.contains(t.getType()) ) {
 filteredTokens.add(t);
 }
@@ -234,7 +234,7 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 return filteredTokens;
 }

-public List<T> getTokens(int start, int stop, int ttype) {
+public List<Token> getTokens(int start, int stop, int ttype) {
 HashSet<Integer> s = new HashSet<Integer>(ttype);
 s.add(ttype);
 return getTokens(start,stop, s);
@@ -272,7 +272,7 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 * the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
 * EOF. If channel is -1, find any non default channel token.
 */
-public List<T> getHiddenTokensToRight(int tokenIndex, int channel) {
+public List<Token> getHiddenTokensToRight(int tokenIndex, int channel) {
 if ( p == -1 ) setup();
 if ( tokenIndex<0 || tokenIndex>=tokens.size() ) {
 throw new IndexOutOfBoundsException(tokenIndex+" not in 0.."+(tokens.size()-1));
@@ -293,7 +293,7 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 * the current token up until we see a token on DEFAULT_TOKEN_CHANNEL
 * of EOF.
 */
-public List<T> getHiddenTokensToRight(int tokenIndex) {
+public List<Token> getHiddenTokensToRight(int tokenIndex) {
 return getHiddenTokensToRight(tokenIndex, -1);
 }

@@ -301,7 +301,7 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 * the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
 * If channel is -1, find any non default channel token.
 */
-public List<T> getHiddenTokensToLeft(int tokenIndex, int channel) {
+public List<Token> getHiddenTokensToLeft(int tokenIndex, int channel) {
 if ( p == -1 ) setup();
 if ( tokenIndex<0 || tokenIndex>=tokens.size() ) {
 throw new IndexOutOfBoundsException(tokenIndex+" not in 0.."+(tokens.size()-1));
@@ -320,14 +320,14 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
 /** Collect all hidden tokens (any off-default channel) to the left of
 * the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
 */
-public List<T> getHiddenTokensToLeft(int tokenIndex) {
+public List<Token> getHiddenTokensToLeft(int tokenIndex) {
 return getHiddenTokensToLeft(tokenIndex, -1);
 }

-protected List<T> filterForChannel(int from, int to, int channel) {
-List<T> hidden = new ArrayList<T>();
+protected List<Token> filterForChannel(int from, int to, int channel) {
+List<Token> hidden = new ArrayList<Token>();
 for (int i=from; i<=to; i++) {
-T t = tokens.get(i);
+Token t = tokens.get(i);
 if ( channel==-1 ) {
 if ( t.getChannel()!= Lexer.DEFAULT_TOKEN_CHANNEL ) hidden.add(t);
 }
@@ -362,7 +362,7 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {

 StringBuilder buf = new StringBuilder();
 for (int i = start; i <= stop; i++) {
-T t = tokens.get(i);
+Token t = tokens.get(i);
 if ( t.getType()==Token.EOF ) break;
 buf.append(t.getText());
 }

@@ -46,7 +46,7 @@ package org.antlr.v4.runtime;
 * @see UnbufferedTokenStream
 * @see BufferedTokenStream
 */
-public class CommonTokenStream extends BufferedTokenStream<Token> {
+public class CommonTokenStream extends BufferedTokenStream {
 /** Skip tokens on any channel but this one; this is how we skip whitespace... */
 protected int channel = Token.DEFAULT_CHANNEL;

@@ -64,7 +64,7 @@ public class NoViableAltException extends RecognitionException {
 @NotNull Token startToken,
 @NotNull Token offendingToken,
 @Nullable ATNConfigSet deadEndConfigs,
-@NotNull ParserRuleContext<?> ctx)
+@NotNull ParserRuleContext ctx)
 {
 super(recognizer, input, ctx);
 this.deadEndConfigs = deadEndConfigs;

@@ -46,45 +46,45 @@ import java.util.List;

 /** This is all the parsing support code essentially; most of it is error recovery stuff. */
 public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
-public class TraceListener implements ParseTreeListener<Token> {
+public class TraceListener implements ParseTreeListener {
 @Override
-public void enterEveryRule(ParserRuleContext<Token> ctx) {
+public void enterEveryRule(ParserRuleContext ctx) {
 System.out.println("enter " + getRuleNames()[ctx.getRuleIndex()] +
 ", LT(1)=" + _input.LT(1).getText());
 }

 @Override
-public void visitTerminal(TerminalNode<Token> node) {
+public void visitTerminal(TerminalNode node) {
 System.out.println("consume "+node.getSymbol()+" rule "+
 getRuleNames()[_ctx.getRuleIndex()]+
 " alt="+_ctx.altNum);
 }

 @Override
-public void visitErrorNode(ErrorNode<Token> node) {
+public void visitErrorNode(ErrorNode node) {
 }

 @Override
-public void exitEveryRule(ParserRuleContext<Token> ctx) {
+public void exitEveryRule(ParserRuleContext ctx) {
 System.out.println("exit "+getRuleNames()[ctx.getRuleIndex()]+
 ", LT(1)="+_input.LT(1).getText());
 }
 }

-public static class TrimToSizeListener implements ParseTreeListener<Token> {
+public static class TrimToSizeListener implements ParseTreeListener {
 public static final TrimToSizeListener INSTANCE = new TrimToSizeListener();

 @Override
-public void enterEveryRule(ParserRuleContext<Token> ctx) { }
+public void enterEveryRule(ParserRuleContext ctx) { }

 @Override
-public void visitTerminal(TerminalNode<Token> node) { }
+public void visitTerminal(TerminalNode node) { }

 @Override
-public void visitErrorNode(ErrorNode<Token> node) { }
+public void visitErrorNode(ErrorNode node) { }

 @Override
-public void exitEveryRule(ParserRuleContext<Token> ctx) {
+public void exitEveryRule(ParserRuleContext ctx) {
 if (ctx.children instanceof ArrayList) {
 ((ArrayList<?>)ctx.children).trimToSize();
 }
@@ -100,7 +100,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 * When somebody calls the start rule, this gets set to the
 * root context.
 */
-protected ParserRuleContext<Token> _ctx;
+protected ParserRuleContext _ctx;

 protected boolean _buildParseTrees = true;

@@ -114,7 +114,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 * ParseTreeListener with ParseTreeWalker.
 * @see ParseTreeWalker
 */
-protected List<ParseTreeListener<Token>> _parseListeners;
+protected List<ParseTreeListener> _parseListeners;

 /** Did the recognizer encounter a syntax error? Track how many. */
 protected int _syntaxErrors = 0;
@@ -237,7 +237,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 // return traceATNStates;
 // }

-public List<ParseTreeListener<Token>> getParseListeners() {
+public List<ParseTreeListener> getParseListeners() {
 return _parseListeners;
 }

@@ -250,15 +250,15 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 * ParseTreeListener to a ParseTreeWalker instead of giving it to
 * the parser!!!!
 */
-public void addParseListener(ParseTreeListener<Token> listener) {
+public void addParseListener(ParseTreeListener listener) {
 if ( listener==null ) return;
 if ( _parseListeners==null ) {
-_parseListeners = new ArrayList<ParseTreeListener<Token>>();
+_parseListeners = new ArrayList<ParseTreeListener>();
 }
 this._parseListeners.add(listener);
 }

-public void removeParseListener(ParseTreeListener<Token> l) {
+public void removeParseListener(ParseTreeListener l) {
 if ( l==null ) return;
 if ( _parseListeners!=null ) {
 _parseListeners.remove(l);
@@ -278,7 +278,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 * ParseTreeListener interface. This is not for the average user.
 */
 public void triggerEnterRuleEvent() {
-for (ParseTreeListener<Token> l : _parseListeners) {
+for (ParseTreeListener l : _parseListeners) {
 l.enterEveryRule(_ctx);
 _ctx.enterRule(l);
 }
@@ -292,7 +292,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 public void triggerExitRuleEvent() {
 // reverse order walk of listeners
 for (int i = _parseListeners.size()-1; i >= 0; i--) {
-ParseTreeListener<Token> l = _parseListeners.get(i);
+ParseTreeListener l = _parseListeners.get(i);
 _ctx.exitRule(l);
 l.exitEveryRule(_ctx);
 }
@@ -385,17 +385,17 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 boolean hasListener = _parseListeners != null && !_parseListeners.isEmpty();
 if (_buildParseTrees || hasListener) {
 if ( _errHandler.inErrorRecoveryMode(this) ) {
-ErrorNode<Token> node = _ctx.addErrorNode(o);
+ErrorNode node = _ctx.addErrorNode(o);
 if (_parseListeners != null) {
-for (ParseTreeListener<Token> listener : _parseListeners) {
+for (ParseTreeListener listener : _parseListeners) {
 listener.visitErrorNode(node);
 }
 }
 }
 else {
-TerminalNode<Token> node = _ctx.addChild(o);
+TerminalNode node = _ctx.addChild(o);
 if (_parseListeners != null) {
-for (ParseTreeListener<Token> listener : _parseListeners) {
+for (ParseTreeListener listener : _parseListeners) {
 listener.visitTerminal(node);
 }
 }
@@ -405,7 +405,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 }

 protected void addContextToParseTree() {
-ParserRuleContext<?> parent = (ParserRuleContext<?>)_ctx.parent;
+ParserRuleContext parent = (ParserRuleContext)_ctx.parent;
 // add current context to parent if we have a parent
 if ( parent!=null ) {
 parent.addChild(_ctx);
@@ -419,7 +419,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 * This is flexible because users do not have to regenerate parsers
 * to get trace facilities.
 */
-public void enterRule(ParserRuleContext<Token> localctx, int ruleIndex) {
+public void enterRule(ParserRuleContext localctx, int ruleIndex) {
 _ctx = localctx;
 _ctx.start = _input.LT(1);
 if (_buildParseTrees) addContextToParseTree();
@@ -430,14 +430,14 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 _ctx.stop = _input.LT(-1);
 // trigger event on _ctx, before it reverts to parent
 if ( _parseListeners != null) triggerExitRuleEvent();
-_ctx = (ParserRuleContext<Token>)_ctx.parent;
+_ctx = (ParserRuleContext)_ctx.parent;
 }

-public void enterOuterAlt(ParserRuleContext<Token> localctx, int altNum) {
+public void enterOuterAlt(ParserRuleContext localctx, int altNum) {
 // if we have new localctx, make sure we replace existing ctx
 // that is previous child of parse tree
 if ( _buildParseTrees && _ctx != localctx ) {
-ParserRuleContext<?> parent = (ParserRuleContext<?>)_ctx.parent;
+ParserRuleContext parent = (ParserRuleContext)_ctx.parent;
 if ( parent!=null ) {
 parent.removeLastChild();
 parent.addChild(localctx);
@@ -447,7 +447,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 _ctx.altNum = altNum;
 }

-public void enterRecursionRule(ParserRuleContext<Token> localctx, int ruleIndex) {
+public void enterRecursionRule(ParserRuleContext localctx, int ruleIndex) {
 _ctx = localctx;
 _ctx.start = _input.LT(1);
 if (_parseListeners != null) {
@@ -456,8 +456,8 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 }

 /* like enterRule but for recursive rules */
-public void pushNewRecursionContext(ParserRuleContext<Token> localctx, int state, int ruleIndex) {
-ParserRuleContext<Token> previous = _ctx;
+public void pushNewRecursionContext(ParserRuleContext localctx, int state, int ruleIndex) {
+ParserRuleContext previous = _ctx;
 previous.parent = localctx;
 previous.invokingState = state;
 previous.stop = _input.LT(-1);
@@ -473,15 +473,15 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 }
 }

-public void unrollRecursionContexts(ParserRuleContext<Token> _parentctx) {
+public void unrollRecursionContexts(ParserRuleContext _parentctx) {
 _ctx.stop = _input.LT(-1);
-ParserRuleContext<Token> retctx = _ctx; // save current ctx (return value)
+ParserRuleContext retctx = _ctx; // save current ctx (return value)

 // unroll so _ctx is as it was before call to recursive method
 if ( _parseListeners != null ) {
 while ( _ctx != _parentctx ) {
 triggerExitRuleEvent();
-_ctx = (ParserRuleContext<Token>)_ctx.parent;
+_ctx = (ParserRuleContext)_ctx.parent;
 }
 }
 else {
@@ -492,16 +492,16 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 if (_buildParseTrees) _parentctx.addChild(retctx); // add return ctx into invoking rule's tree
 }

-public ParserRuleContext<Token> getInvokingContext(int ruleIndex) {
-ParserRuleContext<Token> p = _ctx;
+public ParserRuleContext getInvokingContext(int ruleIndex) {
+ParserRuleContext p = _ctx;
 while ( p!=null ) {
 if ( p.getRuleIndex() == ruleIndex ) return p;
-p = (ParserRuleContext<Token>)p.parent;
+p = (ParserRuleContext)p.parent;
 }
 return null;
 }

-public ParserRuleContext<Token> getContext() {
+public ParserRuleContext getContext() {
 return _ctx;
 }

@@ -513,7 +513,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 public boolean isExpectedToken(int symbol) {
 // return getInterpreter().atn.nextTokens(_ctx);
 ATN atn = getInterpreter().atn;
-ParserRuleContext<?> ctx = _ctx;
+ParserRuleContext ctx = _ctx;
 ATNState s = atn.states.get(ctx.s);
 IntervalSet following = atn.nextTokens(s);
 if (following.contains(symbol)) {
@@ -530,7 +530,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 return true;
 }

-ctx = (ParserRuleContext<?>)ctx.parent;
+ctx = (ParserRuleContext)ctx.parent;
 }

 if ( following.contains(Token.EPSILON) && symbol == Token.EOF ) {
@@ -545,7 +545,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 */
 public IntervalSet getExpectedTokens() {
 ATN atn = getInterpreter().atn;
-ParserRuleContext<?> ctx = _ctx;
+ParserRuleContext ctx = _ctx;
 ATNState s = atn.states.get(ctx.s);
 IntervalSet following = atn.nextTokens(s);
 // System.out.println("following "+s+"="+following);
@@ -559,7 +559,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 following = atn.nextTokens(rt.followState);
 expected.addAll(following);
 expected.remove(Token.EPSILON);
-ctx = (ParserRuleContext<?>)ctx.parent;
+ctx = (ParserRuleContext)ctx.parent;
 }
 if ( following.contains(Token.EPSILON) ) {
 expected.add(Token.EOF);
@@ -583,7 +583,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 // return atn.nextTokens(s, ctx);
 // }

-public ParserRuleContext<Token> getRuleContext() { return _ctx; }
+public ParserRuleContext getRuleContext() { return _ctx; }

 /** Return List<String> of the rule names in your parser instance
 * leading up to a call to the current rule. You could override if

@@ -64,7 +64,7 @@ import java.util.List;
 * group values such as this aggregate. The getters/setters are there to
 * satisfy the superclass interface.
 */
-public class ParserRuleContext<Symbol extends Token> extends RuleContext {
+public class ParserRuleContext extends RuleContext {
 /** If we are debugging or building a parse tree for a visitor,
 * we need to track all of the tokens and rule invocations associated
 * with this rule's context. This is empty for parsing w/o tree constr.
@@ -100,7 +100,7 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
 */
 public int s = -1;

-public Symbol start, stop;
+public Token start, stop;

 /** Set during parsing to identify which alt of rule parser is in. */
 public int altNum;
@@ -114,7 +114,7 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
 public ParserRuleContext() { }

 /** COPY a ctx (I'm deliberately not using copy constructor) */
-public void copyFrom(ParserRuleContext<Symbol> ctx) {
+public void copyFrom(ParserRuleContext ctx) {
 // from RuleContext
 this.parent = ctx.parent;
 this.s = ctx.s;
@@ -124,22 +124,22 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
 this.stop = ctx.stop;
 }

-public ParserRuleContext(@Nullable ParserRuleContext<Symbol> parent, int invokingStateNumber, int stateNumber) {
+public ParserRuleContext(@Nullable ParserRuleContext parent, int invokingStateNumber, int stateNumber) {
 super(parent, invokingStateNumber);
 this.s = stateNumber;
 }

-public ParserRuleContext(@Nullable ParserRuleContext<Symbol> parent, int stateNumber) {
+public ParserRuleContext(@Nullable ParserRuleContext parent, int stateNumber) {
 this(parent, parent!=null ? parent.s : -1 /* invoking state */, stateNumber);
 }

 // Double dispatch methods for listeners

-public void enterRule(ParseTreeListener<Symbol> listener) { }
-public void exitRule(ParseTreeListener<Symbol> listener) { }
+public void enterRule(ParseTreeListener listener) { }
+public void exitRule(ParseTreeListener listener) { }

 /** Does not set parent link; other add methods do that */
-public TerminalNode addChild(TerminalNode<Symbol> t) {
+public TerminalNode addChild(TerminalNode t) {
 if ( children==null ) children = new ArrayList<ParseTree>();
 children.add(t);
 return t;
@@ -166,15 +166,15 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
 // states.add(s);
 // }

-public TerminalNode addChild(Symbol matchedToken) {
-TerminalNodeImpl<Symbol> t = new TerminalNodeImpl<Symbol>(matchedToken);
+public TerminalNode addChild(Token matchedToken) {
+TerminalNodeImpl t = new TerminalNodeImpl(matchedToken);
 addChild(t);
 t.parent = this;
 return t;
 }

-public ErrorNode<Symbol> addErrorNode(Symbol badToken) {
-ErrorNodeImpl<Symbol> t = new ErrorNodeImpl<Symbol>(badToken);
+public ErrorNode addErrorNode(Token badToken) {
+ErrorNodeImpl t = new ErrorNodeImpl(badToken);
 addChild(t);
 t.parent = this;
 return t;
@@ -182,8 +182,8 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {

 @Override
 /** Override to make type more specific */
-public ParserRuleContext<Symbol> getParent() {
-return (ParserRuleContext<Symbol>)super.getParent();
+public ParserRuleContext getParent() {
+return (ParserRuleContext)super.getParent();
 }

 @Override
@@ -208,16 +208,15 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
 return null;
 }

-@SuppressWarnings("checked")
-public TerminalNode<Symbol> getToken(int ttype, int i) {
+public TerminalNode getToken(int ttype, int i) {
 if ( children==null || i < 0 || i >= children.size() ) {
 return null;
 }

 int j = -1; // what token with ttype have we found?
 for (ParseTree o : children) {
-if ( o instanceof TerminalNode<?> ) {
-TerminalNode<Symbol> tnode = (TerminalNode<Symbol>)o;
+if ( o instanceof TerminalNode ) {
+TerminalNode tnode = (TerminalNode)o;
 Token symbol = tnode.getSymbol();
 if ( symbol.getType()==ttype ) {
 j++;
@@ -231,20 +230,19 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
 return null;
 }

-@SuppressWarnings("checked")
-public List<TerminalNode<Symbol>> getTokens(int ttype) {
+public List<TerminalNode> getTokens(int ttype) {
 if ( children==null ) {
 return Collections.emptyList();
 }

-List<TerminalNode<Symbol>> tokens = null;
+List<TerminalNode> tokens = null;
 for (ParseTree o : children) {
-if ( o instanceof TerminalNode<?> ) {
-TerminalNode<Symbol> tnode = (TerminalNode<Symbol>)o;
+if ( o instanceof TerminalNode ) {
+TerminalNode tnode = (TerminalNode)o;
 Token symbol = tnode.getSymbol();
 if ( symbol.getType()==ttype ) {
 if ( tokens==null ) {
-tokens = new ArrayList<TerminalNode<Symbol>>();
+tokens = new ArrayList<TerminalNode>();
 }
 tokens.add(tnode);
 }
@@ -258,11 +256,11 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
 return tokens;
 }

-public <T extends ParserRuleContext<?>> T getRuleContext(Class<? extends T> ctxType, int i) {
+public <T extends ParserRuleContext> T getRuleContext(Class<? extends T> ctxType, int i) {
 return getChild(ctxType, i);
 }

-public <T extends ParserRuleContext<?>> List<? extends T> getRuleContexts(Class<? extends T> ctxType) {
+public <T extends ParserRuleContext> List<? extends T> getRuleContexts(Class<? extends T> ctxType) {
 if ( children==null ) {
 return Collections.emptyList();
 }
@@ -294,8 +292,8 @@ public class ParserRuleContext<Symbol extends Token> extends RuleContext {
 return Interval.of(start.getTokenIndex(), stop.getTokenIndex());
 }

-public Symbol getStart() { return start; }
-public Symbol getStop() { return stop; }
+public Token getStart() { return start; }
+public Token getStop() { return stop; }

 /** Used for rule context info debugging during parse-time, not so much for ATN debugging */
 public String toInfoString(Parser recognizer) {

@@ -62,7 +62,7 @@ import java.util.List;
 * @see ParserRuleContext
 */
 public class RuleContext implements RuleNode {
-public static final ParserRuleContext<Token> EMPTY = new ParserRuleContext<Token>();
+public static final ParserRuleContext EMPTY = new ParserRuleContext();

 /** What context invoked this rule? */
 public RuleContext parent;

@@ -285,7 +285,7 @@ public class ParserATNSimulator extends ATNSimulator {
 // LAME globals to avoid parameters!!!!! I need these down deep in predTransition
 protected TokenStream _input;
 protected int _startIndex;
-protected ParserRuleContext<?> _outerContext;
+protected ParserRuleContext _outerContext;

 /** Testing only! */
 public ParserATNSimulator(@NotNull ATN atn, @NotNull DFA[] decisionToDFA,
@@ -311,7 +311,7 @@ public class ParserATNSimulator extends ATNSimulator {
 }

 public int adaptivePredict(@NotNull TokenStream input, int decision,
-@Nullable ParserRuleContext<?> outerContext)
+@Nullable ParserRuleContext outerContext)
 {
 if ( debug || debug_list_atn_decisions ) {
 System.out.println("adaptivePredict decision "+decision+
@@ -364,7 +364,7 @@ public class ParserATNSimulator extends ATNSimulator {
 }

 public int predictATN(@NotNull DFA dfa, @NotNull TokenStream input,
-@Nullable ParserRuleContext<?> outerContext)
+@Nullable ParserRuleContext outerContext)
 {
 // caller must have write lock on dfa
 if ( outerContext==null ) outerContext = ParserRuleContext.EMPTY;
@@ -400,7 +400,7 @@ public class ParserATNSimulator extends ATNSimulator {

 public int execDFA(@NotNull DFA dfa, @NotNull DFAState s0,
 @NotNull TokenStream input, int startIndex,
-@Nullable ParserRuleContext<?> outerContext)
+@Nullable ParserRuleContext outerContext)
 {
 // caller must have read lock on dfa
 if ( outerContext==null ) outerContext = ParserRuleContext.EMPTY;
@@ -545,7 +545,7 @@ public class ParserATNSimulator extends ATNSimulator {
 */
 public int execATN(@NotNull DFA dfa, @NotNull DFAState s0,
 @NotNull TokenStream input, int startIndex,
-ParserRuleContext<?> outerContext)
+ParserRuleContext outerContext)
 {
 // caller is expected to have write lock on dfa
 if ( debug || debug_list_atn_decisions) {
@@ -724,7 +724,7 @@ public class ParserATNSimulator extends ATNSimulator {
 DFAState D, // how far we got before failing over
 @NotNull ATNConfigSet s0,
 @NotNull TokenStream input, int startIndex,
-ParserRuleContext<?> outerContext,
+ParserRuleContext outerContext,
 int SLL_min_alt) // todo: is this in D as min ambig alts?
 {
 // caller must have write lock on dfa
@@ -1005,7 +1005,7 @@ public class ParserATNSimulator extends ATNSimulator {
 * includes pairs with null predicates.
 */
 public BitSet evalSemanticContext(List<DFAState.PredPrediction> predPredictions,
-ParserRuleContext<?> outerContext,
+ParserRuleContext outerContext,
 boolean complete)
 {
 BitSet predictions = new BitSet();
@@ -1355,7 +1355,7 @@ public class ParserATNSimulator extends ATNSimulator {

 @NotNull
 public NoViableAltException noViableAlt(@NotNull TokenStream input,
-@NotNull ParserRuleContext<?> outerContext,
+@NotNull ParserRuleContext outerContext,
 @NotNull ATNConfigSet configs,
 int startIndex)
 {

@@ -118,7 +118,7 @@ public class OrderedHashSet<T> extends LinkedHashSet<T> {

 @Override
 public Object clone() {
-@SuppressWarnings("unchecked")
+@SuppressWarnings("unchecked") // safe (result of clone)
 OrderedHashSet<T> dup = (OrderedHashSet<T>)super.clone();
 dup.elements = new ArrayList<T>(this.elements);
 return dup;

@@ -139,32 +139,32 @@ public class TestRig {
 // System.out.println("exec "+grammarName+"."+startRuleName);
 String lexerName = grammarName+"Lexer";
 ClassLoader cl = Thread.currentThread().getContextClassLoader();
-Class lexerClass;
+Class<? extends Lexer> lexerClass;
 try {
-lexerClass = cl.loadClass(lexerName);
+lexerClass = cl.loadClass(lexerName).asSubclass(Lexer.class);
 }
 catch (java.lang.ClassNotFoundException cnfe) {
 // might be pure lexer grammar; no Lexer suffix then
 lexerName = grammarName;
-lexerClass = cl.loadClass(lexerName);
+lexerClass = cl.loadClass(lexerName).asSubclass(Lexer.class);
 }
 if ( lexerClass==null ) {
 System.err.println("Can't load "+lexerName);
 return;
 }

-Constructor<Lexer> lexerCtor = lexerClass.getConstructor(CharStream.class);
+Constructor<? extends Lexer> lexerCtor = lexerClass.getConstructor(CharStream.class);
 Lexer lexer = lexerCtor.newInstance((CharStream)null);

-Class parserClass = null;
+Class<? extends Parser> parserClass = null;
 Parser parser = null;
 if ( !startRuleName.equals(LEXER_START_RULE_NAME) ) {
 String parserName = grammarName+"Parser";
-parserClass = cl.loadClass(parserName);
+parserClass = cl.loadClass(parserName).asSubclass(Parser.class);
 if ( parserClass==null ) {
 System.err.println("Can't load "+parserName);
 }
-Constructor<Parser> parserCtor = parserClass.getConstructor(TokenStream.class);
+Constructor<? extends Parser> parserCtor = parserClass.getConstructor(TokenStream.class);
 parser = parserCtor.newInstance((TokenStream)null);
 }

@@ -201,7 +201,7 @@ public class TestRig {
 }
 }

-static void process(Lexer lexer, Class parserClass, Parser parser, InputStream is, Reader r) throws IOException, IllegalAccessException, InvocationTargetException, PrintException {
+static void process(Lexer lexer, Class<? extends Parser> parserClass, Parser parser, InputStream is, Reader r) throws IOException, IllegalAccessException, InvocationTargetException, PrintException {
 try {
 ANTLRInputStream input = new ANTLRInputStream(r);
 lexer.setInputStream(input);
@@ -232,8 +232,8 @@ public class TestRig {
 parser.setTrace(trace);

 try {
-Method startRule = parserClass.getMethod(startRuleName, (Class[])null);
-ParserRuleContext<Token> tree = (ParserRuleContext<Token>)startRule.invoke(parser, (Object[])null);
+Method startRule = parserClass.getMethod(startRuleName);
+ParserRuleContext tree = (ParserRuleContext)startRule.invoke(parser, (Object[])null);

 if ( printTree ) {
 System.out.println(tree.toStringTree(parser));

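The TestRig hunks above replace raw Class and Constructor references with bounded ones obtained through Class.asSubclass. A minimal standalone sketch of that reflection pattern, using a JDK type instead of a generated lexer (hypothetical demo class, not from this commit):

import java.lang.reflect.Constructor;

public class AsSubclassDemo {
    public static void main(String[] args) throws Exception {
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        // asSubclass() performs the checked cast once, so the Class and Constructor
        // below carry a type bound and no raw types or unchecked casts are needed.
        Class<? extends CharSequence> cls =
                cl.loadClass("java.lang.StringBuilder").asSubclass(CharSequence.class);
        Constructor<? extends CharSequence> ctor = cls.getConstructor(String.class);
        CharSequence cs = ctor.newInstance("hello");
        System.out.println(cs.length()); // prints 5
    }
}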
@@ -38,7 +38,7 @@ import java.util.Iterator;

 public class Utils {
 // Seriously: why isn't this built in to java? ugh!
-public static String join(Iterator iter, String separator) {
+public static <T> String join(Iterator<T> iter, String separator) {
 StringBuilder buf = new StringBuilder();
 while ( iter.hasNext() ) {
 buf.append(iter.next());

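The Utils hunk above generifies join so call sites can pass a typed Iterator without raw-type warnings. A self-contained sketch of the same pattern (local copy for illustration only; the separator handling is assumed, since the hunk ends before that part of the loop):

import java.util.Arrays;
import java.util.Iterator;

public class JoinDemo {
    // Same generic signature as the updated Utils.join; the body is illustrative.
    static <T> String join(Iterator<T> iter, String separator) {
        StringBuilder buf = new StringBuilder();
        while (iter.hasNext()) {
            buf.append(iter.next());
            if (iter.hasNext()) buf.append(separator);   // assumed separator placement
        }
        return buf.toString();
    }

    public static void main(String[] args) {
        Iterator<String> words = Arrays.asList("a", "b", "c").iterator();
        System.out.println(join(words, ", "));           // prints: a, b, c
    }
}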
@@ -29,7 +29,5 @@

 package org.antlr.v4.runtime.tree;

-import org.antlr.v4.runtime.Token;
-
-public interface ErrorNode<Symbol extends Token> extends TerminalNode<Symbol> {
+public interface ErrorNode extends TerminalNode {
 }

@@ -37,11 +37,8 @@ import org.antlr.v4.runtime.Token;
 * and deletion as well as during "consume until error recovery set"
 * upon no viable alternative exceptions.
 */
-public class ErrorNodeImpl<Symbol extends Token> extends
-TerminalNodeImpl<Symbol>
-implements ErrorNode<Symbol>
-{
-public ErrorNodeImpl(Symbol token) {
+public class ErrorNodeImpl extends TerminalNodeImpl implements ErrorNode {
+public ErrorNodeImpl(Token token) {
 super(token);
 }

@@ -30,11 +30,10 @@
 package org.antlr.v4.runtime.tree;

 import org.antlr.v4.runtime.ParserRuleContext;
-import org.antlr.v4.runtime.Token;

-public interface ParseTreeListener<Symbol extends Token> {
-void visitTerminal(TerminalNode<Symbol> node);
-void visitErrorNode(ErrorNode<Symbol> node);
-void enterEveryRule(ParserRuleContext<Symbol> ctx);
-void exitEveryRule(ParserRuleContext<Symbol> ctx);
+public interface ParseTreeListener {
+void visitTerminal(TerminalNode node);
+void visitErrorNode(ErrorNode node);
+void enterEveryRule(ParserRuleContext ctx);
+void exitEveryRule(ParserRuleContext ctx);
 }

@@ -42,12 +42,12 @@ public class ParseTreeVisitor<T> {
 * @param node The {@link TerminalNode} to visit.
 * @return The result of visiting the node.
 */
-public T visitTerminal(TerminalNode<? extends Token> node) { return null; }
+public T visitTerminal(TerminalNode node) { return null; }

 /** Visit an error node, and return a user-defined result of the operation.
 *
 * @param node The {@link ErrorNode} to visit.
 * @return The result of visiting the node.
 */
-public T visitErrorNode(ErrorNode<? extends Token> node) { return null; }
+public T visitErrorNode(ErrorNode node) { return null; }
 }

@@ -30,19 +30,17 @@
 package org.antlr.v4.runtime.tree;

 import org.antlr.v4.runtime.ParserRuleContext;
-import org.antlr.v4.runtime.Token;

 public class ParseTreeWalker {
 public static final ParseTreeWalker DEFAULT = new ParseTreeWalker();

-@SuppressWarnings("unchecked")
-public <Symbol extends Token> void walk(ParseTreeListener<Symbol> listener, ParseTree t) {
+public void walk(ParseTreeListener listener, ParseTree t) {
 if ( t instanceof ErrorNode) {
-listener.visitErrorNode((ErrorNode<Symbol>)t);
+listener.visitErrorNode((ErrorNode)t);
 return;
 }
 else if ( t instanceof TerminalNode) {
-listener.visitTerminal((TerminalNode<Symbol>)t);
+listener.visitTerminal((TerminalNode)t);
 return;
 }
 RuleNode r = (RuleNode)t;
@@ -59,16 +57,14 @@ public class ParseTreeWalker {
 * First we trigger the generic and then the rule specific.
 * We to them in reverse order upon finishing the node.
 */
-protected <Symbol extends Token> void enterRule(ParseTreeListener<Symbol> listener, RuleNode r) {
-@SuppressWarnings("unchecked")
-ParserRuleContext<Symbol> ctx = (ParserRuleContext<Symbol>)r.getRuleContext();
+protected void enterRule(ParseTreeListener listener, RuleNode r) {
+ParserRuleContext ctx = (ParserRuleContext)r.getRuleContext();
 listener.enterEveryRule(ctx);
 ctx.enterRule(listener);
 }

-protected <Symbol extends Token> void exitRule(ParseTreeListener<Symbol> listener, RuleNode r) {
-@SuppressWarnings("unchecked")
-ParserRuleContext<Symbol> ctx = (ParserRuleContext<Symbol>)r.getRuleContext();
+protected void exitRule(ParseTreeListener listener, RuleNode r) {
+ParserRuleContext ctx = (ParserRuleContext)r.getRuleContext();
 ctx.exitRule(listener);
 listener.exitEveryRule(ctx);
 }

|
@ -31,6 +31,6 @@ package org.antlr.v4.runtime.tree;
|
|||
|
||||
import org.antlr.v4.runtime.Token;
|
||||
|
||||
public interface TerminalNode<Symbol extends Token> extends ParseTree {
|
||||
Symbol getSymbol();
|
||||
public interface TerminalNode extends ParseTree {
|
||||
Token getSymbol();
|
||||
}
|
||||
|
|
|
@@ -33,24 +33,24 @@ import org.antlr.v4.runtime.Parser;
 import org.antlr.v4.runtime.Token;
 import org.antlr.v4.runtime.misc.Interval;

-public class TerminalNodeImpl<Symbol extends Token> implements TerminalNode<Symbol> {
-public Symbol symbol;
+public class TerminalNodeImpl implements TerminalNode {
+public Token symbol;
 public ParseTree parent;
 /** Which ATN node matched this token? */
 public int s;
-public TerminalNodeImpl(Symbol symbol) { this.symbol = symbol; }
+public TerminalNodeImpl(Token symbol) { this.symbol = symbol; }

 @Override
 public ParseTree getChild(int i) {return null;}

 @Override
-public Symbol getSymbol() {return symbol;}
+public Token getSymbol() {return symbol;}

 @Override
 public ParseTree getParent() { return parent; }

 @Override
-public Symbol getPayload() { return symbol; }
+public Token getPayload() { return symbol; }

 @Override
 public Interval getSourceInterval() {

@@ -132,9 +132,9 @@ public class Trees {
 return t.toString();
 }
 else if ( t instanceof TerminalNode) {
-Object symbol = ((TerminalNode<?>)t).getSymbol();
-if (symbol instanceof Token) {
-String s = ((Token)symbol).getText();
+Token symbol = ((TerminalNode)t).getSymbol();
+if (symbol != null) {
+String s = symbol.getText();
 return s;
 }
 }

|
|||
import org.antlr.v4.runtime.tree.*;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
|
||||
public interface <file.grammarName>Listener extends ParseTreeListener\<<InputSymbolType()>\> {
|
||||
public interface <file.grammarName>Listener extends ParseTreeListener {
|
||||
<file.listenerNames:{lname |
|
||||
void enter<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx);
|
||||
void exit<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx);}; separator="\n">
|
||||
|
@ -61,10 +61,10 @@ public class <file.grammarName>BaseListener implements <file.grammarName>Listene
|
|||
@Override public void enter<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx) { \}
|
||||
@Override public void exit<lname; format="cap">(<file.parserName>.<lname; format="cap">Context ctx) { \}}; separator="\n">
|
||||
|
||||
@Override public void enterEveryRule(ParserRuleContext\<<InputSymbolType()>\> ctx) { }
|
||||
@Override public void exitEveryRule(ParserRuleContext\<<InputSymbolType()>\> ctx) { }
|
||||
@Override public void visitTerminal(TerminalNode\<<InputSymbolType()>\> node) { }
|
||||
@Override public void visitErrorNode(ErrorNode\<<InputSymbolType()>\> node) { }
|
||||
@Override public void enterEveryRule(ParserRuleContext ctx) { }
|
||||
@Override public void exitEveryRule(ParserRuleContext ctx) { }
|
||||
@Override public void visitTerminal(TerminalNode node) { }
|
||||
@Override public void visitErrorNode(ErrorNode node) { }
|
||||
}
|
||||
>>
|
||||
|
||||
|
@ -245,7 +245,7 @@ LeftRecursiveRuleFunction(currentRule,code,locals,ruleCtx,altLabelCtxs,
|
|||
<altLabelCtxs:{l | <altLabelCtxs.(l)>}; separator="\n">
|
||||
|
||||
<if(currentRule.modifiers)><currentRule.modifiers:{f | <f> }><else>public final <endif><currentRule.ctxType> <currentRule.name>(<currentRule.args; separator=",">) throws RecognitionException {
|
||||
ParserRuleContext\<Token> _parentctx = _ctx;
|
||||
ParserRuleContext _parentctx = _ctx;
|
||||
int _parentState = _ctx.s;
|
||||
<currentRule.ctxType> _localctx = new <currentRule.ctxType>(_ctx, _parentState<currentRule.args:{a | , <a.name>}>);
|
||||
<currentRule.ctxType> _prevctx = _localctx;
|
||||
|
@ -558,11 +558,11 @@ RuleContextDecl(r) ::= "public <r.ctxName> <r.name>;"
|
|||
RuleContextListDecl(rdecl) ::= "public List\<<rdecl.ctxName>> <rdecl.name> = new ArrayList\<<rdecl.ctxName>>();"
|
||||
|
||||
ContextTokenGetterDecl(t) ::=
|
||||
"public TerminalNode\<<TokenLabelType()>> <t.name>() { return getToken(<parser.name>.<t.name>, 0); }"
|
||||
"public TerminalNode <t.name>() { return getToken(<parser.name>.<t.name>, 0); }"
|
||||
ContextTokenListGetterDecl(t) ::=
|
||||
"public List\<TerminalNode\<<TokenLabelType()>>> <t.name>() { return getTokens(<parser.name>.<t.name>); }"
|
||||
"public List\<TerminalNode> <t.name>() { return getTokens(<parser.name>.<t.name>); }"
|
||||
ContextTokenListIndexedGetterDecl(t) ::= <<
|
||||
public TerminalNode\<<TokenLabelType()>\> <t.name>(int i) {
|
||||
public TerminalNode <t.name>(int i) {
|
||||
return getToken(<parser.name>.<t.name>, i);
|
||||
}
|
||||
>>
|
||||
|
@ -598,12 +598,12 @@ CaptureNextToken(d) ::= "<d.varName> = _input.LT(1);"
|
|||
CaptureNextTokenType(d) ::= "<d.varName> = _input.LA(1);"
|
||||
|
||||
StructDecl(struct,attrs,getters,dispatchMethods,interfaces,extensionMembers,
|
||||
superClass={ParserRuleContext\<<InputSymbolType()>>}) ::= <<
|
||||
superClass={ParserRuleContext}) ::= <<
|
||||
public static class <struct.name> extends <superClass><if(interfaces)> implements <interfaces; separator=", "><endif> {
|
||||
<attrs:{a | <a>}; separator="\n">
|
||||
<getters:{g | <g>}; separator="\n">
|
||||
<if(struct.ctorAttrs)>public <struct.name>(ParserRuleContext\<<InputSymbolType()>\> parent, int state) { super(parent, state); }<endif>
|
||||
public <struct.name>(ParserRuleContext\<<InputSymbolType()>\> parent, int state<struct.ctorAttrs:{a | , <a>}>) {
|
||||
<if(struct.ctorAttrs)>public <struct.name>(ParserRuleContext parent, int state) { super(parent, state); }<endif>
|
||||
public <struct.name>(ParserRuleContext parent, int state<struct.ctorAttrs:{a | , <a>}>) {
|
||||
super(parent, state);
|
||||
<struct.ctorAttrs:{a | this.<a.name> = <a.name>;}; separator="\n">
|
||||
}
|
||||
|
@ -631,7 +631,7 @@ public static class <struct.name> extends <currentRule.name; format="cap">Contex
|
|||
|
||||
ListenerDispatchMethod(method) ::= <<
|
||||
@Override
|
||||
public void <if(method.isEnter)>enter<else>exit<endif>Rule(ParseTreeListener\<<InputSymbolType()>\> listener) {
|
||||
public void <if(method.isEnter)>enter<else>exit<endif>Rule(ParseTreeListener listener) {
|
||||
if ( listener instanceof <parser.grammarName>Listener ) ((<parser.grammarName>Listener)listener).<if(method.isEnter)>enter<else>exit<endif><struct.derivedFromName; format="cap">(this);
|
||||
}
|
||||
>>
|
||||
|
|
|
@@ -940,17 +940,17 @@ public abstract class BaseTest {
 " CommonTokenStream tokens = new CommonTokenStream(lex);\n" +
 " <createParser>\n"+
 " parser.setBuildParseTree(true);\n" +
-" ParserRuleContext\\<Token> tree = parser.<parserStartRuleName>();\n" +
+" ParserRuleContext tree = parser.<parserStartRuleName>();\n" +
 " ParseTreeWalker.DEFAULT.walk(new TreeShapeListener(), tree);\n" +
 " }\n" +
 "\n" +
-" static class TreeShapeListener implements ParseTreeListener\\<Token> {\n" +
-" @Override public void visitTerminal(TerminalNode\\<Token> node) { }\n" +
-" @Override public void visitErrorNode(ErrorNode\\<Token> node) { }\n" +
-" @Override public void exitEveryRule(ParserRuleContext\\<Token> ctx) { }\n" +
+" static class TreeShapeListener implements ParseTreeListener {\n" +
+" @Override public void visitTerminal(TerminalNode node) { }\n" +
+" @Override public void visitErrorNode(ErrorNode node) { }\n" +
+" @Override public void exitEveryRule(ParserRuleContext ctx) { }\n" +
 "\n" +
 " @Override\n" +
-" public void enterEveryRule(ParserRuleContext\\<Token> ctx) {\n" +
+" public void enterEveryRule(ParserRuleContext ctx) {\n" +
 " for (int i = 0; i \\< ctx.getChildCount(); i++) {\n" +
 " ParseTree parent = ctx.getChild(i).getParent();\n" +
 " if (!(parent instanceof RuleNode) || ((RuleNode)parent).getRuleContext() != ctx) {\n" +

@@ -504,7 +504,7 @@ public class TestATNParserPrediction extends BaseTest {
 }

 public synchronized DFA getDFA(LexerGrammar lg, Grammar g, String ruleName,
-String inputString, ParserRuleContext<?> ctx)
+String inputString, ParserRuleContext ctx)
 {
 // sync to ensure multiple tests don't race on dfa access
 Tool.internalOption_ShowATNConfigsInDFA = true;

@@ -42,7 +42,7 @@ import org.junit.Test;
 public class TestBufferedTokenStream extends BaseTest {

 protected TokenStream createTokenStream(TokenSource src) {
-return new BufferedTokenStream<Token>(src);
+return new BufferedTokenStream(src);
 }

 @Test public void testFirstToken() throws Exception {

@@ -9,7 +9,7 @@ public class TestListeners extends BaseTest {
 "@header {import org.antlr.v4.runtime.tree.*;}\n"+
 "@members {\n" +
 "public static class LeafListener extends TBaseListener {\n" +
-" public void visitTerminal(TerminalNode<Token> node) {\n" +
+" public void visitTerminal(TerminalNode node) {\n" +
 " System.out.println(node.getSymbol().getText());\n" +
 " }\n" +
 " }}\n" +

@@ -220,8 +220,7 @@ public class TestPerformance extends BaseTest {

 private static final Parser[] sharedParsers = new Parser[NUMBER_OF_THREADS];

-@SuppressWarnings("unchecked")
-private static final ParseTreeListener<Token>[] sharedListeners = (ParseTreeListener<Token>[])new ParseTreeListener<?>[NUMBER_OF_THREADS];
+private static final ParseTreeListener[] sharedListeners = new ParseTreeListener[NUMBER_OF_THREADS];

 private final AtomicInteger tokenCount = new AtomicInteger();
 private int currentPass;
@@ -616,8 +615,7 @@ public class TestPerformance extends BaseTest {
 ClassLoader loader = new URLClassLoader(new URL[] { new File(tmpdir).toURI().toURL() }, ClassLoader.getSystemClassLoader());
 final Class<? extends Lexer> lexerClass = loader.loadClass(lexerName).asSubclass(Lexer.class);
 final Class<? extends Parser> parserClass = loader.loadClass(parserName).asSubclass(Parser.class);
-@SuppressWarnings("unchecked")
-final Class<? extends ParseTreeListener<Token>> listenerClass = (Class<? extends ParseTreeListener<Token>>)loader.loadClass(listenerName).asSubclass(ParseTreeListener.class);
+final Class<? extends ParseTreeListener> listenerClass = loader.loadClass(listenerName).asSubclass(ParseTreeListener.class);

 final Constructor<? extends Lexer> lexerCtor = lexerClass.getConstructor(CharStream.class);
 final Constructor<? extends Parser> parserCtor = parserClass.getConstructor(TokenStream.class);
@@ -701,7 +699,7 @@ public class TestPerformance extends BaseTest {
 Method parseMethod = parserClass.getMethod(entryPoint);
 Object parseResult;

-ParseTreeListener<Token> checksumParserListener = null;
+ParseTreeListener checksumParserListener = null;

 try {
 if (COMPUTE_CHECKSUM) {
@@ -879,7 +877,7 @@ public class TestPerformance extends BaseTest {

 }

-protected static class ChecksumParseTreeListener implements ParseTreeListener<Token> {
+protected static class ChecksumParseTreeListener implements ParseTreeListener {
 private static final int VISIT_TERMINAL = 1;
 private static final int VISIT_ERROR_NODE = 2;
 private static final int ENTER_RULE = 3;
@@ -892,26 +890,26 @@ public class TestPerformance extends BaseTest {
 }

 @Override
-public void visitTerminal(TerminalNode<Token> node) {
+public void visitTerminal(TerminalNode node) {
 checksum.update(VISIT_TERMINAL);
 updateChecksum(checksum, node.getSymbol());
 }

 @Override
-public void visitErrorNode(ErrorNode<Token> node) {
+public void visitErrorNode(ErrorNode node) {
 checksum.update(VISIT_ERROR_NODE);
 updateChecksum(checksum, node.getSymbol());
 }

 @Override
-public void enterEveryRule(ParserRuleContext<Token> ctx) {
+public void enterEveryRule(ParserRuleContext ctx) {
 checksum.update(ENTER_RULE);
 updateChecksum(checksum, ctx.getRuleIndex());
 updateChecksum(checksum, ctx.getStart());
 }

 @Override
-public void exitEveryRule(ParserRuleContext<Token> ctx) {
+public void exitEveryRule(ParserRuleContext ctx) {
 checksum.update(EXIT_RULE);
 updateChecksum(checksum, ctx.getRuleIndex());
 updateChecksum(checksum, ctx.getStop());