Code cleanup (little things like using .isEmpty() and StringBuilder, and specifying some small-scale generic arguments)

This commit is contained in:
Sam Harwell 2012-02-23 16:42:36 -06:00
parent 1e3e092326
commit 9bf6f284df
14 changed files with 59 additions and 64 deletions

View File

@@ -218,7 +218,7 @@ public class BufferedTokenStream<T extends Token> implements TokenStream {
filteredTokens.add(t);
}
}
if ( filteredTokens.size()==0 ) {
if ( filteredTokens.isEmpty() ) {
filteredTokens = null;
}
return filteredTokens;

View File

@@ -90,7 +90,7 @@ public class TokenRewriteStream extends CommonTokenStream {
// Define the rewrite operation hierarchy
class RewriteOperation {
public class RewriteOperation {
/** What index into rewrites List are we? */
protected int instructionIndex;
/** Token buffer index. */
@@ -376,7 +376,7 @@ public class TokenRewriteStream extends CommonTokenStream {
if ( end>tokens.size()-1 ) end = tokens.size()-1;
if ( start<0 ) start = 0;
if ( rewrites==null || rewrites.size()==0 ) {
if ( rewrites==null || rewrites.isEmpty() ) {
return toOriginalString(start,end); // no instructions to execute
}
StringBuilder buf = new StringBuilder();
@@ -474,9 +474,8 @@ public class TokenRewriteStream extends CommonTokenStream {
if ( !(op instanceof ReplaceOp) ) continue;
ReplaceOp rop = (ReplaceOp)rewrites.get(i);
// Wipe prior inserts within range
List<InsertBeforeOp> inserts = getKindOfOps(rewrites, InsertBeforeOp.class, i);
for (int j = 0; j < inserts.size(); j++) {
InsertBeforeOp iop = inserts.get(j);
List<? extends InsertBeforeOp> inserts = getKindOfOps(rewrites, InsertBeforeOp.class, i);
for (InsertBeforeOp iop : inserts) {
if ( iop.index == rop.index ) {
// E.g., insert before 2, delete 2..2; update replace
// text to include insert before, kill insert
@@ -489,9 +488,8 @@ public class TokenRewriteStream extends CommonTokenStream {
}
}
// Drop any prior replaces contained within
List<ReplaceOp> prevReplaces = getKindOfOps(rewrites, ReplaceOp.class, i);
for (int j = 0; j < prevReplaces.size(); j++) {
ReplaceOp prevRop = prevReplaces.get(j);
List<? extends ReplaceOp> prevReplaces = getKindOfOps(rewrites, ReplaceOp.class, i);
for (ReplaceOp prevRop : prevReplaces) {
if ( prevRop.index>=rop.index && prevRop.lastIndex <= rop.lastIndex ) {
// delete replace as it's a no-op.
rewrites.set(prevRop.instructionIndex, null);
@@ -525,9 +523,8 @@ public class TokenRewriteStream extends CommonTokenStream {
if ( !(op instanceof InsertBeforeOp) ) continue;
InsertBeforeOp iop = (InsertBeforeOp)rewrites.get(i);
// combine current insert with prior if any at same index
List<InsertBeforeOp> prevInserts = getKindOfOps(rewrites, InsertBeforeOp.class, i);
for (int j = 0; j < prevInserts.size(); j++) {
InsertBeforeOp prevIop = prevInserts.get(j);
List<? extends InsertBeforeOp> prevInserts = getKindOfOps(rewrites, InsertBeforeOp.class, i);
for (InsertBeforeOp prevIop : prevInserts) {
if ( prevIop.index == iop.index ) { // combine objects
// convert to strings...we're in process of toString'ing
// whole token buffer so no lazy eval issue with any templates
@@ -537,9 +534,8 @@ public class TokenRewriteStream extends CommonTokenStream {
}
}
// look for replaces where iop.index is in range; error
List<ReplaceOp> prevReplaces = getKindOfOps(rewrites, ReplaceOp.class, i);
for (int j = 0; j < prevReplaces.size(); j++) {
ReplaceOp rop = prevReplaces.get(j);
List<? extends ReplaceOp> prevReplaces = getKindOfOps(rewrites, ReplaceOp.class, i);
for (ReplaceOp rop : prevReplaces) {
if ( iop.index == rop.index ) {
rop.text = catOpText(iop.text,rop.text);
rewrites.set(i, null); // delete current insert
@@ -573,18 +569,17 @@ public class TokenRewriteStream extends CommonTokenStream {
return x+y;
}
protected <T extends RewriteOperation> List<T> getKindOfOps(List<? extends RewriteOperation> rewrites, Class<T> kind) {
protected <T extends RewriteOperation> List<? extends T> getKindOfOps(List<? extends RewriteOperation> rewrites, Class<T> kind) {
return getKindOfOps(rewrites, kind, rewrites.size());
}
/** Get all operations before an index of a particular kind */
protected <T extends RewriteOperation> List<T> getKindOfOps(List<? extends RewriteOperation> rewrites, Class<T> kind, int before) {
protected <T extends RewriteOperation> List<? extends T> getKindOfOps(List<? extends RewriteOperation> rewrites, Class<T> kind, int before) {
List<T> ops = new ArrayList<T>();
for (int i=0; i<before && i<rewrites.size(); i++) {
RewriteOperation op = rewrites.get(i);
if ( op==null ) continue; // ignore deleted
if ( op.getClass() == kind ) {
//noinspection unchecked
if ( kind.isInstance(op) ) {
ops.add((T)op);
}
}

View File

@@ -32,7 +32,7 @@ package org.antlr.v4.runtime;
/** A stream of tokens accessing tokens from a TokenSource */
public interface TokenStream extends SymbolStream<Token> {
/** Get Token at current input pointer + i ahead where i=1 is next Token.
* i<0 indicates tokens in the past. So -1 is previous token and -2 is
* i&lt;0 indicates tokens in the past. So -1 is previous token and -2 is
* two tokens ago. LT(0) is undefined. For i>=n, return Token.EOFToken.
* Return null for LT(0) and any index that results in an absolute address
* that is negative.

View File

@@ -508,7 +508,7 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
if ( greedy ) {
int k = input.index() - startIndex + 1; // how much input we used
// System.out.println("used k="+k);
if ( outerContext == ParserRuleContext.EMPTY || // in grammar start rule
if ( outerContext.isEmpty() || // in grammar start rule
!D.configset.dipsIntoOuterContext ||
k == 1 ) // SLL(1) == LL(1)
{

View File

@@ -217,7 +217,7 @@ public class Tool {
i++;
}
// use reflection to set field
Class c = this.getClass();
Class<? extends Tool> c = this.getClass();
try {
Field f = c.getField(o.fieldName);
if ( argValue==null ) {
@@ -417,7 +417,7 @@ public class Tool {
}
catch (RecognitionException re) {
// TODO: do we gen errors now?
errMgr.internalError("can't generate this message at moment; antlr recovers");
ErrorManager.internalError("can't generate this message at moment; antlr recovers");
}
return null;
}
@@ -511,7 +511,7 @@ public class Tool {
* @return
*/
public File getOutputDirectory(String fileNameWithPath) {
File outputDir = new File(outputDirectory);
File outputDir;
String fileDirectory;
// Some files are given to us without a PATH but should should
@@ -592,21 +592,21 @@ public class Tool {
public List<ANTLRToolListener> getListeners() { return listeners; }
public void info(String msg) {
if ( listeners.size()==0 ) {
if ( listeners.isEmpty() ) {
defaultListener.info(msg);
return;
}
for (ANTLRToolListener l : listeners) l.info(msg);
}
public void error(ANTLRMessage msg) {
if ( listeners.size()==0 ) {
if ( listeners.isEmpty() ) {
defaultListener.error(msg);
return;
}
for (ANTLRToolListener l : listeners) l.error(msg);
}
public void warning(ANTLRMessage msg) {
if ( listeners.size()==0 ) {
if ( listeners.isEmpty() ) {
defaultListener.warning(msg);
return;
}

View File

@@ -65,7 +65,7 @@ public class LexerATNFactory extends ParserATNFactory {
for (String modeName : modes) {
// create s0, start state; implied Tokens rule node
TokensStartState startState =
(TokensStartState)newState(TokensStartState.class, null);
newState(TokensStartState.class, null);
atn.modeNameToStartState.put(modeName, startState);
atn.modeToStartState.add(startState);
atn.defineDecisionState(startState);

View File

@@ -50,21 +50,21 @@ import java.util.Map;
/** */
public class ActionTranslator implements ActionSplitterListener {
public static final Map<String, Class> thisRulePropToModelMap = new HashMap<String, Class>() {{
public static final Map<String, Class<? extends RulePropertyRef>> thisRulePropToModelMap = new HashMap<String, Class<? extends RulePropertyRef>>() {{
put("start", ThisRulePropertyRef_start.class);
put("stop", ThisRulePropertyRef_stop.class);
put("text", ThisRulePropertyRef_text.class);
put("ctx", ThisRulePropertyRef_ctx.class);
}};
public static final Map<String, Class> rulePropToModelMap = new HashMap<String, Class>() {{
public static final Map<String, Class<? extends RulePropertyRef>> rulePropToModelMap = new HashMap<String, Class<? extends RulePropertyRef>>() {{
put("start", RulePropertyRef_start.class);
put("stop", RulePropertyRef_stop.class);
put("text", RulePropertyRef_text.class);
put("ctx", RulePropertyRef_ctx.class);
}};
public static final Map<String, Class> tokenPropToModelMap = new HashMap<String, Class>() {{
public static final Map<String, Class<? extends TokenPropertyRef>> tokenPropToModelMap = new HashMap<String, Class<? extends TokenPropertyRef>>() {{
put("text", TokenPropertyRef_text.class);
put("type", TokenPropertyRef_type.class);
put("line", TokenPropertyRef_line.class);
@@ -236,10 +236,10 @@ public class ActionTranslator implements ActionSplitterListener {
TokenPropertyRef getTokenPropertyRef(Token x, Token y) {
try {
Class c = tokenPropToModelMap.get(y.getText());
Constructor ctor = c.getConstructor(new Class[] {StructDecl.class, String.class});
Class<? extends TokenPropertyRef> c = tokenPropToModelMap.get(y.getText());
Constructor<? extends TokenPropertyRef> ctor = c.getConstructor(StructDecl.class, String.class);
TokenPropertyRef ref =
(TokenPropertyRef)ctor.newInstance(nodeContext, getTokenLabel(x.getText()));
ctor.newInstance(nodeContext, getTokenLabel(x.getText()));
return ref;
}
catch (Exception e) {
@@ -251,10 +251,10 @@ public class ActionTranslator implements ActionSplitterListener {
// $text
RulePropertyRef getRulePropertyRef(Token prop) {
try {
Class c = thisRulePropToModelMap.get(prop.getText());
Constructor ctor = c.getConstructor(new Class[] {StructDecl.class, String.class});
Class<? extends RulePropertyRef> c = thisRulePropToModelMap.get(prop.getText());
Constructor<? extends RulePropertyRef> ctor = c.getConstructor(StructDecl.class, String.class);
RulePropertyRef ref =
(RulePropertyRef)ctor.newInstance(nodeContext, getRuleLabel(prop.getText()));
ctor.newInstance(nodeContext, getRuleLabel(prop.getText()));
return ref;
}
catch (Exception e) {
@@ -266,10 +266,10 @@ public class ActionTranslator implements ActionSplitterListener {
RulePropertyRef getRulePropertyRef(Token x, Token prop) {
Grammar g = factory.getGrammar();
try {
Class c = rulePropToModelMap.get(prop.getText());
Constructor ctor = c.getConstructor(new Class[] {StructDecl.class, String.class});
Class<? extends RulePropertyRef> c = rulePropToModelMap.get(prop.getText());
Constructor<? extends RulePropertyRef> ctor = c.getConstructor(StructDecl.class, String.class);
RulePropertyRef ref =
(RulePropertyRef)ctor.newInstance(nodeContext, getRuleLabel(x.getText()));
ctor.newInstance(nodeContext, getRuleLabel(x.getText()));
return ref;
}
catch (Exception e) {

View File

@@ -284,7 +284,7 @@ public class ParserFactory extends DefaultOutputModelFactory {
Alternative currentOuterMostAlt = getCurrentOuterMostAlt();
boolean actionRefsAsToken = currentOuterMostAlt.tokenRefsInActions.containsKey(ID.getText());
boolean actionRefsAsRule = currentOuterMostAlt.ruleRefsInActions.containsKey(ID.getText());
return op.getLabels().size()==0 && (actionRefsAsToken || actionRefsAsRule);
return op.getLabels().isEmpty() && (actionRefsAsToken || actionRefsAsRule);
}
// support

View File

@@ -129,7 +129,7 @@ public class Target {
* TODO: unused and should call CharSupport.getANTLRCharLiteralForChar anyway
*/
public String getTargetCharLiteralCharValue(int c) {
StringBuffer buf = new StringBuffer();
StringBuilder buf = new StringBuilder();
buf.append('\'');
if ( c< Lexer.MIN_CHAR_VALUE ) return "'\u0000'";
if ( c<targetCharValueEscape.length &&
@@ -162,7 +162,7 @@ public class Target {
*/
public String getTarget64BitStringFromValue(long word) {
int numHexDigits = 8*2;
StringBuffer buf = new StringBuffer(numHexDigits+2);
StringBuilder buf = new StringBuilder(numHexDigits+2);
buf.append("0x");
String digits = Long.toHexString(word);
digits = digits.toUpperCase();
@@ -199,7 +199,7 @@ public class Target {
return null;
}
StringBuffer buf = new StringBuffer();
StringBuilder buf = new StringBuilder();
if ( quoted ) {
buf.append('"');
}
@@ -241,7 +241,7 @@ public class Target {
String literal, boolean addQuotes)
{
StringBuilder sb = new StringBuilder();
StringBuffer is = new StringBuffer(literal);
StringBuilder is = new StringBuilder(literal);
if ( addQuotes ) sb.append('"');

View File

@@ -174,9 +174,9 @@ public class Utils {
}
/** Find exact object type or sublass of cl in list */
public static Object find(List<?> ops, Class cl) {
public static <T> T find(List<?> ops, Class<T> cl) {
for (Object o : ops) {
if ( cl.isInstance(o) ) return o;
if ( cl.isInstance(o) ) return cl.cast(o);
// if ( o.getClass() == cl ) return o;
}
return null;

View File

@@ -252,10 +252,10 @@ public class SymbolChecks {
public void checkForRuleArgumentAndReturnValueConflicts(Rule r) {
if ( r.retvals!=null ) {
Set conflictingKeys = r.retvals.intersection(r.args);
Set<String> conflictingKeys = r.retvals.intersection(r.args);
if (conflictingKeys!=null) {
for (Iterator it = conflictingKeys.iterator(); it.hasNext();) {
String key = (String) it.next();
for (Iterator<String> it = conflictingKeys.iterator(); it.hasNext();) {
String key = it.next();
errMgr.grammarError(
ErrorType.ARG_RETVAL_CONFLICT,
g.fileName,

View File

@@ -73,7 +73,7 @@ public class AttributeDict {
public LinkedHashMap<String, Attribute> attributes =
new LinkedHashMap<String, Attribute>();
public AttributeDict() {;}
public AttributeDict() {}
public AttributeDict(DictType type) { this.type = type; }
public Attribute add(Attribute a) { a.dict = this; return attributes.put(a.name, a); }
@@ -88,19 +88,19 @@ public class AttributeDict {
/** Return the set of keys that collide from
* this and other.
*/
public Set intersection(AttributeDict other) {
public Set<String> intersection(AttributeDict other) {
if ( other==null || other.size()==0 || size()==0 ) {
return null;
}
Set<String> inter = new HashSet<String>();
Set thisKeys = attributes.keySet();
for (Iterator it = thisKeys.iterator(); it.hasNext();) {
String key = (String) it.next();
Set<String> thisKeys = attributes.keySet();
for (Iterator<String> it = thisKeys.iterator(); it.hasNext();) {
String key = it.next();
if ( other.attributes.get(key)!=null ) {
inter.add(key);
}
}
if ( inter.size()==0 ) {
if ( inter.isEmpty() ) {
return null;
}
return inter;

View File

@@ -38,11 +38,11 @@ import java.util.Map;
public class BlockAST extends GrammarASTWithOptions implements RuleElementAST {
// TODO: maybe I need a Subrule object like Rule so these options mov to that?
/** What are the default options for a subrule? */
public static final Map defaultBlockOptions =
new HashMap() {{put("greedy","true");}};
public static final Map<String, String> defaultBlockOptions =
new HashMap<String, String>() {{put("greedy","true");}};
public static final Map defaultLexerBlockOptions =
new HashMap() {{put("greedy","true");}};
public static final Map<String, String> defaultLexerBlockOptions =
new HashMap<String, String>() {{put("greedy","true");}};
public BlockAST(GrammarAST node) {
super(node);

View File

@@ -56,7 +56,7 @@ public class GrammarAST extends CommonTree {
public String textOverride;
public GrammarAST() {;}
public GrammarAST() {}
public GrammarAST(Token t) { super(t); }
public GrammarAST(GrammarAST node) {
super(node);
@@ -99,7 +99,7 @@ public class GrammarAST extends CommonTree {
List<GrammarAST> nodes = new ArrayList<GrammarAST>();
List<GrammarAST> work = new LinkedList<GrammarAST>();
work.add(this);
GrammarAST t = null;
GrammarAST t;
while ( work.size()>0 ) {
t = work.remove(0);
if ( types.contains(t.getType()) ) nodes.add(t);
@@ -121,7 +121,7 @@ public class GrammarAST extends CommonTree {
* If not a rule element, just returns null.
*/
public String getAltLabel() {
List ancestors = this.getAncestors();
List<? extends Tree> ancestors = this.getAncestors();
if ( ancestors==null ) return null;
for (int i=ancestors.size()-1; i>=0; i--) {
GrammarAST p = (GrammarAST)ancestors.get(i);
@@ -228,7 +228,7 @@ public class GrammarAST extends CommonTree {
GrammarASTAdaptor adaptor = new GrammarASTAdaptor(input);
CommonTreeNodeStream nodes =
new CommonTreeNodeStream(adaptor, this);
StringBuffer buf = new StringBuffer();
StringBuilder buf = new StringBuilder();
GrammarAST o = (GrammarAST)nodes.LT(1);
int type = adaptor.getType(o);
while ( type!=Token.EOF ) {