Merge pull request #329 from parrt/master

Lots of little fixes thanks to Coverity Scan
Terence Parr 2013-08-31 17:50:25 -07:00
commit 6d1d0e0488
19 changed files with 93 additions and 63 deletions

View File

@@ -1,5 +1,9 @@
ANTLR v4 Honey Badger
+August 31, 2013
+* Lots of little fixes thanks to Coverity Scan
August 7, 2013
* [BREAKING CHANGE] Altered left-recursion elimination to be simpler. Now,

View File

@@ -297,7 +297,7 @@ public class DefaultErrorStrategy implements ANTLRErrorStrategy {
{
TokenStream tokens = recognizer.getInputStream();
String input;
-if (tokens instanceof TokenStream) {
+if ( tokens!=null ) {
if ( e.getStartToken().getType()==Token.EOF ) input = "<EOF>";
else input = tokens.getText(e.getStartToken(), e.getOffendingToken());
}

View File

@@ -259,7 +259,7 @@ public class LexerATNSimulator extends ATNSimulator {
if (s.edges == null || t < MIN_DFA_EDGE || t > MAX_DFA_EDGE) {
return null;
}
DFAState target = s.edges[t - MIN_DFA_EDGE];
if (debug && target != null) {
System.out.println("reuse state "+s.stateNumber+
@@ -619,7 +619,6 @@ public class LexerATNSimulator extends ATNSimulator {
System.out.println("EDGE "+p+" -> "+q+" upon "+((char)t));
}
-DFA dfa = decisionToDFA[mode];
synchronized (p) {
if ( p.edges==null ) {
// make room for tokens 1..n and -1 masquerading as index 0

View File

@@ -173,8 +173,10 @@ public abstract class PredictionContext {
boolean rootIsWildcard,
DoubleKeyMap<PredictionContext,PredictionContext,PredictionContext> mergeCache)
{
+assert a!=null && b!=null; // must be empty context, never null
// share same graph if both same
-if ( (a==null&&b==null) || a==b || (a!=null&&a.equals(b)) ) return a;
+if ( a==b || a.equals(b) ) return a;
if ( a instanceof SingletonPredictionContext && b instanceof SingletonPredictionContext) {
return mergeSingletons((SingletonPredictionContext)a,
@@ -705,20 +707,6 @@ public abstract class PredictionContext {
// return toString(recog, ParserRuleContext.EMPTY);
}
-// recog null unless ParserRuleContext, in which case we use subclass toString(...)
-public String toString(@Nullable Recognizer<?,?> recog, RuleContext stop) {
-StringBuilder buf = new StringBuilder();
-PredictionContext p = this;
-buf.append("[");
-// while ( p != null && p != stop ) {
-// if ( !p.isEmpty() ) buf.append(p.returnState);
-// if ( p.parent != null && !p.parent.isEmpty() ) buf.append(" ");
-// p = p.parent;
-// }
-buf.append("]");
-return buf.toString();
-}
public String[] toStrings(Recognizer<?, ?> recognizer, int currentState) {
return toStrings(recognizer, EMPTY, currentState);
}

View File

@@ -67,6 +67,7 @@ public class DFASerializer {
}
String output = buf.toString();
if ( output.length()==0 ) return null;
//return Utils.sortLinesInString(output);
return output;
}

View File

@@ -81,14 +81,14 @@ public class GraphicsSupport {
public static void saveImage(final JComponent comp, String fileName)
throws IOException, PrintException
{
-if (fileName.endsWith(".ps") || fileName.endsWith(".eps") ) {
+if ( fileName.endsWith(".ps") || fileName.endsWith(".eps") ) {
DocFlavor flavor = DocFlavor.SERVICE_FORMATTED.PRINTABLE;
String mimeType = "application/postscript";
StreamPrintServiceFactory[] factories =
StreamPrintServiceFactory.lookupStreamPrintServiceFactories(flavor, mimeType);
System.out.println(Arrays.toString(factories));
-FileOutputStream out = new FileOutputStream(fileName);
if (factories.length > 0) {
+FileOutputStream out = new FileOutputStream(fileName);
PrintService service = factories[0].getPrintService(out);
SimpleDoc doc = new SimpleDoc(new Printable() {
@Override
@@ -115,6 +115,7 @@ public class GraphicsSupport {
DocPrintJob job = service.createPrintJob();
PrintRequestAttributeSet attributes = new HashPrintRequestAttributeSet();
job.print(doc, attributes);
+out.close();
}
} else {
// parrt: works with [image/jpeg, image/png, image/x-png, image/vnd.wap.wbmp, image/bmp, image/gif]

View File

@@ -74,13 +74,21 @@ public class Interval {
@Override
public boolean equals(Object o) {
-if ( o==null ) {
+if ( o==null || !(o instanceof Interval) ) {
return false;
}
Interval other = (Interval)o;
return this.a==other.a && this.b==other.b;
}
+@Override
+public int hashCode() {
+int hash = 23;
+hash = hash * 31 + a;
+hash = hash * 31 + b;
+return hash;
+}
/** Does this start completely before other? Disjoint */
public boolean startsBeforeDisjoint(Interval other) {
return this.a<other.a && this.b<other.a;
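Several of these fixes (Interval here, MutableInt later in this commit) pair an equals override with a matching hashCode. A minimal standalone sketch of why the pair matters for hash-based collections; the Point class below is hypothetical and only mirrors the Interval fix, it is not ANTLR code:

import java.util.HashSet;
import java.util.Set;

// Hypothetical value class illustrating the equals/hashCode contract.
class Point {
    final int a, b;
    Point(int a, int b) { this.a = a; this.b = b; }

    @Override
    public boolean equals(Object o) {
        if ( o==null || !(o instanceof Point) ) return false;
        Point other = (Point)o;
        return a==other.a && b==other.b;
    }

    // Without this override, logically equal Points can land in different
    // hash buckets and Set.contains() reports false for an equal key.
    @Override
    public int hashCode() {
        int hash = 23;
        hash = hash * 31 + a;
        hash = hash * 31 + b;
        return hash;
    }

    public static void main(String[] args) {
        Set<Point> seen = new HashSet<Point>();
        seen.add(new Point(1, 2));
        System.out.println(seen.contains(new Point(1, 2))); // true only with both overrides
    }
}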

View File

@@ -30,7 +30,7 @@
package org.antlr.v4.runtime.misc;
-import java.awt.Window;
+import java.awt.*;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.BufferedWriter;
@@ -94,8 +94,12 @@ public class Utils {
public static void writeFile(String fileName, String content) throws IOException {
FileWriter fw = new FileWriter(fileName);
Writer w = new BufferedWriter(fw);
-w.write(content);
-w.close();
+try {
+w.write(content);
+}
+finally {
+w.close();
+}
}
public static void waitForClose(final Window window) throws InterruptedException {
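Several hunks in this commit (Utils.writeFile above, Trees.writePS and Tool.writeDOTFile below) move close() into a finally block so the stream is released even when the write throws. A minimal standalone sketch of that pattern under pre-try-with-resources Java; the class and file names are only examples, not from the diff:

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

public class WriteFileSketch {
    // Close in finally so an exception during write() cannot leak the file handle.
    static void writeFile(String fileName, String content) throws IOException {
        Writer w = new BufferedWriter(new FileWriter(fileName));
        try {
            w.write(content);
        }
        finally {
            w.close();
        }
    }

    public static void main(String[] args) throws IOException {
        writeFile("example.txt", "hello\n"); // hypothetical path
    }
}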

View File

@@ -68,8 +68,12 @@ public class Trees {
String ps = getPS(t, ruleNames, fontName, fontSize);
FileWriter f = new FileWriter(fileName);
BufferedWriter bw = new BufferedWriter(f);
-bw.write(ps);
-bw.close();
+try {
+bw.write(ps);
+}
+finally {
+bw.close();
+}
}
public static void writePS(Tree t, @Nullable List<String> ruleNames, String fileName)

View File

@@ -385,10 +385,8 @@ public class Tool {
}
public void processNonCombinedGrammar(Grammar g, boolean gencode) {
-if ( g.ast!=null && internalOption_PrintGrammarTree ) System.out.println(g.ast.toStringTree());
-//g.ast.inspect();
-if ( g.ast.hasErrors ) return;
+if ( g.ast==null || g.ast.hasErrors ) return;
+if ( internalOption_PrintGrammarTree ) System.out.println(g.ast.toStringTree());
boolean ruleFail = checkForRuleIssues(g);
if ( ruleFail ) return;
@@ -781,8 +779,12 @@ public class Tool {
protected void writeDOTFile(Grammar g, String name, String dot) throws IOException {
Writer fw = getOutputFileWriter(g, name + ".dot");
-fw.write(dot);
-fw.close();
+try {
+fw.write(dot);
+}
+finally {
+fw.close();
+}
}
public void help() {

View File

@@ -137,7 +137,7 @@ public class LeftRecursiveRuleAnalyzer extends LeftRecursiveRuleWalker {
}
altAssociativity.put(alt, assoc);
-System.out.println("setAltAssoc: op " + alt + ": " + t.getText()+", assoc="+assoc);
+// System.out.println("setAltAssoc: op " + alt + ": " + t.getText()+", assoc="+assoc);
}
@Override

View File

@@ -111,8 +111,7 @@ public class ATNPrinter {
}
else if ( t instanceof AtomTransition ) {
AtomTransition a = (AtomTransition)t;
-String label = a.toString();
-if ( g!=null ) label = g.getTokenDisplayName(a.label);
+String label = g.getTokenDisplayName(a.label);
buf.append("-").append(label).append("->").append(getStateString(t.target)).append('\n');
}
else {

View File

@@ -353,7 +353,7 @@ public class ATNSerializer {
throw new UnsupportedOperationException(new InvalidClassException(ATN.class.getName(), reason));
}
-int grammarType = ATNSimulator.toInt(data[p++]);
+p++; // skip grammarType
int maxType = ATNSimulator.toInt(data[p++]);
buf.append("max type ").append(maxType).append("\n");
int nstates = ATNSimulator.toInt(data[p++]);

View File

@@ -37,14 +37,9 @@ import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.tool.ErrorType;
import org.antlr.v4.tool.Grammar;
import org.stringtemplate.v4.AutoIndentWriter;
import org.stringtemplate.v4.NumberRenderer;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STErrorListener;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupFile;
import org.stringtemplate.v4.STWriter;
import org.stringtemplate.v4.StringRenderer;
import org.stringtemplate.v4.misc.STMessage;
import java.io.IOException;
import java.io.Writer;
@@ -226,13 +221,13 @@ public class CodeGenerator {
public void write(ST code, String fileName) {
try {
-long start = System.currentTimeMillis();
+// long start = System.currentTimeMillis();
Writer w = tool.getOutputFileWriter(g, fileName);
STWriter wr = new AutoIndentWriter(w);
wr.setLineWidth(lineWidth);
code.write(wr);
w.close();
-long stop = System.currentTimeMillis();
+// long stop = System.currentTimeMillis();
}
catch (IOException ioe) {
tool.errMgr.toolError(ErrorType.CANNOT_WRITE_FILE,

View File

@@ -97,7 +97,9 @@ public class Graph<T> {
n = tNode;
if ( !visited.contains(n) ) break;
}
-DFS(n, visited, sorted);
+if (n!=null) { // if at least one unvisited
+DFS(n, visited, sorted);
+}
}
return sorted;
}

View File

@@ -34,6 +34,15 @@ public class MutableInt extends Number implements Comparable<Number> {
public int v;
public MutableInt(int v) { this.v = v; }
+@Override
+public boolean equals(Object o) {
+if ( o instanceof Number ) return v == ((Number)o).intValue();
+return false;
+}
+@Override public int hashCode() { return v; }
@Override public int compareTo(Number o) { return v-o.intValue(); }
@Override public int intValue() { return v; }
@Override public long longValue() { return v; }

View File

@@ -30,6 +30,7 @@
package org.antlr.v4.parse;
import org.antlr.runtime.Token;
import org.antlr.v4.Tool;
+import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.tool.ErrorType;
@@ -59,10 +60,12 @@ public class TokenVocabParser {
Map<String,Integer> tokens = new LinkedHashMap<String,Integer>();
int maxTokenType = -1;
File fullFile = getImportedVocabFile();
+FileReader fr = null;
+BufferedReader br = null;
try {
Pattern tokenDefPattern = Pattern.compile("([^\n]+?)[ \\t]*?=[ \\t]*?([0-9]+)");
-FileReader fr = new FileReader(fullFile);
-BufferedReader br = new BufferedReader(fr);
+fr = new FileReader(fullFile);
+br = new BufferedReader(fr);
String tokenDef = br.readLine();
int lineNum = 1;
while ( tokenDef!=null ) {
@@ -70,7 +73,17 @@ public class TokenVocabParser {
if ( matcher.find() ) {
String tokenID = matcher.group(1);
String tokenTypeS = matcher.group(2);
-int tokenType = Integer.valueOf(tokenTypeS);
+int tokenType;
+try {
+tokenType = Integer.valueOf(tokenTypeS);
+}
+catch (NumberFormatException nfe) {
+tool.errMgr.toolError(ErrorType.TOKENS_FILE_SYNTAX_ERROR,
+vocabName + CodeGenerator.VOCAB_FILE_EXTENSION,
+" bad token type: "+tokenTypeS,
+lineNum);
+tokenType = Token.INVALID_TOKEN_TYPE;
+}
tool.log("grammar", "import "+tokenID+"="+tokenType);
tokens.put(tokenID, tokenType);
maxTokenType = Math.max(maxTokenType,tokenType);
@@ -86,22 +99,26 @@ public class TokenVocabParser {
}
tokenDef = br.readLine();
}
-br.close();
}
catch (FileNotFoundException fnfe) {
tool.errMgr.toolError(ErrorType.CANNOT_FIND_TOKENS_FILE,
fullFile);
}
catch (IOException ioe) {
tool.errMgr.toolError(ErrorType.ERROR_READING_TOKENS_FILE,
fullFile,
ioe);
}
catch (Exception e) {
tool.errMgr.toolError(ErrorType.ERROR_READING_TOKENS_FILE,
fullFile,
e);
}
+finally {
+try {
+if ( br!=null ) br.close();
+}
+catch (IOException ioe) {
+tool.errMgr.toolError(ErrorType.ERROR_READING_TOKENS_FILE,
+fullFile,
+ioe);
+}
+}
return tokens;
}
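The TokenVocabParser change above hoists the readers out of the try block, guards Integer.valueOf with a NumberFormatException handler, and closes the reader in finally. A standalone sketch of the same parse-and-validate idea, reusing the token-definition pattern from the diff; the sample input line and error value are made up for illustration:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class TokenDefSketch {
    // Same pattern as the diff: "NAME = 42" with optional spaces/tabs around '='.
    static final Pattern TOKEN_DEF = Pattern.compile("([^\n]+?)[ \\t]*?=[ \\t]*?([0-9]+)");

    public static void main(String[] args) {
        String line = "ID=25";                      // made-up sample .tokens line
        Matcher m = TOKEN_DEF.matcher(line);
        if ( m.find() ) {
            String name = m.group(1);
            int type;
            try {
                type = Integer.valueOf(m.group(2)); // guarded like the fix above
            }
            catch (NumberFormatException nfe) {
                type = -1;                          // report and continue instead of crashing
            }
            System.out.println(name + " -> " + type);
        }
    }
}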

View File

@@ -96,7 +96,7 @@ public class RuleCollector extends GrammarTreeVisitor {
if ( locals!=null ) {
r.locals = ScopeParser.parseTypedArgList(locals, locals.getText(), g.tool.errMgr);
r.locals.type = AttributeDict.DictType.LOCAL;
-r.locals.ast = returns;
+r.locals.ast = locals;
}
for (GrammarAST a : actions) {

View File

@@ -334,22 +334,19 @@ public class GrammarTransformPipeline {
List<GrammarAST> rulesWeMoved = new ArrayList<GrammarAST>();
GrammarASTWithOptions[] rules;
if (combinedRulesRoot.getChildCount() > 0) {
-rules = ((List<?>)combinedRulesRoot.getChildren()).toArray(new GrammarASTWithOptions[0]);
+rules = combinedRulesRoot.getChildren().toArray(new GrammarASTWithOptions[0]);
}
else {
rules = new GrammarASTWithOptions[0];
}
-if ( rules!=null ) {
-for (GrammarASTWithOptions r : rules) {
-String ruleName = r.getChild(0).getText();
-if (Grammar.isTokenName(ruleName)) {
-lexerRulesRoot.addChild((Tree)adaptor.dupTree(r));
-rulesWeMoved.add(r);
-}
+for (GrammarASTWithOptions r : rules) {
+String ruleName = r.getChild(0).getText();
+if (Grammar.isTokenName(ruleName)) {
+lexerRulesRoot.addChild((Tree)adaptor.dupTree(r));
+rulesWeMoved.add(r);
+}
+}
int nLexicalRules = rulesWeMoved.size();
for (GrammarAST r : rulesWeMoved) {
combinedRulesRoot.deleteChild( r );
}