Initialize the elements of the decisionToDFA arrays when the array is created

This commit is contained in:
Sam Harwell 2013-04-12 13:17:53 -05:00
parent 31bf3ceef0
commit 05f667d2e5
9 changed files with 25 additions and 33 deletions

View File

@ -751,7 +751,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
boolean seenOne = false; boolean seenOne = false;
for (int d = 0; d < _interp.decisionToDFA.length; d++) { for (int d = 0; d < _interp.decisionToDFA.length; d++) {
DFA dfa = _interp.decisionToDFA[d]; DFA dfa = _interp.decisionToDFA[d];
if ( dfa!=null ) { if ( !dfa.states.isEmpty() ) {
if ( seenOne ) System.out.println(); if ( seenOne ) System.out.println();
System.out.println("Decision " + dfa.decision + ":"); System.out.println("Decision " + dfa.decision + ":");
System.out.print(dfa.toString(getTokenNames())); System.out.print(dfa.toString(getTokenNames()));

View File

@ -119,15 +119,6 @@ public class LexerATNSimulator extends ATNSimulator {
{ {
super(atn,sharedContextCache); super(atn,sharedContextCache);
this.decisionToDFA = decisionToDFA; this.decisionToDFA = decisionToDFA;
if ( decisionToDFA[Lexer.DEFAULT_MODE]==null ) { // create all mode dfa
synchronized (this.decisionToDFA) {
if ( decisionToDFA[Lexer.DEFAULT_MODE]==null ) { // create all mode dfa
for (int i=0; i<atn.modeToStartState.size(); i++) {
this.decisionToDFA[i] = new DFA(atn.modeToStartState.get(i));
}
}
}
}
this.recog = recog; this.recog = recog;
} }

View File

@ -325,18 +325,7 @@ public class ParserATNSimulator extends ATNSimulator {
_outerContext = outerContext; _outerContext = outerContext;
predict_calls++; predict_calls++;
DFA dfa = decisionToDFA[decision]; DFA dfa = decisionToDFA[decision];
// First, synchronize on the array of DFA for this parser
// so that we can get the DFA for a decision or create and set one
if ( dfa==null ) { // only create one if not there
synchronized (decisionToDFA) {
dfa = decisionToDFA[decision];
if ( dfa==null ) { // the usual double-check
DecisionState startState = atn.decisionToState.get(decision);
decisionToDFA[decision] = new DFA(startState, decision);
dfa = decisionToDFA[decision];
}
}
}
// Now we are certain to have a specific decision's DFA // Now we are certain to have a specific decision's DFA
// But, do we still need an initial state? // But, do we still need an initial state?
if ( dfa.s0==null ) { // recheck if ( dfa.s0==null ) { // recheck

View File

@ -838,6 +838,9 @@ public static final ATN _ATN =
ATNSimulator.deserialize(_serializedATN.toCharArray()); ATNSimulator.deserialize(_serializedATN.toCharArray());
static { static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
<! org.antlr.v4.tool.DOTGenerator dot = new org.antlr.v4.tool.DOTGenerator(null);!> <! org.antlr.v4.tool.DOTGenerator dot = new org.antlr.v4.tool.DOTGenerator(null);!>
<! System.out.println(dot.getDOT(_ATN.decisionToState.get(0), ruleNames, false));!> <! System.out.println(dot.getDOT(_ATN.decisionToState.get(0), ruleNames, false));!>
<! System.out.println(dot.getDOT(_ATN.ruleToStartState[2], ruleNames, false));!> <! System.out.println(dot.getDOT(_ATN.ruleToStartState[2], ruleNames, false));!>

View File

@ -53,7 +53,7 @@ public class LexerInterpreter implements TokenSource {
/** How to create token objects */ /** How to create token objects */
protected TokenFactory<?> _factory = CommonTokenFactory.DEFAULT; protected TokenFactory<?> _factory = CommonTokenFactory.DEFAULT;
protected final DFA[] _decisionToDFA = new DFA[1]; protected final DFA[] _decisionToDFA;
protected final PredictionContextCache _sharedContextCache = protected final PredictionContextCache _sharedContextCache =
new PredictionContextCache(); new PredictionContextCache();
@ -65,6 +65,10 @@ public class LexerInterpreter implements TokenSource {
public LexerInterpreter(LexerGrammar g) { public LexerInterpreter(LexerGrammar g) {
Tool antlr = new Tool(); Tool antlr = new Tool();
antlr.process(g,false); antlr.process(g,false);
_decisionToDFA = new DFA[g.atn.getNumberOfDecisions()];
for (int i = 0; i < _decisionToDFA.length; i++) {
_decisionToDFA[i] = new DFA(g.atn.getDecisionState(i), i);
}
interp = new LexerATNSimulator(g.atn,_decisionToDFA,_sharedContextCache); interp = new LexerATNSimulator(g.atn,_decisionToDFA,_sharedContextCache);
} }

View File

@ -46,15 +46,20 @@ import org.antlr.v4.tool.Grammar;
public class ParserInterpreter { public class ParserInterpreter {
public static class DummyParser extends Parser { public static class DummyParser extends Parser {
public final ATN atn;
public final DFA[] decisionToDFA; // not shared for interp public final DFA[] decisionToDFA; // not shared for interp
public final PredictionContextCache sharedContextCache = public final PredictionContextCache sharedContextCache =
new PredictionContextCache(); new PredictionContextCache();
public Grammar g; public Grammar g;
public DummyParser(Grammar g, TokenStream input) { public DummyParser(Grammar g, ATN atn, TokenStream input) {
super(input); super(input);
this.g = g; this.g = g;
decisionToDFA = new DFA[100]; this.atn = atn;
this.decisionToDFA = new DFA[atn.getNumberOfDecisions()];
for (int i = 0; i < decisionToDFA.length; i++) {
decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
}
} }
@Override @Override
@ -74,9 +79,7 @@ public class ParserInterpreter {
@Override @Override
public ATN getATN() { public ATN getATN() {
return null; return atn;
}
static {
} }
} }
@ -92,7 +95,7 @@ public class ParserInterpreter {
public ParserInterpreter(@NotNull Grammar g, @NotNull TokenStream input) { public ParserInterpreter(@NotNull Grammar g, @NotNull TokenStream input) {
Tool antlr = new Tool(); Tool antlr = new Tool();
antlr.process(g,false); antlr.process(g,false);
parser = new DummyParser(g, input); parser = new DummyParser(g, g.atn, input);
atnSimulator = atnSimulator =
new ParserATNSimulator(parser, g.atn, parser.decisionToDFA, new ParserATNSimulator(parser, g.atn, parser.decisionToDFA,
parser.sharedContextCache); parser.sharedContextCache);

View File

@ -236,7 +236,7 @@ public abstract class BaseTest {
ATN atn, ATN atn,
CharStream input) CharStream input)
{ {
LexerATNSimulator interp = new LexerATNSimulator(atn,new DFA[1],null); LexerATNSimulator interp = new LexerATNSimulator(atn,new DFA[] { new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE)) },null);
List<String> tokenTypes = new ArrayList<String>(); List<String> tokenTypes = new ArrayList<String>();
int ttype; int ttype;
boolean hitEOF = false; boolean hitEOF = false;

View File

@ -31,6 +31,7 @@
package org.antlr.v4.test; package org.antlr.v4.test;
import org.antlr.v4.automata.ParserATNFactory; import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.NoViableAltException; import org.antlr.v4.runtime.NoViableAltException;
import org.antlr.v4.runtime.atn.ATN; import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNState; import org.antlr.v4.runtime.atn.ATNState;
@ -381,7 +382,7 @@ public class TestATNInterpreter extends BaseTest {
int expected) int expected)
{ {
ATN lexatn = createATN(lg, true); ATN lexatn = createATN(lg, true);
LexerATNSimulator lexInterp = new LexerATNSimulator(lexatn,new DFA[1],null); LexerATNSimulator lexInterp = new LexerATNSimulator(lexatn,new DFA[] { new DFA(lexatn.modeToStartState.get(Lexer.DEFAULT_MODE)) },null);
IntegerList types = getTokenTypesViaATN(inputString, lexInterp); IntegerList types = getTokenTypesViaATN(inputString, lexInterp);
System.out.println(types); System.out.println(types);

View File

@ -32,6 +32,7 @@ package org.antlr.v4.test;
import org.antlr.v4.Tool; import org.antlr.v4.Tool;
import org.antlr.v4.automata.ParserATNFactory; import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.NoViableAltException; import org.antlr.v4.runtime.NoViableAltException;
import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.TokenStream;
@ -460,7 +461,7 @@ public class TestATNParserPrediction extends BaseTest {
Tool.internalOption_ShowATNConfigsInDFA = true; Tool.internalOption_ShowATNConfigsInDFA = true;
ATN lexatn = createATN(lg, true); ATN lexatn = createATN(lg, true);
LexerATNSimulator lexInterp = LexerATNSimulator lexInterp =
new LexerATNSimulator(lexatn,new DFA[1],new PredictionContextCache()); new LexerATNSimulator(lexatn,new DFA[] { new DFA(lexatn.modeToStartState.get(Lexer.DEFAULT_MODE)) },new PredictionContextCache());
IntegerList types = getTokenTypesViaATN(inputString, lexInterp); IntegerList types = getTokenTypesViaATN(inputString, lexInterp);
System.out.println(types); System.out.println(types);
@ -550,7 +551,7 @@ public class TestATNParserPrediction extends BaseTest {
// Tool.internalOption_ShowATNConfigsInDFA = true; // Tool.internalOption_ShowATNConfigsInDFA = true;
ATN lexatn = createATN(lg, true); ATN lexatn = createATN(lg, true);
LexerATNSimulator lexInterp = LexerATNSimulator lexInterp =
new LexerATNSimulator(lexatn,new DFA[1], new PredictionContextCache()); new LexerATNSimulator(lexatn,new DFA[] { new DFA(lexatn.getDecisionState(Lexer.DEFAULT_MODE)) }, new PredictionContextCache());
semanticProcess(lg); semanticProcess(lg);
g.importVocab(lg); g.importVocab(lg);