Initialize the elements of the decisionToDFA arrays when the array is created
parent 31bf3ceef0
commit 05f667d2e5
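The theme of the patch: every decisionToDFA array is now filled with DFA objects at the point where the array itself is created (a static initializer in the generated recognizer, and the interpreter constructors), so the ATN simulators no longer create DFAs lazily under a lock, and null checks on array elements give way to checks such as !dfa.states.isEmpty(). The following standalone sketch is not part of the commit; the class and method names are invented for illustration. It shows the eager-initialization pattern the hunks below apply:

import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.dfa.DFA;

// Illustrative sketch only: fill every slot of a decisionToDFA array up front,
// the way the generated static block and the interpreter constructors now do.
class DecisionToDFAExample {
	static DFA[] buildDecisionToDFA(ATN atn) {
		DFA[] decisionToDFA = new DFA[atn.getNumberOfDecisions()];
		for (int i = 0; i < decisionToDFA.length; i++) {
			// one DFA per decision, created eagerly, so simulators never see a null element
			decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
		}
		return decisionToDFA;
	}
}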
@@ -751,7 +751,7 @@ public abstract class Parser extends Recognizer<Token, ParserATNSimulator> {
 		boolean seenOne = false;
 		for (int d = 0; d < _interp.decisionToDFA.length; d++) {
 			DFA dfa = _interp.decisionToDFA[d];
-			if ( dfa!=null ) {
+			if ( !dfa.states.isEmpty() ) {
 				if ( seenOne ) System.out.println();
 				System.out.println("Decision " + dfa.decision + ":");
 				System.out.print(dfa.toString(getTokenNames()));

@@ -119,15 +119,6 @@ public class LexerATNSimulator extends ATNSimulator {
 	{
 		super(atn,sharedContextCache);
 		this.decisionToDFA = decisionToDFA;
-		if ( decisionToDFA[Lexer.DEFAULT_MODE]==null ) { // create all mode dfa
-			synchronized (this.decisionToDFA) {
-				if ( decisionToDFA[Lexer.DEFAULT_MODE]==null ) { // create all mode dfa
-					for (int i=0; i<atn.modeToStartState.size(); i++) {
-						this.decisionToDFA[i] = new DFA(atn.modeToStartState.get(i));
-					}
-				}
-			}
-		}
 		this.recog = recog;
 	}
 
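Since the constructor above no longer creates the per-mode DFAs, code that builds a LexerATNSimulator directly must pass an array that is already populated; the test changes further down do exactly that. A minimal sketch of that wiring, assuming a single-mode lexer ATN and passing null for the optional shared context cache (the class and helper names here are invented):

import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;

class LexerSimulatorWiring {
	// Build the one-entry mode DFA array up front and hand it to the simulator.
	static LexerATNSimulator wire(ATN lexerAtn) {
		DFA[] modeToDFA = new DFA[] {
			new DFA(lexerAtn.modeToStartState.get(Lexer.DEFAULT_MODE))
		};
		return new LexerATNSimulator(lexerAtn, modeToDFA, null);
	}
}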
@@ -325,18 +325,7 @@ public class ParserATNSimulator extends ATNSimulator {
 		_outerContext = outerContext;
 		predict_calls++;
 		DFA dfa = decisionToDFA[decision];
-		// First, synchronize on the array of DFA for this parser
-		// so that we can get the DFA for a decision or create and set one
-		if ( dfa==null ) { // only create one if not there
-			synchronized (decisionToDFA) {
-				dfa = decisionToDFA[decision];
-				if ( dfa==null ) { // the usual double-check
-					DecisionState startState = atn.decisionToState.get(decision);
-					decisionToDFA[decision] = new DFA(startState, decision);
-					dfa = decisionToDFA[decision];
-				}
-			}
-		}
+
 		// Now we are certain to have a specific decision's DFA
 		// But, do we still need an initial state?
 		if ( dfa.s0==null ) { // recheck

@@ -838,6 +838,9 @@ public static final ATN _ATN =
 		ATNSimulator.deserialize(_serializedATN.toCharArray());
 	static {
 		_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
+		for (int i = 0; i \< _ATN.getNumberOfDecisions(); i++) {
+			_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
+		}
 <!		org.antlr.v4.tool.DOTGenerator dot = new org.antlr.v4.tool.DOTGenerator(null);!>
 <!		System.out.println(dot.getDOT(_ATN.decisionToState.get(0), ruleNames, false));!>
 <!		System.out.println(dot.getDOT(_ATN.ruleToStartState[2], ruleNames, false));!>

@@ -53,7 +53,7 @@ public class LexerInterpreter implements TokenSource {
 	/** How to create token objects */
 	protected TokenFactory<?> _factory = CommonTokenFactory.DEFAULT;
 
-	protected final DFA[] _decisionToDFA = new DFA[1];
+	protected final DFA[] _decisionToDFA;
 	protected final PredictionContextCache _sharedContextCache =
 		new PredictionContextCache();
 
@@ -65,6 +65,10 @@ public class LexerInterpreter implements TokenSource {
 	public LexerInterpreter(LexerGrammar g) {
 		Tool antlr = new Tool();
 		antlr.process(g,false);
+		_decisionToDFA = new DFA[g.atn.getNumberOfDecisions()];
+		for (int i = 0; i < _decisionToDFA.length; i++) {
+			_decisionToDFA[i] = new DFA(g.atn.getDecisionState(i), i);
+		}
 		interp = new LexerATNSimulator(g.atn,_decisionToDFA,_sharedContextCache);
 	}
 
@@ -46,15 +46,20 @@ import org.antlr.v4.tool.Grammar;
 
 public class ParserInterpreter {
 	public static class DummyParser extends Parser {
+		public final ATN atn;
 		public final DFA[] decisionToDFA; // not shared for interp
 		public final PredictionContextCache sharedContextCache =
 			new PredictionContextCache();
 
 		public Grammar g;
-		public DummyParser(Grammar g, TokenStream input) {
+		public DummyParser(Grammar g, ATN atn, TokenStream input) {
 			super(input);
 			this.g = g;
-			decisionToDFA = new DFA[100];
+			this.atn = atn;
+			this.decisionToDFA = new DFA[atn.getNumberOfDecisions()];
+			for (int i = 0; i < decisionToDFA.length; i++) {
+				decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
+			}
 		}
 
 		@Override

@@ -74,9 +79,7 @@ public class ParserInterpreter {
 
 		@Override
 		public ATN getATN() {
-			return null;
-		}
-		static {
+			return atn;
 		}
 	}
 
@@ -92,7 +95,7 @@ public class ParserInterpreter {
 	public ParserInterpreter(@NotNull Grammar g, @NotNull TokenStream input) {
 		Tool antlr = new Tool();
 		antlr.process(g,false);
-		parser = new DummyParser(g, input);
+		parser = new DummyParser(g, g.atn, input);
 		atnSimulator =
 			new ParserATNSimulator(parser, g.atn, parser.decisionToDFA,
 								   parser.sharedContextCache);

@@ -236,7 +236,7 @@ public abstract class BaseTest {
 									  ATN atn,
 									  CharStream input)
 	{
-		LexerATNSimulator interp = new LexerATNSimulator(atn,new DFA[1],null);
+		LexerATNSimulator interp = new LexerATNSimulator(atn,new DFA[] { new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE)) },null);
 		List<String> tokenTypes = new ArrayList<String>();
 		int ttype;
 		boolean hitEOF = false;

@@ -31,6 +31,7 @@
 package org.antlr.v4.test;
 
 import org.antlr.v4.automata.ParserATNFactory;
+import org.antlr.v4.runtime.Lexer;
 import org.antlr.v4.runtime.NoViableAltException;
 import org.antlr.v4.runtime.atn.ATN;
 import org.antlr.v4.runtime.atn.ATNState;
@@ -381,7 +382,7 @@ public class TestATNInterpreter extends BaseTest {
 							  int expected)
 	{
 		ATN lexatn = createATN(lg, true);
-		LexerATNSimulator lexInterp = new LexerATNSimulator(lexatn,new DFA[1],null);
+		LexerATNSimulator lexInterp = new LexerATNSimulator(lexatn,new DFA[] { new DFA(lexatn.modeToStartState.get(Lexer.DEFAULT_MODE)) },null);
 		IntegerList types = getTokenTypesViaATN(inputString, lexInterp);
 		System.out.println(types);
 
@@ -32,6 +32,7 @@ package org.antlr.v4.test;
 
 import org.antlr.v4.Tool;
 import org.antlr.v4.automata.ParserATNFactory;
+import org.antlr.v4.runtime.Lexer;
 import org.antlr.v4.runtime.NoViableAltException;
 import org.antlr.v4.runtime.ParserRuleContext;
 import org.antlr.v4.runtime.TokenStream;
@@ -460,7 +461,7 @@ public class TestATNParserPrediction extends BaseTest {
 		Tool.internalOption_ShowATNConfigsInDFA = true;
 		ATN lexatn = createATN(lg, true);
 		LexerATNSimulator lexInterp =
-			new LexerATNSimulator(lexatn,new DFA[1],new PredictionContextCache());
+			new LexerATNSimulator(lexatn,new DFA[] { new DFA(lexatn.modeToStartState.get(Lexer.DEFAULT_MODE)) },new PredictionContextCache());
 		IntegerList types = getTokenTypesViaATN(inputString, lexInterp);
 		System.out.println(types);
 
@@ -550,7 +551,7 @@ public class TestATNParserPrediction extends BaseTest {
 //		Tool.internalOption_ShowATNConfigsInDFA = true;
 		ATN lexatn = createATN(lg, true);
 		LexerATNSimulator lexInterp =
-			new LexerATNSimulator(lexatn,new DFA[1], new PredictionContextCache());
+			new LexerATNSimulator(lexatn,new DFA[] { new DFA(lexatn.getDecisionState(Lexer.DEFAULT_MODE)) }, new PredictionContextCache());
 
 		semanticProcess(lg);
 		g.importVocab(lg);