Merge pull request #724 from sharwell/fix-709
Deterministic output improvements
commit badc21bf49
ListenerFile.java
@@ -36,8 +36,8 @@ import org.antlr.v4.tool.Rule;
 import org.antlr.v4.tool.ast.ActionAST;
 import org.antlr.v4.tool.ast.AltAST;
 
-import java.util.HashMap;
-import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -52,13 +52,13 @@ public class ListenerFile extends OutputFile {
 	/**
 	 * The names of all listener contexts.
 	 */
-	public Set<String> listenerNames = new HashSet<String>();
+	public Set<String> listenerNames = new LinkedHashSet<String>();
 	/**
 	 * For listener contexts created for a labeled outer alternative, maps from
 	 * a listener context name to the name of the rule which defines the
 	 * context.
 	 */
-	public Map<String, String> listenerLabelRuleNames = new HashMap<String, String>();
+	public Map<String, String> listenerLabelRuleNames = new LinkedHashMap<String, String>();
 
 	@ModelElement public Action header;
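For context on why this swap matters (a minimal standalone sketch, not part of the commit): a HashSet returns its elements in hash-bucket order, which depends on hash codes and table capacity rather than on the order the tool collected the names, so generated listener methods could come out in a different order from one run or JDK to another. A LinkedHashSet replays insertion order. The rule names below are invented for illustration.

import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;

public class SetOrderDemo {
	public static void main(String[] args) {
		String[] collected = {"prog", "stat", "expr", "atom"}; // hypothetical rule names

		Set<String> hashed = new HashSet<String>();
		Set<String> linked = new LinkedHashSet<String>();
		for (String name : collected) {
			hashed.add(name);
			linked.add(name);
		}

		// Bucket order: determined by hashCode() and table size, not by when the
		// element was added, so it can differ across JDKs or capacity changes.
		System.out.println("HashSet:       " + hashed);
		// Insertion order: always [prog, stat, expr, atom].
		System.out.println("LinkedHashSet: " + linked);
	}
}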
VisitorFile.java
@@ -36,8 +36,8 @@ import org.antlr.v4.tool.Rule;
 import org.antlr.v4.tool.ast.ActionAST;
 import org.antlr.v4.tool.ast.AltAST;
 
-import java.util.HashMap;
-import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -49,13 +49,13 @@ public class VisitorFile extends OutputFile {
 	/**
 	 * The names of all rule contexts which may need to be visited.
 	 */
-	public Set<String> visitorNames = new HashSet<String>();
+	public Set<String> visitorNames = new LinkedHashSet<String>();
 	/**
 	 * For rule contexts created for a labeled outer alternative, maps from
 	 * a listener context name to the name of the rule which defines the
 	 * context.
 	 */
-	public Map<String, String> visitorLabelRuleNames = new HashMap<String, String>();
+	public Map<String, String> visitorLabelRuleNames = new LinkedHashMap<String, String>();
 
 	@ModelElement public Action header;
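The label maps get the same treatment. As an illustrative sketch only (the label and rule names are invented, and this is not code from the commit): a LinkedHashMap hands entries back in the order they were put in, so templates that walk visitorLabelRuleNames or listenerLabelRuleNames see the labeled alternatives in a stable order.

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class MapOrderDemo {
	public static void main(String[] args) {
		Map<String, String> hashed = new HashMap<String, String>();
		Map<String, String> linked = new LinkedHashMap<String, String>();

		// Hypothetical labeled alternatives mapped to the rule that defines them.
		String[][] labels = {{"AddExpr", "expr"}, {"MulExpr", "expr"}, {"IntAtom", "atom"}};
		for (String[] entry : labels) {
			hashed.put(entry[0], entry[1]);
			linked.put(entry[0], entry[1]);
		}

		System.out.println("HashMap:       " + hashed); // bucket order, may vary
		System.out.println("LinkedHashMap: " + linked); // always {AddExpr=expr, MulExpr=expr, IntAtom=atom}
	}
}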
Graph.java
@@ -32,7 +32,7 @@ package org.antlr.v4.misc;
 import org.antlr.v4.runtime.misc.OrderedHashSet;
 
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -59,7 +59,7 @@ public class Graph<T> {
 	}
 
 	/** Map from node payload to node containing it */
-	protected Map<T,Node<T>> nodes = new HashMap<T,Node<T>>();
+	protected Map<T,Node<T>> nodes = new LinkedHashMap<T,Node<T>>();
 
 	public void addEdge(T a, T b) {
 		//System.out.println("add edge "+a+" to "+b);
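The nodes map is what the topological sort walks, so backing it with a LinkedHashMap means traversal starts from nodes in the order addEdge first saw them, and ties between otherwise unordered nodes resolve the same way on every run. Below is a stripped-down sketch of that first-seen-order registry pattern; it is not ANTLR's actual Graph code, just an illustration of the idea.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class NodeRegistryDemo {
	// Map from node payload to its successor list; LinkedHashMap keeps first-seen order.
	static Map<String, List<String>> nodes = new LinkedHashMap<String, List<String>>();

	static void addEdge(String a, String b) {
		node(a).add(b); // record edge a -> b
		node(b);        // make sure the target is registered too
	}

	static List<String> node(String payload) {
		List<String> targets = nodes.get(payload);
		if (targets == null) {
			targets = new ArrayList<String>();
			nodes.put(payload, targets);
		}
		return targets;
	}

	public static void main(String[] args) {
		addEdge("C", "F");
		addEdge("A", "B");
		// Key order is [C, F, A, B] on every run; a HashMap here could hand any
		// traversal a different starting order depending on hashing and capacity.
		System.out.println(nodes.keySet());
	}
}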
Grammar.java
@@ -72,6 +72,7 @@ import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -1229,7 +1230,7 @@ public class Grammar implements AttributeResolver {
 	}
 
 	public Set<String> getStringLiterals() {
-		final Set<String> strings = new HashSet<String>();
+		final Set<String> strings = new LinkedHashSet<String>();
 		GrammarTreeVisitor collector = new GrammarTreeVisitor() {
 			@Override
 			public void stringRef(TerminalAST ref) {
GrammarTransformPipeline.java
@@ -418,6 +418,7 @@ public class GrammarTransformPipeline {
 		// add strings from combined grammar (and imported grammars) into lexer
 		// put them first as they are keywords; must resolve ambigs to these rules
 		// tool.log("grammar", "strings from parser: "+stringLiterals);
+		int insertIndex = 0;
 		nextLit:
 		for (String lit : stringLiterals) {
 			// if lexer already has a rule for literal, continue
@@ -439,9 +440,12 @@
 			CommonToken idToken = new CommonToken(ANTLRParser.TOKEN_REF, rname);
 			litRule.addChild(new TerminalAST(idToken));
 			litRule.addChild(blk);
-			lexerRulesRoot.insertChild(0, litRule); // add first
+			lexerRulesRoot.insertChild(insertIndex, litRule);
 			// lexerRulesRoot.getChildren().add(0, litRule);
 			lexerRulesRoot.freshenParentAndChildIndexes(); // reset indexes and set litRule parent
+
+			// next literal will be added after the one just added
+			insertIndex++;
 		}
 
 		// TODO: take out after stable if slow
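The insertIndex change fixes the relative order of the literal rules injected into the lexer: inserting every new rule at child index 0 reverses the collected literals, while inserting at an index that advances after each insertion keeps them in collection order (which, with the LinkedHashSet change above, is grammar order). A self-contained sketch of the two strategies using plain lists; the literal and rule names are invented for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class InsertOrderDemo {
	public static void main(String[] args) {
		List<String> literals = Arrays.asList("'if'", "'else'", "'while'");

		// Old strategy: always insert at the front -> the literals end up reversed.
		List<String> insertAtZero = new ArrayList<String>(Arrays.asList("ID", "WS"));
		for (String lit : literals) {
			insertAtZero.add(0, lit);
		}
		System.out.println(insertAtZero); // ['while', 'else', 'if', ID, WS]

		// New strategy: advance the insertion point -> collection order is preserved.
		List<String> insertAtIndex = new ArrayList<String>(Arrays.asList("ID", "WS"));
		int insertIndex = 0;
		for (String lit : literals) {
			insertAtIndex.add(insertIndex, lit);
			insertIndex++;
		}
		System.out.println(insertAtIndex); // ['if', 'else', 'while', ID, WS]
	}
}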
Rule.java
@@ -41,6 +41,7 @@ import org.stringtemplate.v4.misc.MultiMap;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -212,7 +213,7 @@ public class Rule implements AttributeResolver {
 	 * this label. Unlabeled alternatives are not included in the result.
 	 */
 	public Map<String, List<Pair<Integer, AltAST>>> getAltLabels() {
-		Map<String, List<Pair<Integer, AltAST>>> labels = new HashMap<String, List<Pair<Integer, AltAST>>>();
+		Map<String, List<Pair<Integer, AltAST>>> labels = new LinkedHashMap<String, List<Pair<Integer, AltAST>>>();
 		for (int i=1; i<=numberOfAlts; i++) {
 			GrammarAST altLabel = alt[i].ast.altLabel;
 			if ( altLabel!=null ) {
TestFullContextParsing.java
@@ -196,8 +196,8 @@ public class TestFullContextParsing extends BaseTest {
 								 input, true);
 		expecting =
 			"Decision 1:\n" +
-			"s0-'else'->:s1^=>1\n" +
-			"s0-'}'->:s2=>2\n";
+			"s0-'}'->:s2=>2\n" +
+			"s0-'else'->:s1^=>1\n";
 		assertEquals(expecting, result);
 		assertEquals("line 1:29 reportAttemptingFullContext d=1 (stat), input='else'\n" +
 					 "line 1:38 reportAmbiguity d=1 (stat): ambigAlts={1, 2}, input='elsefoo}'\n",
@@ -228,8 +228,8 @@ public class TestFullContextParsing extends BaseTest {
 								 input, true);
 		expecting =
 			"Decision 1:\n" +
-			"s0-'else'->:s1^=>1\n" +
-			"s0-'}'->:s2=>2\n";
+			"s0-'}'->:s2=>2\n" +
+			"s0-'else'->:s1^=>1\n";
 		assertEquals(expecting, result);
 		assertEquals("line 1:19 reportAttemptingFullContext d=1 (stat), input='else'\n" +
 					 "line 1:19 reportContextSensitivity d=1 (stat), input='else'\n" +
@@ -244,8 +244,8 @@ public class TestFullContextParsing extends BaseTest {
 								 input, true);
 		expecting =
 			"Decision 1:\n" +
-			"s0-'else'->:s1^=>1\n" +
-			"s0-'}'->:s2=>2\n";
+			"s0-'}'->:s2=>2\n" +
+			"s0-'else'->:s1^=>1\n";
 		assertEquals(expecting, result);
 		assertEquals("line 1:19 reportAttemptingFullContext d=1 (stat), input='else'\n" +
 					 "line 1:19 reportContextSensitivity d=1 (stat), input='else'\n" +
TestLexerErrors.java
@@ -201,7 +201,7 @@ public class TestLexerErrors extends BaseTest {
 		String result = execLexer("T.g4", grammar, "TLexer", "x : x", false);
 		String expecting =
 			"[@0,0:0='x',<3>,1:0]\n" +
-			"[@1,2:2=':',<2>,1:2]\n" +
+			"[@1,2:2=':',<1>,1:2]\n" +
 			"[@2,4:4='x',<3>,1:4]\n" +
 			"[@3,5:4='<EOF>',<-1>,1:5]\n";
 		assertEquals(expecting, result);
TestParseErrors.java
@@ -82,7 +82,7 @@ public class TestParseErrors extends BaseTest {
 			"grammar T;\n" +
 			"a : 'a' x='b' {System.out.println(\"conjured=\"+$x);} 'c' ;";
 		String result = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ac", false);
-		String expecting = "conjured=[@-1,-1:-1='<missing 'b'>',<1>,1:1]\n";
+		String expecting = "conjured=[@-1,-1:-1='<missing 'b'>',<2>,1:1]\n";
 		assertEquals(expecting, result);
 	}
@@ -101,7 +101,7 @@ public class TestParseErrors extends BaseTest {
 			"grammar T;\n" +
 			"a : 'a' x=('b'|'c') {System.out.println(\"conjured=\"+$x);} 'd' ;";
 		String result = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ad", false);
-		String expecting = "conjured=[@-1,-1:-1='<missing 'b'>',<1>,1:1]\n";
+		String expecting = "conjured=[@-1,-1:-1='<missing 'b'>',<2>,1:1]\n";
 		assertEquals(expecting, result);
 	}
TestTopologicalSort.java
@@ -53,7 +53,7 @@ public class TestTopologicalSort extends BaseTest {
 		g.addEdge("F", "H");
 		g.addEdge("E", "F");
 
-		String expecting = "[H, F, E, D, G, A, B, C]";
+		String expecting = "[H, F, G, E, D, A, B, C]";
 		List<String> nodes = g.sort();
 		String result = nodes.toString();
 		assertEquals(expecting, result);
@@ -95,7 +95,7 @@ public class TestTopologicalSort extends BaseTest {
 		g.addEdge("Def.g4", "Java.tokens"); // walkers feed off generated tokens
 		g.addEdge("Ref.g4", "Java.tokens");
 
-		String expecting = "[MyJava.tokens, Java.g4, Java.tokens, Ref.g4, Def.g4]";
+		String expecting = "[MyJava.tokens, Java.g4, Java.tokens, Def.g4, Ref.g4]";
 		List<String> nodes = g.sort();
 		String result = nodes.toString();
 		assertEquals(expecting, result);
@@ -109,7 +109,7 @@ public class TestTopologicalSort extends BaseTest {
 		g.addEdge("Def.g4", "JavaLexer.tokens");
 		g.addEdge("Ref.g4", "JavaLexer.tokens");
 
-		String expecting = "[JavaLexer.g4, JavaLexer.tokens, JavaParser.g4, Ref.g4, Def.g4]";
+		String expecting = "[JavaLexer.g4, JavaLexer.tokens, JavaParser.g4, Def.g4, Ref.g4]";
 		List<String> nodes = g.sort();
 		String result = nodes.toString();
 		assertEquals(expecting, result);