forked from jasder/antlr

commit 417c208787

.travis.yml
@@ -1,4 +1,4 @@
-sudo: false
+sudo: true
 language: java
 script:
 - mvn install
@@ -6,3 +6,13 @@ jdk:
 - openjdk6
 - oraclejdk7
 - oraclejdk8
+before_install:
+- sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF
+- sudo add-apt-repository ppa:fkrull/deadsnakes -y
+- sudo add-apt-repository ppa:rwky/nodejs -y
+- sudo apt-get update -qq
+- sudo apt-get install -qq python3.4
+- sudo apt-get install -qq nodejs
+- echo "deb http://download.mono-project.com/repo/debian wheezy/snapshots/3.12.1 main" | sudo tee /etc/apt/sources.list.d/mono-xamarin.list
+- sudo apt-get install -qq mono-complete

@@ -2,6 +2,8 @@
 
 **ANTLR** (ANother Tool for Language Recognition) is a powerful parser generator for reading, processing, executing, or translating structured text or binary files. It's widely used to build languages, tools, and frameworks. From a grammar, ANTLR generates a parser that can build parse trees and also generates a listener interface (or visitor) that makes it easy to respond to the recognition of phrases of interest.
 
+*Given day-job constraints, my time working on this project is limited so I'll have to focus first on fixing bugs rather than changing/improving the feature set. Likely I'll do it in bursts every few months. Please do not be offended if your bug or pull request does not yield a response! --parrt*
+
 ## Authors and major contributors
 
 * [Terence Parr](http://www.cs.usfca.edu/~parrt/), parrt@cs.usfca.edu

@@ -74,4 +74,6 @@ YYYY/MM/DD, github id, Full name, email
 2015/05/12, Pursuit92, Josh Chase, jcjoshuachase@gmail.com
 2015/05/20, peturingi, Pétur Ingi Egilsson, petur@petur.eu
 2015/05/27, jcbrinfo, Jean-Christophe Beaupré, jcbrinfo@users.noreply.github.com
 2015/06/29, jvanzyl, Jason van Zyl, jason@takari.io
+2015/08/18, krzkaczor, Krzysztof Kaczor, krzysztof@kaczor.io
+2015/09/18, worsht, Rajiv Subrahmanyam, rajiv.public@gmail.com

@@ -41,7 +41,7 @@
 <dependency>
 <groupId>org.seleniumhq.selenium</groupId>
 <artifactId>selenium-java</artifactId>
-<version>2.44.0</version>
+<version>2.46.0</version>
 <scope>test</scope>
 </dependency>
 <dependency>
@@ -67,15 +67,21 @@
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
 <version>2.12.4</version>
 <configuration>
 <includes>
+<include>**/csharp/Test*.java</include>
 <include>**/java/Test*.java</include>
 <include>**/javascript/node/Test*.java</include>
 <include>**/python2/Test*.java</include>
 <include>**/python3/Test*.java</include>
 </includes>
 </configuration>
 </plugin>
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-jar-plugin</artifactId>
 <version>2.4</version>
 <executions>
 <execution>
 <goals>
@@ -106,8 +112,8 @@
 <mainClass>org.antlr.v4.testgen.TestGenerator</mainClass>
 <arguments>
 <argument>-root</argument>
 <argument>${basedir}/resources</argument>
-<argument>-o</argument>
-<argument>${basedir}</argument>
+<argument>-outdir</argument>
+<argument>${basedir}/test</argument>
 <argument>-templates</argument>
 <argument>${basedir}/resources/org/antlr/v4/test/runtime/templates</argument>

@@ -12,7 +12,7 @@ import org.junit.Test;
 import static org.junit.Assert.*;
 
 <if(file.Options.("ImportErrorQueue"))>
-import org.antlr.v4.test.tool.ErrorQueue;
+import org.antlr.v4.test.runtime.java.ErrorQueue;
 <endif>
 <if(file.Options.("ImportGrammar"))>
 import org.antlr.v4.tool.Grammar;

@@ -12,7 +12,7 @@ import org.junit.Test;
 import static org.junit.Assert.*;
 
 <if(file.Options.("ImportErrorQueue"))>
-import org.antlr.v4.test.tool.ErrorQueue;
+import org.antlr.v4.test.runtime.java.ErrorQueue;
 <endif>
 <if(file.Options.("ImportGrammar"))>
 import org.antlr.v4.tool.Grammar;

@@ -12,7 +12,7 @@ import org.junit.Test;
 import static org.junit.Assert.*;
 
 <if(file.Options.("ImportErrorQueue"))>
-import org.antlr.v4.test.tool.ErrorQueue;
+import org.antlr.v4.test.runtime.java.ErrorQueue;
 <endif>
 <if(file.Options.("ImportGrammar"))>
 import org.antlr.v4.tool.Grammar;

@@ -12,7 +12,7 @@ import org.junit.Test;
 import static org.junit.Assert.*;
 
 <if(file.Options.("ImportErrorQueue"))>
-import org.antlr.v4.test.tool.ErrorQueue;
+import org.antlr.v4.test.runtime.java.ErrorQueue;
 <endif>
 <if(file.Options.("ImportGrammar"))>
 import org.antlr.v4.tool.Grammar;

@@ -45,18 +45,8 @@ import org.stringtemplate.v4.STGroupFile;
 import org.stringtemplate.v4.gui.STViz;
 
 public class TestGenerator {
-    public final static String[] targets = {"CSharp", "Java", "Python2", "Python3", "JavaScript"};
-
-    // This project uses UTF-8, but the plugin might be used in another project
-    // which is not. Always load templates with UTF-8, but write using the
-    // specified encoding.
-    protected final String encoding;
-
-    protected final File runtimeTemplates;
-
-    protected final File outputDirectory;
-
-    protected final boolean visualize;
-
+    public final static String[] targets = {"CSharp", "Java", "Python2", "Python3", "JavaScript/Node", "JavaScript/Safari", "JavaScript/Firefox", "JavaScript/Explorer", "JavaScript/Chrome"};
+
     /** Execute from antlr4 root dir:
     * *
@ -70,7 +60,8 @@ public class TestGenerator {
|
|||
String rootDir = null;
|
||||
String outDir = null;
|
||||
String templatesRoot = null;
|
||||
String targetSpecificTemplateFile = null;
|
||||
String target = "ALL";
|
||||
boolean browsers = false;
|
||||
boolean viz = false;
|
||||
|
||||
int i = 0;
|
||||
|
@@ -80,14 +71,20 @@
         i++;
         rootDir = args[i];
     }
-    else if (arg.startsWith("-o")) {
+    else if (arg.startsWith("-outdir")) {
         i++;
         outDir = args[i];
     }
     else if (arg.startsWith("-templates")) {
         i++;
-        targetSpecificTemplateFile = args[i];
-        templatesRoot = targetSpecificTemplateFile;
+        templatesRoot = args[i];
     }
+    else if (arg.startsWith("-target")) {
+        i++;
+        target = args[i];
+    }
+    else if (arg.startsWith("-browsers")) {
+        browsers = true;
+    }
     else if (arg.startsWith("-viz")) {
         viz = true;
@@ -97,107 +94,102 @@
 
     System.out.println("rootDir = " + rootDir);
     System.out.println("outputDir = " + outDir);
-    System.out.println("templates = " + targetSpecificTemplateFile);
+    System.out.println("templates = " + templatesRoot);
+    System.out.println("target = " + target);
+    System.out.println("browsers = " + browsers);
     System.out.println("viz = " + viz);
 
-    if ( rootDir!=null) {
-        genAllTargets(outDir, rootDir, templatesRoot, viz);
+    if(rootDir==null) {
+        System.out.println("rootDir is mandatory!" + rootDir);
+        return;
     }
+    if(outDir==null)
+        outDir = rootDir + "/test";
 
-    if ( outDir==null || targetSpecificTemplateFile==null ) {
-        System.err.println("You must give an output root dir and templates file");
-        return;
-    }
+    if(templatesRoot==null)
+        templatesRoot = rootDir + "/resources/org/antlr/v4/test/runtime/templates";
 
-    genTarget(outDir, targetSpecificTemplateFile, templatesRoot, viz);
+    if ( "ALL".equalsIgnoreCase(target)) {
+        genAllTargets(rootDir, outDir, templatesRoot, browsers, viz);
+    } else
+        genTarget(rootDir, outDir, target, templatesRoot, viz);
 }
 
-public static void genAllTargets(String outDirRoot, final String rootDir, final String templatesRoot, boolean viz) {
-    for (String target : targets) {
-        String templatesPackage = rootDir + "/org/antlr/v4/test/runtime/" + target.toLowerCase();
-        String templates = templatesPackage + "/" + target + ".test.stg";
-        if ( target.equals("JavaScript") ) {
-            templates = templatesPackage+"/node/Node.test.stg";
-        }
-        String outDir = rootDir + "/runtime-testsuite/test";
-        if ( outDirRoot!=null ) {
-            outDir = outDirRoot;
-        }
-        genTarget(outDir, templates, templatesRoot, viz);
+public static void genAllTargets(String rootDir, String outDirRoot, String templatesRoot, boolean browsers, boolean viz) {
+    for(String target : targets) {
+        if(!browsers && "JavaScript/Safari".equals(target))
+            return;
+        genTarget(rootDir, outDirRoot, target, templatesRoot, viz);
     }
 }
 
-public static void genTarget(final String outDir, final String targetSpecificTemplateFile, final String templates, boolean viz) {
-
+public static void genTarget(final String rootDir, final String outDir, final String fullTarget, final String templatesDir, boolean viz) {
+    String[] parts = fullTarget.split("/");
+    String target = parts[0];
+    String subTarget = parts.length>1 ? parts[1] : target;
+    String targetPackage = rootDir + "/resources/org/antlr/v4/test/runtime/" + fullTarget.toLowerCase();
+    String targetTemplate = targetPackage + "/" + subTarget + ".test.stg";
     TestGenerator gen = new TestGenerator("UTF-8",
-        new File(targetSpecificTemplateFile),
-        new File(outDir),
-        viz)
-    {
-        @Override
-        protected void info(String message) {
-            System.err.println(message);
-        }
-        @Override
-        public File getOutputDir(String templateFolder) {
-            String targetName = getTargetNameFromTemplatesFileName();
-            // compute package
-            String templatePath = runtimeTemplates.getPath();
-            int packageStart = templatePath.indexOf("org/antlr/v4/test/runtime");
-            int packageEnd = templatePath.indexOf("/" + targetName + ".test.stg");
-            String packageDir = templatePath.substring(packageStart, packageEnd);
-            return new File(outputDirectory, packageDir);
-        }
-        @Override
-        public String getTestTemplatesResourceDir() {
-            return templates;
-            //return "resources/org/antlr/v4/test/runtime/templates";
-        }
-    };
-
-    // Somehow the templates directory is getting picked up so let's block that
-    if(!targetSpecificTemplateFile.endsWith(".stg")) {
-        return;
-    }
-
-    gen.info("Generating target " + gen.getTargetNameFromTemplatesFileName());
+        fullTarget,
+        rootDir,
+        new File(outDir),
+        new File(templatesDir),
+        new File(targetTemplate),
+        viz);
+    gen.info("Generating target " + gen.getTargetName());
     gen.execute();
 }
 
-public TestGenerator(String encoding, File runtimeTemplates, File outputDirectory, boolean visualize) {
+// This project uses UTF-8, but the plugin might be used in another project
+// which is not. Always load templates with UTF-8, but write using the
+// specified encoding.
+protected final String encoding;
+protected final String targetName;
+protected final String rootDir;
+protected final File outputDir;
+protected final File testTemplates;
+protected final File runtimeTemplate;
+protected final boolean visualize;
+
+public TestGenerator(String encoding, String targetName, String rootDir, File outputDir, File testTemplates, File runtimeTemplate, boolean visualize) {
     this.encoding = encoding;
-    this.runtimeTemplates = runtimeTemplates;
-    this.outputDirectory = outputDirectory;
+    this.targetName = targetName;
+    this.rootDir = rootDir;
+    this.outputDir = outputDir;
+    this.testTemplates = testTemplates;
+    this.runtimeTemplate = runtimeTemplate;
    this.visualize = visualize;
 }
 
+private String getTargetName() {
+    return targetName;
+}
+
+
 public void execute() {
-    STGroup targetGroup = new STGroupFile(runtimeTemplates.getPath());
+    STGroup targetGroup = new STGroupFile(runtimeTemplate.getPath());
     targetGroup.registerModelAdaptor(STGroup.class, new STGroupModelAdaptor());
     targetGroup.defineDictionary("escape", new JavaEscapeStringMap());
     targetGroup.defineDictionary("lines", new LinesStringMap());
     targetGroup.defineDictionary("strlen", new StrlenStringMap());
 
-    String rootFolder = getTestTemplatesResourceDir();
-    generateCodeForFoldersInIndex(targetGroup, rootFolder);
+    generateCodeForFoldersInIndex(targetGroup);
 }
 
-protected void generateCodeForFoldersInIndex(STGroup targetGroup, String rootFolder) {
-    STGroup index = new STGroupFile(rootFolder+"/Index.stg");
+protected void generateCodeForFoldersInIndex(STGroup targetGroup) {
+    File targetFolder = getOutputDir(testTemplates+"");
+    STGroup index = new STGroupFile(testTemplates+"/Index.stg");
     index.load(); // make sure the index group is loaded since we call rawGetDictionary
 
     Map<String, Object> folders = index.rawGetDictionary("TestFolders");
     if (folders != null) {
         for (String key : folders.keySet()) {
-            final String testdir = rootFolder + "/" + key;
-            STGroup testIndex = new STGroupFile(testdir + "/Index.stg");
+            final String testDir = testTemplates + "/" + key;
+            STGroup testIndex = new STGroupFile(testDir + "/Index.stg");
             testIndex.load();
             Map<String, Object> templateNames = testIndex.rawGetDictionary("TestTemplates");
             if ( templateNames != null && !templateNames.isEmpty() ) {
                 final ArrayList<String> sortedTemplateNames = new ArrayList<String>(templateNames.keySet());
                 Collections.sort(sortedTemplateNames);
-                generateTestFile(testIndex, targetGroup,
-                    testdir,
-                    sortedTemplateNames);
+                generateTestFile(testIndex, targetGroup, testDir, sortedTemplateNames, targetFolder);
             }
         }
     }
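
The refactored entry point above drops the old "derive the target from the template file name" scheme in favor of an explicit `-target` flag (defaulting to `ALL`) plus defaults derived from `-root`. As a hedged illustration only, here is the same flag handling sketched in Python with `argparse`; all names here are invented, the real tool is the Java class shown above:

```python
# Minimal sketch of the new flag handling, assuming the flags shown in the
# diff (-root, -outdir, -templates, -target, -browsers, -viz).
import argparse

TARGETS = ["CSharp", "Java", "Python2", "Python3",
           "JavaScript/Node", "JavaScript/Safari", "JavaScript/Firefox",
           "JavaScript/Explorer", "JavaScript/Chrome"]

def main(argv):
    p = argparse.ArgumentParser()
    p.add_argument("-root", required=True)      # rootDir is mandatory
    p.add_argument("-outdir", default=None)
    p.add_argument("-templates", default=None)
    p.add_argument("-target", default="ALL")
    p.add_argument("-browsers", action="store_true")
    p.add_argument("-viz", action="store_true")
    args = p.parse_args(argv)

    # defaults are derived from the root dir, as in the Java code above
    outdir = args.outdir or args.root + "/test"
    templates = args.templates or args.root + "/resources/org/antlr/v4/test/runtime/templates"

    targets = TARGETS if args.target.upper() == "ALL" else [args.target]
    for t in targets:
        if not args.browsers and t == "JavaScript/Safari":
            break  # note: the Java version returns here, ending the whole loop
        print("generating", t, "into", outdir, "from", templates)

if __name__ == "__main__":
    import sys
    main(sys.argv[1:])
```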
@@ -205,16 +197,16 @@
 
 protected void generateTestFile(STGroup index,
     STGroup targetGroup,
-    String testdir,
-    Collection<String> testTemplates)
+    String testDir,
+    Collection<String> testTemplates,
+    File targetFolder)
 {
-    File targetFolder = getOutputDir(testdir);
-    String testName = testdir.substring(testdir.lastIndexOf('/') + 1);
+    String testName = testDir.substring(testDir.lastIndexOf('/') + 1);
     File targetFile = new File(targetFolder, "Test" + testName + ".java");
     info("Generating file "+targetFile.getAbsolutePath());
     List<ST> templates = new ArrayList<ST>();
     for (String template : testTemplates) {
-        STGroup testGroup = new STGroupFile(testdir + "/" + template + STGroup.GROUP_FILE_EXTENSION);
+        STGroup testGroup = new STGroupFile(testDir + "/" + template + STGroup.GROUP_FILE_EXTENSION);
         importLanguageTemplates(testGroup, targetGroup);
         ST testType = testGroup.getInstanceOf("TestType");
         if (testType == null) {
@@ -287,19 +279,18 @@
     }
 }
 
-public String getTestTemplatesResourceDir() { return "org/antlr/v4/test/runtime/templates"; }
-
-public String getTargetNameFromTemplatesFileName() {
-    // runtimeTemplates is like ~/antlr/code/antlr4/runtime-testsuite/resources/org/antlr/v4/test/runtime/java/Java.test.stg
-    int targetEnd = runtimeTemplates.getName().indexOf(".test.stg");
-    return runtimeTemplates.getName().substring(0, targetEnd);
-}
-
 public File getOutputDir(String templateFolder) {
-    return new File(outputDirectory, templateFolder.substring(0, templateFolder.indexOf("/templates")));
+    if(templateFolder.startsWith(rootDir))
+        templateFolder = templateFolder.substring(rootDir.length());
+    if(templateFolder.startsWith("/resources"))
+        templateFolder = templateFolder.substring("/resources".length());
+    templateFolder = templateFolder.substring(0, templateFolder.indexOf("/templates"));
+    templateFolder += "/" + targetName.toLowerCase();
+    return new File(outputDir, templateFolder);
 }
 
 protected void info(String message) {
     // System.out.println("INFO: " + message);
 }
 
 protected void warn(String message) {
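
The new `getOutputDir()` above computes the generated-test package directory from the templates path by stripping the root and `/resources` prefixes, cutting at `/templates`, and appending the target name. A small Python transcription of that path logic, with example paths that are purely illustrative:

```python
# Python transcription of the new getOutputDir() logic (illustrative only;
# the paths below are examples, not taken from the commit).
def get_output_dir(output_dir, root_dir, target_name, template_folder):
    if template_folder.startswith(root_dir):
        template_folder = template_folder[len(root_dir):]
    if template_folder.startswith("/resources"):
        template_folder = template_folder[len("/resources"):]
    template_folder = template_folder[:template_folder.index("/templates")]
    template_folder += "/" + target_name.lower()
    return output_dir + template_folder

print(get_output_dir("/tmp/out", "/work/antlr4", "Java",
                     "/work/antlr4/resources/org/antlr/v4/test/runtime/templates"))
# -> /tmp/out/org/antlr/v4/test/runtime/java
```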

@@ -289,7 +289,10 @@ public abstract class BaseTest {
     writeFile(tmpdir, "input", input);
     writeLexerTestFile(lexerName, showDFA);
     addSourceFiles("Test.cs");
-    compile();
+    if(!compile()) {
+        System.err.println("Failed to compile!");
+        return stderrDuringParse;
+    }
     String output = execTest();
     if ( stderrDuringParse!=null && stderrDuringParse.length()>0 ) {
         System.err.println(stderrDuringParse);
@@ -416,8 +419,10 @@ public abstract class BaseTest {
     "/p:Configuration=Release",
     getTestProjectFile().getAbsolutePath()
 };
-System.err.println("Starting build "+Utils.join(args, " "));
-Process process = Runtime.getRuntime().exec(args, null, new File(tmpdir));
+System.err.println("Starting build "+ Utils.join(args, " "));
+ProcessBuilder pb = new ProcessBuilder(args);
+pb.directory(new File(tmpdir));
+Process process = pb.start();
 StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
 StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
 stdoutVacuum.start();
@@ -425,11 +430,13 @@ public abstract class BaseTest {
 process.waitFor();
 stdoutVacuum.join();
 stderrVacuum.join();
-if ( stderrVacuum.toString().length()>0 ) {
-    this.stderrDuringParse = stderrVacuum.toString();
-    System.err.println("buildProject stderrVacuum: "+ stderrVacuum);
+// xbuild sends errors to output, so check exit code
+boolean success = process.exitValue()==0;
+if ( !success ) {
+    this.stderrDuringParse = stdoutVacuum.toString();
+    System.err.println("buildProject stderrVacuum: "+ this.stderrDuringParse);
 }
-return process.exitValue()==0;
+return success;
 }
 
 private String locateMSBuild() {
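
The two hunks above make two related changes: the build runs through `ProcessBuilder` with a working directory instead of `Runtime.exec`, and success is decided by the exit code rather than by "did anything appear on stderr", because xbuild reports its errors on stdout. Roughly the same pattern in Python, as a hedged sketch (the build command is whatever your project uses, not something the commit specifies):

```python
# Rough Python equivalent of the build step above: run the compiler in a
# working directory, capture both streams, and trust the exit code rather
# than the presence of stderr output.
import subprocess

def build_project(args, tmpdir):
    proc = subprocess.Popen(args, cwd=tmpdir,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    success = proc.returncode == 0
    if not success:
        # like xbuild, many compilers report errors on stdout, so keep that
        print("build failed:", stdout.decode(errors="replace"))
    return success
```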
@@ -472,7 +479,7 @@ public abstract class BaseTest {
 String runtimeName = isWindows() ? "Antlr4.Runtime.vs2013.csproj" : "Antlr4.Runtime.mono.csproj";
 final URL runtimeProj = loader.getResource("CSharp/runtime/CSharp/Antlr4.Runtime/"+runtimeName);
 if ( runtimeProj==null ) {
-    throw new RuntimeException("C# runtime project file not found at:" + runtimeProj.getPath());
+    throw new RuntimeException("C# runtime project file not found!");
 }
 String runtimeProjPath = runtimeProj.getPath();
 XPathExpression exp = XPathFactory.newInstance().newXPath()
@@ -529,12 +536,12 @@ public abstract class BaseTest {
     new String[] { "mono", exec, new File(tmpdir, "input").getAbsolutePath() };
 ProcessBuilder pb = new ProcessBuilder(args);
 pb.directory(new File(tmpdir));
-Process p = pb.start();
-StreamVacuum stdoutVacuum = new StreamVacuum(p.getInputStream());
-StreamVacuum stderrVacuum = new StreamVacuum(p.getErrorStream());
+Process process = pb.start();
+StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
+StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
 stdoutVacuum.start();
 stderrVacuum.start();
-p.waitFor();
+process.waitFor();
 stdoutVacuum.join();
 stderrVacuum.join();
 String output = stdoutVacuum.toString();
[5 file diffs suppressed because they are too large]

@@ -41,7 +41,7 @@ public abstract class BasePython3Test extends BasePythonTest {
 
 @Override
 protected String getPythonExecutable() {
-    return "python3.4";
+    return "python3.4";
 }
 
 @Override

@@ -34,7 +34,7 @@
 // when you construct the object.
 //
 var InputStream = require('./InputStream').InputStream;
-var isNodeJs = typeof window === 'undefined';
+var isNodeJs = typeof window === 'undefined' && typeof importScripts === 'undefined';
 var fs = isNodeJs ? require("fs") : null;
 
 function FileStream(fileName) {
@@ -214,7 +214,7 @@ ParserRuleContext.prototype.getSourceInterval = function() {
 if( this.start === null || this.stop === null) {
     return INVALID_INTERVAL;
 } else {
-    return Interval(this.start.tokenIndex, this.stop.tokenIndex);
+    return new Interval(this.start.tokenIndex, this.stop.tokenIndex);
 }
 };
 
@@ -191,6 +191,13 @@ function escapeWhitespace(s, escapeSpaces) {
     return s;
 }
 
+exports.isArray = function (entity) {
+    return Object.prototype.toString.call( entity ) === '[object Array]'
+};
+
+exports.titleCase = function(str) {
+    return str.replace(/\w\S*/g, function(txt){return txt.charAt(0).toUpperCase() + txt.substr(1);});
+};
 
 exports.Set = Set;
 exports.BitSet = BitSet;

@@ -35,6 +35,8 @@
 var Token = require('./../Token').Token;
 var Interval = require('./../IntervalSet').Interval;
 var INVALID_INTERVAL = new Interval(-1, -2);
+var Utils = require('../Utils.js');
+
 
 function Tree() {
     return this;
@@ -84,6 +86,26 @@ function ParseTreeVisitor() {
     return this;
 }
 
+ParseTreeVisitor.prototype.visit = function(ctx) {
+    if (Utils.isArray(ctx)) {
+        var self = this;
+        return ctx.map(function(child) { return visitAtom(self, child)});
+    } else {
+        return visitAtom(this, ctx);
+    }
+};
+
+var visitAtom = function(visitor, ctx) {
+    if (ctx.parser === undefined) { //is terminal
+        return;
+    }
+
+    var name = ctx.parser.ruleNames[ctx.ruleIndex];
+    var funcName = "visit" + Utils.titleCase(name);
+
+    return visitor[funcName](ctx);
+};
+
 function ParseTreeListener() {
     return this;
 }
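
The added `visit()`/`visitAtom()` pair dispatches by building a method name from the grammar rule name ("expr" becomes "visitExpr") and handles lists of contexts by mapping over them. The same idea in Python, as an illustration only (these class and attribute names are hypothetical, not part of the ANTLR runtime):

```python
# Rule-name dispatch sketched in Python: build a method name from the rule
# and call it via getattr. Terminal nodes (no parser attribute) yield None.
class Visitor(object):
    def visit(self, ctx):
        if isinstance(ctx, list):
            return [self._visit_atom(c) for c in ctx]
        return self._visit_atom(ctx)

    def _visit_atom(self, ctx):
        if getattr(ctx, "parser", None) is None:  # terminal node
            return None
        name = ctx.parser.ruleNames[ctx.ruleIndex]
        func_name = "visit" + name[:1].upper() + name[1:]
        return getattr(self, func_name)(ctx)
```

As in the JavaScript version, a missing `visitXyz` method is an error (an `AttributeError` here, an undefined call there), so concrete visitors must define one method per rule they care about.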
@@ -0,0 +1,258 @@
+//
+// This file is part of Honey Require
+//
+// Copyright (c) 2015 Torben Haase
+//
+// Honey Require is free software: you can redistribute it and/or modify it
+// under the terms of the MIT License (MIT).
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+// SOFTWARE.
+//
+// You should have received a copy of the MIT License along with Honey Require.
+// If not, see <https://opensource.org/licenses/MIT>.
+//
+////////////////////////////////////////////////////////////////////////////////
+
+// NOTE The load parameter points to the function, which prepares the
+//      environment for each module and runs its code. Scroll down to the end of
+//      the file to see the function definition.
+(function(load) { 'use strict';
+
+// NOTE Mozilla still sets the wrong fileName property for errors that occur
+//      inside an eval call (even with sourceURL). However, the stack
+//      contains the correct source, so it can be used to re-threw the error
+//      with the correct fileName property.
+// WARN Re-throwing an error object will mess up the stack trace and the
+//      column number.
+if (typeof (new Error()).fileName == "string") {
+    self.addEventListener("error", function(evt) {
+        if (evt.error instanceof Error) {
+            if (pwd[0]) {
+                evt.preventDefault();
+                throw new evt.error.constructor(evt.error.message, pwd[0].uri, evt.error.lineNumber);
+            }
+            else {
+                var m = evt.error.stack.match(/^[^\n@]*@([^\n]+):\d+:\d+/);
+                if (m === null) {
+                    console.warn("Honey: unable to read file name from stack");
+                }
+                else if (evt.error.fileName != m[1]) {
+                    evt.preventDefault();
+                    throw new evt.error.constructor(evt.error.message, m[1], evt.error.lineNumber);
+                }
+            }
+        }
+    }, false);
+}
+
+// INFO Current module descriptors
+//      pwd[0] contains the descriptor of the currently loaded module,
+//      pwd[1] contains the descriptor its parent module and so on.
+
+var pwd = Array();
+
+// INFO Path parser
+//      Older browsers don't support the URL interface, therefore we use an
+//      anchor element as parser in that case. Thes breaks web worker support,
+//      but we don't care since these browsers also don't support web workers.
+
+var parser = URL ? new URL(location.href) : document.createElement('A');
+
+// INFO Module cache
+//      Contains getter functions for the exports objects of all the loaded
+//      modules. The getter for the module 'mymod' is name '$name' to prevent
+//      collisions with predefined object properties (see note below).
+//      As long as a module has not been loaded the getter is either undefined
+//      or contains the module code as a function (in case the module has been
+//      pre-loaded in a bundle).
+// WARN IE8 supports defineProperty only for DOM objects, therfore we use a
+//      HTMLDivElement as cache in that case. This breaks web worker support,
+//      but we don't care since IE8 has no web workers at all.
+
+try {
+    var cache = new Object();
+    Object.defineProperty(cache, "foo", {'value':"bar",'configurable':true});
+    delete cache.foo;
+}
+catch (e) {
+    console.warn("Honey: falling back to DOM workaround for defineProperty ("+e+")");
+    cache = document.createElement('DIV');
+}
+
+// INFO Send lock
+//      Sending the request causes the event loop to continue. Therefore
+//      pending AJAX load events for the same url might be executed before
+//      the synchronous onLoad is called. This should be no problem, but in
+//      Chrome the responseText of the sneaked in load events will be empty.
+//      Therefore we have to lock the loading while executing send().
+
+var lock = new Object();
+
+// INFO Honey options
+//      The values can be set by defining a object called Honey. The
+//      Honey object has to be defined before this script here is loaded
+//      and changing the values in the Honey object will have no effect
+//      afterwards!
+
+var requirePath = self.Honey&&self.Honey.requirePath!==undefined ? self.Honey.requirePath.slice(0) : ['./'];
+var requireCompiler = self.Honey&&self.Honey.requireCompiler!==undefined ? self.Honey.requireCompiler : null;
+
+// NOTE Parse module root paths
+var base = [location.origin, location.href.substr(0, location.href.lastIndexOf("/")+1)];
+for (var i=0; i<requirePath.length; i++) {
+    parser.href = (requirePath[i][0]=="."?base[1]:base[0])+requirePath[i];
+    requirePath[i] = parser.href;
+}
+
+// NOTE Add preloaded modules to cache
+for (var id in (self.Honey && self.Honey.requirePreloaded))
+    cache['$'+resolve(id).id] = self.Honey.requirePreloaded[id].toString();
+
+// NOTE Add module overrides to cache
+for (var id in (self.Honey && self.Honey.requireOverrides))
+    cache['$'+resolve(id).id] = self.Honey.requireOverrides[id];
+
+// INFO Module getter
+//      Takes a module identifier, resolves it and gets the module code via an
+//      AJAX request from the module URI. If this was successful the code and
+//      some environment variables are passed to the load function. The return
+//      value is the module's `exports` object. If the cache already
+//      contains an object for the module id, this object is returned directly.
+// NOTE If a callback function has been passed, the AJAX request is asynchronous
+//      and the mpdule exports are passed to the callback function after the
+//      module has been loaded.
+
+function require(identifier, callback, compiler) {
+    if (identifier instanceof Array) {
+        var modules = new Array();
+        var modcount = identifier.length;
+        for (var index = 0; index < identifier.length; index++) {
+            (function(id, i) {
+                modules.push(require(id, callback&&function(mod) {
+                    modules[i] = mod;
+                    (--modcount==0) && callback(modules);
+                }, compiler));
+            })(identifier[index], index);
+        }
+        return modules;
+    }
+
+    compiler = compiler!==undefined ? compiler : requireCompiler;
+    var descriptor = resolve(identifier);
+    var cacheid = '$'+descriptor.id;
+
+    if (cache[cacheid]) {
+        if (typeof cache[cacheid] === 'string')
+            load(descriptor, cache, pwd, cache[cacheid]);
+        // NOTE The callback should always be called asynchronously to ensure
+        //      that a cached call won't differ from an uncached one.
+        callback && setTimeout(function(){callback(cache[cacheid])}, 0);
+        return cache[cacheid];
+    }
+
+    var request = new XMLHttpRequest();
+
+    // NOTE IE8 doesn't support the onload event, therefore we use
+    //      onreadystatechange as a fallback here. However, onreadystatechange
+    //      shouldn't be used for all browsers, since at least mobile Safari
+    //      seems to have an issue where onreadystatechange is called twice for
+    //      readyState 4.
+    callback && (request[request.onload===null?'onload':'onreadystatechange'] = onLoad);
+    request.open('GET', descriptor.uri, !!callback);
+    lock[cacheid] = lock[cacheid]++||1;
+    request.send();
+    lock[cacheid]--;
+    !callback && onLoad();
+    return cache[cacheid];
+
+    function onLoad() {
+        if (request.readyState != 4)
+            return;
+        if (request.status != 200)
+            throw new Error("Honey: unable to load "+descriptor.id+" ("+request.status+" "+request.statusText+")");
+        if (lock[cacheid]) {
+            console.warn("Honey: module locked: "+descriptor.id);
+            callback && setTimeout(onLoad, 0);
+            return;
+        }
+        if (!cache[cacheid]) {
+            var source = compiler ? compiler(request.responseText) : request.responseText;
+            load(descriptor, cache, pwd, 'function(){\n'+source+'\n}');
+        }
+        callback && callback(cache[cacheid]);
+    }
+}
+
+// INFO Module resolver
+//      Takes a module identifier and resolves it to a module id and URI. Both
+//      values are returned as a module descriptor, which can be passed to
+//      `fetch` to load a module.
+
+function resolve(identifier) {
+    // NOTE Matches [1]:[..]/[path/to/][file][.js]
+    var m = identifier.match(/^(?:([^:\/]+):)?(\.\.?)?\/?((?:.*\/)?)([^\.]+)?(\..*)?$/);
+    // NOTE Matches [1]:[/path/to/]file.js
+    var p = (pwd[0]?pwd[0].id:"").match(/^(?:([^:\/]+):)?(.*\/|)[^\/]*$/);
+    var root = m[2] ? requirePath[p[1]?parseInt(p[1]):0] : requirePath[m[1]?parseInt(m[1]):0];
+    parser.href = (m[2]?root+p[2]+m[2]+'/':root)+m[3]+(m[4]?m[4]:'index');
+    var uri = parser.href+(m[5]?m[5]:'.js');
+    if (uri.substr(0,root.length) != root)
+        throw new Error("Honey: relative identifier outside of module root");
+    var id = (m[1]?m[1]+":":"0:")+parser.href.substr(root.length);
+    return {'id':id,'uri':uri};
+}
+
+// INFO Exporting require to global scope
+
+if (self.require !== undefined)
+    throw new Error("Honey: '\'require\' already defined in global scope");
+
+try {
+    Object.defineProperty(self, 'require', {'value':require});
+    Object.defineProperty(self.require, 'resolve', {'value':resolve});
+    Object.defineProperty(self.require, 'path', {'get':function(){return requirePath.slice(0);}});
+}
+catch (e) {
+    // NOTE IE8 can't use defineProperty on non-DOM objects, so we have to fall
+    //      back to unsave property assignments in this case.
+    self.require = require;
+    self.require.resolve = resolve;
+    self.require.path = requirePath.slice(0);
+}
+
+})(
+
+// INFO Module loader
+//      Takes the module descriptor, the global variables and the module code,
+//      sets up the module envirinment, defines the module getter in the cache
+//      and evaluates the module code. If module is a bundle the code of the
+//      pre-loaded modules will be stored in the cache afterwards.
+// NOTE This functions is defined as an anonymous function, which is passed as
+//      a parameter to the closure above to provide a clean environment (only
+//      global variables, module and exports) for the loaded module. This is
+//      also the reason why `source`, `pwd` & `cache` are not named parameters.
+// NOTE If we would strict use mode here, the evaluated code would be forced to be
+//      in strict mode, too.
+
+function /*load*/(module/*, cache, pwd, source*/) {
+    var global = self;
+    var exports = new Object();
+    Object.defineProperty(module, 'exports', {'get':function(){return exports;},'set':function(e){exports=e;}});
+    arguments[2].unshift(module);
+    Object.defineProperty(arguments[1], '$'+module.id, {'get':function(){return exports;}});
+    arguments[3] = '('+arguments[3]+')();\n//# sourceURL='+module.uri;
+    eval(arguments[3]);
+    // NOTE Store module code in the cache if the loaded file is a bundle
+    if (typeof module.id !== 'string')
+        for (id in module)
+            arguments[1]['$'+require.resolve(id).id] = module[id].toString();
+    arguments[2].shift();
+}
+
+);
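
At its core, the vendored loader above is a memoizing module cache: identifiers are resolved to canonical ids, each module's source is fetched and evaluated once, and later `require` calls are served from the cache. A stripped-down Python sketch of that pattern (not the Honey API, purely illustrative):

```python
# Stripped-down sketch of the caching pattern the loader above uses:
# resolve an identifier to a canonical id, load the source once, and
# serve later requires from the cache.
_cache = {}

def resolve(identifier):
    return identifier if identifier.endswith(".js") else identifier + ".js"

def require(identifier, fetch):
    key = "$" + resolve(identifier)   # '$' prefix avoids builtin name clashes
    if key not in _cache:
        _cache[key] = fetch(resolve(identifier))  # fetch and evaluate once
    return _cache[key]
```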
@@ -128,7 +128,6 @@ class BufferedTokenStream(TokenStream):
 # @see #get(int i)
 #/
 def sync(self, i):
-    assert i >= 0
     n = i - len(self.tokens) + 1 # how many more elements we need?
     if n > 0 :
         fetched = self.fetch(n)
|
|||
elif c=='\r':
|
||||
return "\\r"
|
||||
else:
|
||||
return str(c)
|
||||
return unicode(c)
|
||||
|
||||
def getCharErrorDisplay(self, c):
|
||||
return "'" + self.getErrorDisplayForChar(c) + "'"
|
||||
|
|
|
@ -281,8 +281,8 @@ class Parser (Recognizer):
|
|||
if lexer is None:
|
||||
if self.getTokenStream() is not None:
|
||||
tokenSource = self.getTokenStream().getTokenSource()
|
||||
if isinstance( tokenSource, Lexer ):
|
||||
lexer = tokenSource
|
||||
if isinstance( tokenSource, Lexer ):
|
||||
lexer = tokenSource
|
||||
if lexer is None:
|
||||
raise UnsupportedOperationException("Parser can't discover a lexer to use")
|
||||
|
||||
|
|
|
@@ -73,6 +73,9 @@ class PredictionContext(object):
 def __init__(self, cachedHashCode):
     self.cachedHashCode = cachedHashCode
 
+def __len__(self):
+    return 0
+
 # This means only the {@link #EMPTY} context is in set.
 def isEmpty(self):
     return self is self.EMPTY
@@ -80,6 +83,9 @@ class PredictionContext(object):
 def hasEmptyPath(self):
     return self.getReturnState(len(self) - 1) == self.EMPTY_RETURN_STATE
 
+def getReturnState(self, index):
+    raise "illegal!"
+
 def __hash__(self):
     return self.cachedHashCode
 
@@ -88,11 +94,13 @@
 
 
 def calculateHashCode(parent, returnState):
-    return hash( str(parent) + str(returnState))
-
-def calculateEmptyHashCode():
-    return hash("")
+    return hash("") if parent is None else hash((hash(parent), returnState))
+
+def calculateListsHashCode(parents, returnStates ):
+    h = 0
+    for parent, returnState in parents, returnStates:
+        h = hash((h, calculateHashCode(parent, returnState)))
+    return h
 
 # Used to cache {@link PredictionContext} objects. Its used for the shared
 # context cash associated with contexts in DFA states. This cache
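
One caution about the new `calculateListsHashCode` as shown: `for parent, returnState in parents, returnStates:` iterates over the two-element tuple `(parents, returnStates)` and unpacks each *list*, so it only runs without error when each list happens to have exactly two items, and even then it does not pair elements. Element-wise pairing is spelled with `zip()`; a hedged sketch of the presumably intended behavior:

```python
# Pairing the two lists element-wise requires zip(); the version in the
# diff unpacks the lists themselves. Simplified sketch (assumes non-None
# parents, unlike the full calculateHashCode above).
def calculate_lists_hash_code(parents, return_states):
    h = 0
    for parent, return_state in zip(parents, return_states):
        h = hash((h, hash((hash(parent), return_state))))
    return h
```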
@@ -134,8 +142,7 @@ class SingletonPredictionContext(PredictionContext):
     return SingletonPredictionContext(parent, returnState)
 
 def __init__(self, parent, returnState):
-    assert returnState!=ATNState.INVALID_STATE_NUMBER
-    hashCode = calculateHashCode(parent, returnState) if parent is not None else calculateEmptyHashCode()
+    hashCode = calculateHashCode(parent, returnState)
     super(SingletonPredictionContext, self).__init__(hashCode)
     self.parentCtx = parent
     self.returnState = returnState
@@ -144,11 +151,9 @@ class SingletonPredictionContext(PredictionContext):
     return 1
 
 def getParent(self, index):
-    assert index == 0
     return self.parentCtx
 
 def getReturnState(self, index):
-    assert index == 0
     return self.returnState
 
 def __eq__(self, other):
@@ -158,8 +163,6 @@ class SingletonPredictionContext(PredictionContext):
         return False
     elif not isinstance(other, SingletonPredictionContext):
         return False
-    elif hash(self) != hash(other):
-        return False # can't be same if hash is different
     else:
         return self.returnState == other.returnState and self.parentCtx==other.parentCtx
 
@@ -185,15 +188,12 @@ class EmptyPredictionContext(SingletonPredictionContext):
 def isEmpty(self):
     return True
 
-def getParent(self, index):
-    return None
-
-def getReturnState(self, index):
-    return self.returnState
-
 def __eq__(self, other):
     return self is other
 
 def __hash__(self):
     return self.cachedHashCode
 
 def __unicode__(self):
     return "$"
 
@@ -206,9 +206,7 @@ class ArrayPredictionContext(PredictionContext):
 # returnState == {@link #EMPTY_RETURN_STATE}.
 
 def __init__(self, parents, returnStates):
-    super(ArrayPredictionContext, self).__init__(calculateHashCode(parents, returnStates))
-    assert parents is not None and len(parents)>0
-    assert returnStates is not None and len(returnStates)>0
+    super(ArrayPredictionContext, self).__init__(calculateListsHashCode(parents, returnStates))
     self.parents = parents
     self.returnStates = returnStates
 
@@ -276,17 +274,7 @@ def PredictionContextFromRuleContext(atn, outerContext=None):
     return SingletonPredictionContext.create(parent, transition.followState.stateNumber)
 
 
-def calculateListsHashCode(parents, returnStates ):
-
-    with StringIO() as s:
-        for parent in parents:
-            s.write(unicode(parent))
-        for returnState in returnStates:
-            s.write(unicode(returnState))
-    return hash(s.getvalue())
-
 def merge(a, b, rootIsWildcard, mergeCache):
     assert a is not None and b is not None # must be empty context, never null
 
     # share same graph if both same
     if a==b:
@@ -305,9 +293,9 @@ def merge(a, b, rootIsWildcard, mergeCache):
 
     # convert singleton so both are arrays to normalize
     if isinstance( a, SingletonPredictionContext ):
-        a = ArrayPredictionContext(a)
+        a = ArrayPredictionContext([a.parentCtx], [a.returnState])
     if isinstance( b, SingletonPredictionContext):
-        b = ArrayPredictionContext(b)
+        b = ArrayPredictionContext([b.parentCtx], [b.returnState])
     return mergeArrays(a, b, rootIsWildcard, mergeCache)
 
 
@@ -380,7 +368,7 @@ def mergeSingletons(a, b, rootIsWildcard, mergeCache):
         payloads[0] = b.returnState
         payloads[1] = a.returnState
     parents = [singleParent, singleParent]
-    a_ = ArrayPredictionContext(parents, payloads);
+    a_ = ArrayPredictionContext(parents, payloads)
     if mergeCache is not None:
         mergeCache.put(a, b, a_)
     return a_
@@ -486,9 +474,9 @@ def mergeArrays(a, b, rootIsWildcard, mergeCache):
         return previous
 
     # merge sorted payloads a + b => M
-    i = 0; # walks a
-    j = 0; # walks b
-    k = 0; # walks target M array
+    i = 0 # walks a
+    j = 0 # walks b
+    k = 0 # walks target M array
 
     mergedReturnStates = [] * (len(a.returnState) + len( b.returnStates))
     mergedParents = [] * len(mergedReturnStates)

@@ -93,10 +93,7 @@ class ATNConfig(object):
     and self.precedenceFilterSuppressed==other.precedenceFilterSuppressed
 
 def __hash__(self):
-    return hash( str(self.state.stateNumber) + "/" +
-                str(self.alt) + "/" +
-                str(self.context) + "/" +
-                str(self.semanticContext) )
+    return hash((self.state.stateNumber, self.alt, self.context, self.semanticContext))
 
 def __str__(self):
     return unicode(self)
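
This hunk, and several like it below (LexerATNConfig, the LexerAction classes, Predicate), replace string-concatenation hashes with tuple hashes. Tuples hash structurally, skip building a throwaway string, and keep field boundaries, whereas unseparated concatenation can merge adjacent fields into the same string. A small self-contained demonstration:

```python
# Why hash a tuple instead of a concatenated string: concatenation without
# separators loses field boundaries, tuples keep them.
a = hash(str(12) + str(3))   # hash("123")
b = hash(str(1) + str(23))   # hash("123") as well: guaranteed collision
assert a == b

x = hash((12, 3))
y = hash((1, 23))
print(x != y)   # True in practice: distinct tuples hash differently
```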
@@ -132,9 +129,9 @@ class LexerATNConfig(ATNConfig):
     self.passedThroughNonGreedyDecision = False if config is None else self.checkNonGreedyDecision(config, state)
 
 def __hash__(self):
-    return hash(str(self.state.stateNumber) + str(self.alt) + str(self.context) \
-            + str(self.semanticContext) + str(1 if self.passedThroughNonGreedyDecision else 0) \
-            + str(self.lexerActionExecutor))
+    return hash((self.state.stateNumber, self.alt, self.context,
+            self.semanticContext, self.passedThroughNonGreedyDecision,
+            self.lexerActionExecutor))
 
 def __eq__(self, other):
     if self is other:

@@ -34,7 +34,7 @@
 # graph-structured stack.
 #/
 from io import StringIO
-from antlr4.PredictionContext import PredictionContext, merge
+from antlr4.PredictionContext import merge
 from antlr4.Utils import str_list
 from antlr4.atn.ATN import ATN
 from antlr4.atn.SemanticContext import SemanticContext
@@ -51,7 +51,7 @@ class ATNConfigSet(object):
 def __init__(self, fullCtx=True):
     # All configs but hashed by (s, i, _, pi) not including context. Wiped out
     # when we go readonly as this set becomes a DFA state.
-    self.configLookup = set()
+    self.configLookup = dict()
     # Indicates that this configuration set is part of a full context
     # LL prediction. It will be used to determine how to merge $. With SLL
     # it's a wildcard whereas it is not for LL context merge.
@@ -115,10 +115,17 @@ class ATNConfigSet(object):
     return True
 
 def getOrAdd(self, config):
-    for c in self.configLookup:
-        if c==config:
-            return c
-    self.configLookup.add(config)
+    h = hash(config)
+    l = self.configLookup.get(h, None)
+    if l is not None:
+        for c in l:
+            if c==config:
+                return c
+    if l is None:
+        l = [config]
+        self.configLookup[h] = l
+    else:
+        l.append(config)
     return config
 
 def getStates(self):
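
The new `getOrAdd` is a hand-rolled hash table: configs are bucketed in a dict keyed by `hash(config)`, and only the small bucket is scanned with `==`, replacing the old linear scan over the whole set. The same idiom as a generic helper, for illustration:

```python
# Bucket-by-hash interning, the same shape as getOrAdd() above: O(1)
# average lookup, with == deciding equality inside a bucket.
def get_or_add(buckets, item):
    h = hash(item)
    bucket = buckets.setdefault(h, [])
    for existing in bucket:
        if existing == item:
            return existing      # canonical instance already interned
    bucket.append(item)
    return item

buckets = {}
first = get_or_add(buckets, (1, 2))
print(get_or_add(buckets, (1, 2)) is first)  # True: same stored object
```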
@@ -140,7 +147,7 @@ class ATNConfigSet(object):
 def optimizeConfigs(self, interpreter):
     if self.readonly:
         raise IllegalStateException("This set is readonly")
-    if len(self.configLookup)==0:
+    if len(self.configs)==0:
         return
     for config in self.configs:
         config.context = interpreter.getCachedContext(config.context)
@@ -157,7 +164,7 @@ class ATNConfigSet(object):
     return False
 
 same = self.configs is not None and \
-    self.configs==other.configs and \
+    self.configs == other.configs and \
     self.fullCtx == other.fullCtx and \
     self.uniqueAlt == other.uniqueAlt and \
     self.conflictingAlts == other.conflictingAlts and \
@@ -174,10 +181,10 @@ class ATNConfigSet(object):
     return self.hashConfigs()
 
 def hashConfigs(self):
-    with StringIO() as buf:
-        for cfg in self.configs:
-            buf.write(unicode(cfg))
-        return hash(buf.getvalue())
+    h = 0
+    for cfg in self.configs:
+        h = hash((h, cfg))
+    return h
 
 def __len__(self):
     return len(self.configs)
@@ -185,16 +192,12 @@ class ATNConfigSet(object):
 def isEmpty(self):
     return len(self.configs)==0
 
-def __contains__(self, item):
+def __contains__(self, config):
     if self.configLookup is None:
         raise UnsupportedOperationException("This method is not implemented for readonly sets.")
-    return item in self.configLookup
-
-def containsFast(self, obj):
-    if self.configLookup is None:
-        raise UnsupportedOperationException("This method is not implemented for readonly sets.")
-    return self.configLookup.containsFast(obj)
+    h = hash(config)
+    l = self.configLookup.get(h, None)
+    return l is not None and config in l
 
 def clear(self):
     if self.readonly:
@@ -231,7 +234,6 @@ class OrderedATNConfigSet(ATNConfigSet):
 
 def __init__(self):
     super(OrderedATNConfigSet, self).__init__()
-    # self.configLookup = set()
 

@@ -533,9 +533,6 @@ class LexerATNSimulator(ATNSimulator):
 # configuration containing an ATN rule stop state. Later, when
 # traversing the DFA, we will know which rule to accept.
 def addDFAState(self, configs):
-    # the lexer evaluates predicates on-the-fly; by this point configs
-    # should not contain any configurations with unevaluated predicates.
-    assert not configs.hasSemanticContext
 
     proposed = DFAState(configs=configs)
     firstConfigWithRuleStopState = None
@@ -586,3 +583,4 @@ class LexerATNSimulator(ATNSimulator):
     return "'" + chr(t) + "'"
 
 
+LexerATNSimulator.ERROR = DFAState(0x7FFFFFFF, ATNConfigSet())

@@ -47,7 +47,7 @@ class LexerAction(object):
     self.isPositionDependent = False
 
 def __hash__(self):
-    return hash(str(self.actionType))
+    return hash(self.actionType)
 
 def __eq__(self, other):
     return self is other
@@ -92,7 +92,7 @@ class LexerTypeAction(LexerAction):
     lexer.type = self.type
 
 def __hash__(self):
-    return hash(str(self.actionType) + str(self.type))
+    return hash((self.actionType, self.type))
 
 def __eq__(self, other):
     if self is other:
@@ -120,7 +120,7 @@ class LexerPushModeAction(LexerAction):
     lexer.pushMode(self.mode)
 
 def __hash__(self):
-    return hash(str(self.actionType) + str(self.mode))
+    return hash((self.actionType, self.mode))
 
 def __eq__(self, other):
     if self is other:
@@ -188,7 +188,7 @@ class LexerModeAction(LexerAction):
     lexer.mode(self.mode)
 
 def __hash__(self):
-    return hash(str(self.actionType) + str(self.mode))
+    return hash((self.actionType, self.mode))
 
 def __eq__(self, other):
     if self is other:
@@ -232,7 +232,7 @@ class LexerCustomAction(LexerAction):
     lexer.action(None, self.ruleIndex, self.actionIndex)
 
 def __hash__(self):
-    return hash(str(self.actionType) + str(self.ruleIndex) + str(self.actionIndex))
+    return hash((self.actionType, self.ruleIndex, self.actionIndex))
 
 def __eq__(self, other):
     if self is other:
@@ -258,7 +258,7 @@ class LexerChannelAction(LexerAction):
     lexer._channel = self.channel
 
 def __hash__(self):
-    return hash(str(self.actionType) + str(self.channel))
+    return hash((self.actionType, self.channel))
 
 def __eq__(self, other):
     if self is other:
@@ -305,7 +305,7 @@ class LexerIndexedCustomAction(LexerAction):
     self.action.execute(lexer)
 
 def __hash__(self):
-    return hash(str(self.actionType) + str(self.offset) + str(self.action))
+    return hash((self.actionType, self.offset, self.action))
 
 def __eq__(self, other):
     if self is other:

@@ -711,7 +711,6 @@ class ParserATNSimulator(ATNSimulator):
     print("testing " + self.getTokenName(t) + " at " + str(c))
-
 if isinstance(c.state, RuleStopState):
     assert c.context.isEmpty()
     if fullCtx or t == Token.EOF:
         if skippedStopStates is None:
             skippedStopStates = list()
@@ -787,7 +786,6 @@ class ParserATNSimulator(ATNSimulator):
 # multiple alternatives are viable.
 #
-
 if skippedStopStates is not None and ( (not fullCtx) or (not PredictionMode.hasConfigInRuleStopState(reach))):
     assert len(skippedStopStates)>0
     for c in skippedStopStates:
         reach.add(c, self.mergeCache)
 if len(reach)==0:
@@ -979,7 +977,6 @@ class ParserATNSimulator(ATNSimulator):
 for i in range(1, len(altToPred)):
     pred = altToPred[i]
     # unpredicated is indicated by SemanticContext.NONE
-
     assert pred is not None
     if ambigAlts is not None and i in ambigAlts:
         pairs.append(PredPrediction(pred, i))
     if pred is not SemanticContext.NONE:
@@ -1119,7 +1116,6 @@ class ParserATNSimulator(ATNSimulator):
 initialDepth = 0;
 self.closureCheckingStopState(config, configs, closureBusy, collectPredicates,
     fullCtx, initialDepth, treatEofAsEpsilon)
 assert not fullCtx or not configs.dipsIntoOuterContext
-
 
 def closureCheckingStopState(self, config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon):
@@ -1131,7 +1127,8 @@ class ParserATNSimulator(ATNSimulator):
 # run thru all possible stack tops in ctx
 if not config.context.isEmpty():
     for i in range(0, len(config.context)):
-        if config.context.getReturnState(i) is PredictionContext.EMPTY_RETURN_STATE:
+        state = config.context.getReturnState(i)
+        if state is PredictionContext.EMPTY_RETURN_STATE:
             if fullCtx:
                 configs.add(ATNConfig(state=config.state, context=PredictionContext.EMPTY, config=config), self.mergeCache)
                 continue
@@ -1142,14 +1139,13 @@ class ParserATNSimulator(ATNSimulator):
                 self.closure_(config, configs, closureBusy, collectPredicates,
                     fullCtx, depth, treatEofAsEpsilon)
                 continue
-            returnState = self.atn.states[config.context.getReturnState(i)]
+            returnState = self.atn.states[state]
             newContext = config.context.getParent(i) # "pop" return state
             c = ATNConfig(state=returnState, alt=config.alt, context=newContext, semantic=config.semanticContext)
             # While we have context to pop back from, we may have
             # gotten that context AFTER having falling off a rule.
             # Make sure we track that we are now out of context.
             c.reachesIntoOuterContext = config.reachesIntoOuterContext
-            assert depth > - 2**63
             self.closureCheckingStopState(c, configs, closureBusy, collectPredicates, fullCtx, depth - 1, treatEofAsEpsilon)
         return
     elif fullCtx:
@@ -1183,7 +1179,6 @@ class ParserATNSimulator(ATNSimulator):
 closureBusy.add(c)
 newDepth = depth
 if isinstance( config.state, RuleStopState):
-    assert not fullCtx
     # target fell off end of rule; mark resulting c as having dipped into outer context
     # We can't get here if incoming config was rule stop and we had context
     # track how far we dip into outer context. Might
@@ -1200,7 +1195,6 @@ class ParserATNSimulator(ATNSimulator):
     c.precedenceFilterSuppressed = True
 c.reachesIntoOuterContext += 1
 configs.dipsIntoOuterContext = True # TODO: can remove? only care when we add to set per middle of this method
-assert newDepth > - 2**63
 newDepth -= 1
 if self.debug:
     print("dips into outer ctx: " + str(c))
@@ -1217,28 +1211,30 @@ class ParserATNSimulator(ATNSimulator):
     else:
         return "<rule " + str(index) + ">"
 
-def getEpsilonTarget(self, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon):
-    tt = t.serializationType
-    if tt==Transition.RULE:
-        return self.ruleTransition(config, t)
-    elif tt==Transition.PRECEDENCE:
-        return self.precedenceTransition(config, t, collectPredicates, inContext, fullCtx)
-    elif tt==Transition.PREDICATE:
-        return self.predTransition(config, t, collectPredicates, inContext, fullCtx)
-    elif tt==Transition.ACTION:
-        return self.actionTransition(config, t)
-    elif tt==Transition.EPSILON:
-        return ATNConfig(state=t.target, config=config)
-    elif tt in [ Transition.ATOM, Transition.RANGE, Transition.SET ]:
-        # EOF transitions act like epsilon transitions after the first EOF
-        # transition is traversed
-        if treatEofAsEpsilon:
-            if t.matches(Token.EOF, 0, 1):
-                return ATNConfig(state=t.target, config=config)
-        return None
-    else:
-        return None
+epsilonTargetMethods = dict()
+epsilonTargetMethods[Transition.RULE] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+    sim.ruleTransition(config, t)
+epsilonTargetMethods[Transition.PRECEDENCE] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+    sim.precedenceTransition(config, t, collectPredicates, inContext, fullCtx)
+epsilonTargetMethods[Transition.PREDICATE] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+    sim.predTransition(config, t, collectPredicates, inContext, fullCtx)
+epsilonTargetMethods[Transition.ACTION] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+    sim.actionTransition(config, t)
+epsilonTargetMethods[Transition.EPSILON] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+    ATNConfig(state=t.target, config=config)
+epsilonTargetMethods[Transition.ATOM] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+    ATNConfig(state=t.target, config=config) if treatEofAsEpsilon and t.matches(Token.EOF, 0, 1) else None
+epsilonTargetMethods[Transition.RANGE] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+    ATNConfig(state=t.target, config=config) if treatEofAsEpsilon and t.matches(Token.EOF, 0, 1) else None
+epsilonTargetMethods[Transition.SET] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+    ATNConfig(state=t.target, config=config) if treatEofAsEpsilon and t.matches(Token.EOF, 0, 1) else None
+
+def getEpsilonTarget(self, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon):
+    m = self.epsilonTargetMethods.get(t.serializationType, None)
+    if m is None:
+        return None
+    else:
+        return m(self, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon)
 
 def actionTransition(self, config, t):
     if self.debug:
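
The hunk above replaces an if/elif chain over transition types with a class-level dict of lambdas, so dispatch becomes one dict lookup and every handler shares the same signature. The shape of that refactor in miniature (toy handler names and payloads, not the real transition logic):

```python
# Table dispatch in miniature: a dict lookup replaces a chain of type
# tests, and unknown kinds fall through to None, as in the code above.
HANDLERS = {
    "epsilon": lambda sim, t: ("epsilon", t),
    "action":  lambda sim, t: ("action", t),
}

def get_target(sim, kind, t):
    handler = HANDLERS.get(kind)
    return None if handler is None else handler(sim, t)

print(get_target(object(), "action", 42))   # ('action', 42)
print(get_target(object(), "unknown", 42))  # None
```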
@@ -1261,7 +1257,7 @@ class ParserATNSimulator(ATNSimulator):
 # later during conflict resolution.
 currentPosition = self._input.index
 self._input.seek(self._startIndex)
-predSucceeds = pt.getPredicate().eval(self.parser, self._outerContext);
+predSucceeds = pt.getPredicate().eval(self.parser, self._outerContext)
 self._input.seek(currentPosition)
 if predSucceeds:
     c = ATNConfig(state=pt.target, config=config) # no pred context
|
@@ -498,11 +498,11 @@ class PredictionMode(object):
     def getConflictingAltSubsets(cls, configs):
         configToAlts = dict()
         for c in configs:
-            s = str(c.state.stateNumber) + "/" + str(c.context)
-            alts = configToAlts.get(s, None)
+            h = hash((c.state.stateNumber, c.context))
+            alts = configToAlts.get(h, None)
             if alts is None:
                 alts = set()
-                configToAlts[s] = alts
+                configToAlts[h] = alts
             alts.add(c.alt)
         return configToAlts.values()
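The old key built a fresh string per configuration; the new key is the integer hash of a tuple, which avoids string construction entirely. A standalone sketch of the keying change (names are illustrative):

    # Sketch of the keying change in getConflictingAltSubsets.
    state_number = 12
    context = ("$", 5)               # illustrative stand-in for a PredictionContext

    old_key = str(state_number) + "/" + str(context)   # allocates a string per config
    new_key = hash((state_number, context))            # a single integer

    buckets = {}
    buckets.setdefault(new_key, set()).add(1)          # alt 1 predicted here
    # Note: keying by hash() rather than by the tuple itself means two distinct
    # (state, context) pairs whose hashes collide would share one alt set.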
@@ -136,13 +136,7 @@ class Predicate(SemanticContext):
         return parser.sempred(localctx, self.ruleIndex, self.predIndex)

     def __hash__(self):
-        with StringIO() as buf:
-            buf.write(unicode(self.ruleIndex))
-            buf.write(u"/")
-            buf.write(unicode(self.predIndex))
-            buf.write(u"/")
-            buf.write(unicode(self.isCtxDependent))
-            return hash(buf.getvalue())
+        return hash((self.ruleIndex, self.predIndex, self.isCtxDependent))

     def __eq__(self, other):
         if self is other:
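The StringIO version serializes three fields to text before hashing; hashing the tuple of fields gives the same equality contract with far less work. A minimal sketch of the pattern (the class name is illustrative, not from the runtime):

    # Sketch: hash the fields directly, keeping __hash__ consistent with __eq__
    # so instances behave correctly as set members and dict keys.
    class Pred(object):
        def __init__(self, ruleIndex, predIndex, isCtxDependent):
            self.ruleIndex = ruleIndex
            self.predIndex = predIndex
            self.isCtxDependent = isCtxDependent

        def __eq__(self, other):
            return isinstance(other, Pred) and \
                (self.ruleIndex, self.predIndex, self.isCtxDependent) == \
                (other.ruleIndex, other.predIndex, other.isCtxDependent)

        def __hash__(self):
            return hash((self.ruleIndex, self.predIndex, self.isCtxDependent))

    assert Pred(1, 2, False) == Pred(1, 2, False)
    assert hash(Pred(1, 2, False)) == hash(Pred(1, 2, False))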
@@ -220,7 +214,10 @@ class AND(SemanticContext):
         return self.opnds == other.opnds

     def __hash__(self):
-        return hash(str(self.opnds) + "/AND")
+        h = 0
+        for o in self.opnds:
+            h = hash((h, o))
+        return hash((h, "AND"))

     #
     # {@inheritDoc}
@@ -308,7 +305,10 @@ class OR (SemanticContext):
         return self.opnds == other.opnds

     def __hash__(self):
-        return hash(str(self.opnds) + "/OR")
+        h = 0
+        for o in self.opnds:
+            h = hash((h, o))
+        return hash((h, "OR"))

     # <p>
     # The evaluation of predicates by this context is short-circuiting, but
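Both AND and OR now fold their operands into a running hash rather than hashing str(self.opnds). The fold is order-sensitive, which is fine as long as opnds keeps a stable order; a standalone sketch:

    # Sketch: fold a sequence of hashable operands into one hash value.
    # hash((h, o)) mixes the running value h with each operand in turn,
    # so ["a", "b"] and ["b", "a"] hash differently (order-sensitive).
    def fold_hash(opnds, tag):
        h = 0
        for o in opnds:
            h = hash((h, o))
        return hash((h, tag))

    print(fold_hash(["a", "b"], "AND"))
    print(fold_hash(["b", "a"], "AND"))   # different value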
@@ -131,7 +131,6 @@ class BufferedTokenStream(TokenStream):
     # @see #get(int i)
     #/
     def sync(self, i:int):
-        assert i >= 0
         n = i - len(self.tokens) + 1 # how many more elements do we need?
         if n > 0 :
             fetched = self.fetch(n)
@@ -101,9 +101,6 @@ class Parser (Recognizer):
         self._syntaxErrors = 0
         self.setInputStream(input)

-    def __setattr__(self, key, value):
-        object.__setattr__(self, key, value)
-
     # reset the parser's state#
     def reset(self):
         if self._input is not None:
@@ -170,7 +167,7 @@ class Parser (Recognizer):
             self.consume()
         else:
             t = self._errHandler.recoverInline(self)
-            if self._buildParseTrees and t.tokenIndex == -1:
+            if self.buildParseTrees and t.tokenIndex == -1:
                 # we must have conjured up a new token during single token insertion
                 # if it's not the current symbol
                 self._ctx.addErrorNode(t)
@@ -74,6 +74,9 @@ class PredictionContext(object):
     def __init__(self, cachedHashCode:int):
         self.cachedHashCode = cachedHashCode

+    def __len__(self):
+        return 0
+
     # This means only the {@link #EMPTY} context is in set.
     def isEmpty(self):
         return self is self.EMPTY
@@ -81,16 +84,20 @@ class PredictionContext(object):
     def hasEmptyPath(self):
         return self.getReturnState(len(self) - 1) == self.EMPTY_RETURN_STATE

     def getReturnState(self, index:int):
         raise "illegal!"

     def __hash__(self):
         return self.cachedHashCode


 def calculateHashCode(parent:PredictionContext, returnState:int):
-    return hash( str(parent) + str(returnState))
-
-def calculateEmptyHashCode():
-    return hash("")
+    return hash("") if parent is None else hash((hash(parent), returnState))
+
+def calculateListsHashCode(parents:[], returnStates:[] ):
+    h = 0
+    for parent, returnState in zip(parents, returnStates):
+        h = hash((h, calculateHashCode(parent, returnState)))
+    return h

 # Used to cache {@link PredictionContext} objects. It's used for the shared
 # context cache associated with contexts in DFA states. This cache
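PredictionContext computes its hash once at construction and stores it in cachedHashCode, so __hash__ is a field read; the nodes are immutable, which is what makes caching safe. A reduced sketch of the same scheme (names illustrative):

    # Sketch: precompute the hash of an immutable linked node at construction.
    class Node(object):
        def __init__(self, parent, return_state):
            self.parent = parent
            self.return_state = return_state
            # parent may be None for the root; mirror the "empty" special case
            self.cached_hash = hash("") if parent is None \
                else hash((hash(parent), return_state))

        def __hash__(self):
            return self.cached_hash    # O(1) no matter how deep the chain is

    root = Node(None, -1)
    child = Node(root, 7)
    print(hash(child) == hash(Node(root, 7)))   # True: same structure, same hash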
@@ -132,8 +139,7 @@ class SingletonPredictionContext(PredictionContext):
         return SingletonPredictionContext(parent, returnState)

     def __init__(self, parent:PredictionContext, returnState:int):
         assert returnState!=ATNState.INVALID_STATE_NUMBER
-        hashCode = calculateHashCode(parent, returnState) if parent is not None else calculateEmptyHashCode()
+        hashCode = calculateHashCode(parent, returnState)
         super().__init__(hashCode)
         self.parentCtx = parent
         self.returnState = returnState
@@ -142,11 +148,9 @@ class SingletonPredictionContext(PredictionContext):
         return 1

     def getParent(self, index:int):
-        assert index == 0
         return self.parentCtx

     def getReturnState(self, index:int):
-        assert index == 0
         return self.returnState

     def __eq__(self, other):
@@ -156,10 +160,8 @@ class SingletonPredictionContext(PredictionContext):
             return False
         elif not isinstance(other, SingletonPredictionContext):
             return False
-        elif hash(self) != hash(other):
-            return False # can't be same if hash is different
         else:
-            return self.returnState == other.returnState and self.parentCtx==other.parentCtx
+            return self.returnState == other.returnState and self.parentCtx == other.parentCtx

     def __hash__(self):
         return self.cachedHashCode
@@ -183,15 +185,12 @@ class EmptyPredictionContext(SingletonPredictionContext):
     def isEmpty(self):
         return True

     def getParent(self, index:int):
         return None

     def getReturnState(self, index:int):
         return self.returnState

     def __eq__(self, other):
         return self is other

     def __hash__(self):
         return self.cachedHashCode

     def __str__(self):
         return "$"
@@ -204,9 +203,7 @@ class ArrayPredictionContext(PredictionContext):
     # returnState == {@link #EMPTY_RETURN_STATE}.

     def __init__(self, parents:list, returnStates:list):
-        super().__init__(calculateHashCode(parents, returnStates))
-        assert parents is not None and len(parents)>0
-        assert returnStates is not None and len(returnStates)>0
+        super().__init__(calculateListsHashCode(parents, returnStates))
         self.parents = parents
         self.returnStates = returnStates
@@ -274,17 +271,7 @@ def PredictionContextFromRuleContext(atn:ATN, outerContext:RuleContext=None):
     return SingletonPredictionContext.create(parent, transition.followState.stateNumber)


-def calculateListsHashCode(parents:[], returnStates:int ):
-
-    with StringIO() as s:
-        for parent in parents:
-            s.write(str(parent))
-        for returnState in returnStates:
-            s.write(str(returnState))
-        return hash(s.getvalue())
-
-
 def merge(a:PredictionContext, b:PredictionContext, rootIsWildcard:bool, mergeCache:dict):
     assert a is not None and b is not None # must be empty context, never null

     # share same graph if both same
     if a==b:
@@ -303,9 +290,9 @@ def merge(a:PredictionContext, b:PredictionContext, rootIsWildcard:bool, mergeCa

     # convert singleton so both are arrays to normalize
     if isinstance( a, SingletonPredictionContext ):
-        a = ArrayPredictionContext([a.parent], [a.returnState])
+        a = ArrayPredictionContext([a.parentCtx], [a.returnState])
     if isinstance( b, SingletonPredictionContext):
-        b = ArrayPredictionContext([b.parent], [b.returnState])
+        b = ArrayPredictionContext([b.parentCtx], [b.returnState])
     return mergeArrays(a, b, rootIsWildcard, mergeCache)
@@ -60,8 +60,6 @@ class ATNConfig(object):
         if semantic is None:
             semantic = SemanticContext.NONE

-        if not isinstance(state, ATNState):
-            pass
         # The ATN state associated with this configuration#/
         self.state = state
         # What alt (or lexer rule) is predicted by this configuration#/
@@ -83,7 +81,6 @@ class ATNConfig(object):
         self.reachesIntoOuterContext = 0 if config is None else config.reachesIntoOuterContext
         self.precedenceFilterSuppressed = False if config is None else config.precedenceFilterSuppressed

     # An ATN configuration is equal to another if both have
     # the same state, they predict the same alternative, and
     # syntactic/semantic contexts are the same.
@@ -101,10 +98,7 @@ class ATNConfig(object):
             and self.precedenceFilterSuppressed==other.precedenceFilterSuppressed

     def __hash__(self):
-        return hash( str(self.state.stateNumber) + "/" +
-                     str(self.alt) + "/" +
-                     str(self.context) + "/" +
-                     str(self.semanticContext) )
+        return hash((self.state.stateNumber, self.alt, self.context, self.semanticContext))

     def __str__(self):
         with StringIO() as buf:
@@ -141,9 +135,9 @@ class LexerATNConfig(ATNConfig):
         self.passedThroughNonGreedyDecision = False if config is None else self.checkNonGreedyDecision(config, state)

     def __hash__(self):
-        return hash(str(self.state.stateNumber) + str(self.alt) + str(self.context) \
-                    + str(self.semanticContext) + str(1 if self.passedThroughNonGreedyDecision else 0) \
-                    + str(self.lexerActionExecutor))
+        return hash((self.state.stateNumber, self.alt, self.context,
+                     self.semanticContext, self.passedThroughNonGreedyDecision,
+                     self.lexerActionExecutor))

     def __eq__(self, other):
         if self is other:
@@ -54,7 +54,7 @@ class ATNConfigSet(object):
     def __init__(self, fullCtx:bool=True):
         # All configs but hashed by (s, i, _, pi) not including context. Wiped out
         # when we go readonly as this set becomes a DFA state.
-        self.configLookup = set()
+        self.configLookup = dict()
         # Indicates that this configuration set is part of a full context
         # LL prediction. It will be used to determine how to merge $. With SLL
         # it's a wildcard whereas it is not for LL context merge.
@@ -118,10 +118,17 @@ class ATNConfigSet(object):
         return True

     def getOrAdd(self, config:ATNConfig):
-        for c in self.configLookup:
-            if c==config:
-                return c
-        self.configLookup.add(config)
+        h = hash(config)
+        l = self.configLookup.get(h, None)
+        if l is not None:
+            for c in l:
+                if c==config:
+                    return c
+        if l is None:
+            l = [config]
+            self.configLookup[h] = l
+        else:
+            l.append(config)
         return config

     def getStates(self):
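getOrAdd now buckets configs by their hash in a dict of short lists, so a lookup scans only the handful of entries that share a hash instead of the whole set, while equality inside the bucket keeps collisions correct. The same structure in isolation (a minimal sketch):

    # Sketch: a dict from hash value to a small list of entries replaces a
    # linear scan over the whole collection.
    lookup = {}

    def get_or_add(item):
        h = hash(item)
        bucket = lookup.get(h, None)
        if bucket is not None:
            for existing in bucket:
                if existing == item:
                    return existing          # already interned
            bucket.append(item)
        else:
            lookup[h] = [item]
        return item

    a = get_or_add((1, "x"))
    b = get_or_add((1, "x"))
    print(a is b)                            # True: second call returns the first object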
@@ -143,7 +150,7 @@ class ATNConfigSet(object):
     def optimizeConfigs(self, interpreter:ATNSimulator):
         if self.readonly:
             raise IllegalStateException("This set is readonly")
-        if len(self.configLookup)==0:
+        if len(self.configs)==0:
             return
         for config in self.configs:
             config.context = interpreter.getCachedContext(config.context)
@@ -177,10 +184,10 @@ class ATNConfigSet(object):
             return self.hashConfigs()

     def hashConfigs(self):
-        with StringIO() as buf:
-            for cfg in self.configs:
-                buf.write(str(cfg))
-            return hash(buf.getvalue())
+        h = 0
+        for cfg in self.configs:
+            h = hash((h, cfg))
+        return h

     def __len__(self):
         return len(self.configs)
@@ -188,16 +195,12 @@ class ATNConfigSet(object):
     def isEmpty(self):
         return len(self.configs)==0

-    def __contains__(self, item):
+    def __contains__(self, config):
         if self.configLookup is None:
             raise UnsupportedOperationException("This method is not implemented for readonly sets.")
-        return item in self.configLookup
-
-    def containsFast(self, obj:ATNConfig):
-        if self.configLookup is None:
-            raise UnsupportedOperationException("This method is not implemented for readonly sets.")
-        return self.configLookup.containsFast(obj)
+        h = hash(config)
+        l = self.configLookup.get(h, None)
+        return l is not None and config in l

     def clear(self):
         if self.readonly:
@@ -231,7 +234,6 @@ class OrderedATNConfigSet(ATNConfigSet):

     def __init__(self):
         super().__init__()
-        # self.configLookup = set()
@@ -539,9 +539,6 @@ class LexerATNSimulator(ATNSimulator):
     # configuration containing an ATN rule stop state. Later, when
     # traversing the DFA, we will know which rule to accept.
     def addDFAState(self, configs:ATNConfigSet) -> DFAState:
-        # the lexer evaluates predicates on-the-fly; by this point configs
-        # should not contain any configurations with unevaluated predicates.
-        assert not configs.hasSemanticContext

         proposed = DFAState(configs=configs)
         firstConfigWithRuleStopState = None
@@ -53,7 +53,7 @@ class LexerAction(object):
         self.isPositionDependent = False

     def __hash__(self):
-        return hash(str(self.actionType))
+        return hash(self.actionType)

     def __eq__(self, other):
         return self is other

@@ -92,7 +92,7 @@ class LexerTypeAction(LexerAction):
         lexer.type = self.type

     def __hash__(self):
-        return hash(str(self.actionType) + str(self.type))
+        return hash((self.actionType, self.type))

     def __eq__(self, other):
         if self is other:

@@ -120,7 +120,7 @@ class LexerPushModeAction(LexerAction):
         lexer.pushMode(self.mode)

     def __hash__(self):
-        return hash(str(self.actionType) + str(self.mode))
+        return hash((self.actionType, self.mode))

     def __eq__(self, other):
         if self is other:

@@ -188,7 +188,7 @@ class LexerModeAction(LexerAction):
         lexer.mode(self.mode)

     def __hash__(self):
-        return hash(str(self.actionType) + str(self.mode))
+        return hash((self.actionType, self.mode))

     def __eq__(self, other):
         if self is other:

@@ -232,7 +232,7 @@ class LexerCustomAction(LexerAction):
         lexer.action(None, self.ruleIndex, self.actionIndex)

     def __hash__(self):
-        return hash(str(self.actionType) + str(self.ruleIndex) + str(self.actionIndex))
+        return hash((self.actionType, self.ruleIndex, self.actionIndex))

     def __eq__(self, other):
         if self is other:

@@ -258,7 +258,7 @@ class LexerChannelAction(LexerAction):
         lexer._channel = self.channel

     def __hash__(self):
-        return hash(str(self.actionType) + str(self.channel))
+        return hash((self.actionType, self.channel))

     def __eq__(self, other):
         if self is other:

@@ -305,7 +305,7 @@ class LexerIndexedCustomAction(LexerAction):
         self.action.execute(lexer)

     def __hash__(self):
-        return hash(str(self.actionType) + str(self.offset) + str(self.action))
+        return hash((self.actionType, self.offset, self.action))

     def __eq__(self, other):
         if self is other:
@@ -716,7 +716,6 @@ class ParserATNSimulator(ATNSimulator):
                 print("testing " + self.getTokenName(t) + " at " + str(c))

             if isinstance(c.state, RuleStopState):
-                assert c.context.isEmpty()
                 if fullCtx or t == Token.EOF:
                     if skippedStopStates is None:
                         skippedStopStates = list()
@@ -792,7 +791,6 @@ class ParserATNSimulator(ATNSimulator):
         # multiple alternatives are viable.
         #
         if skippedStopStates is not None and ( (not fullCtx) or (not PredictionMode.hasConfigInRuleStopState(reach))):
-            assert len(skippedStopStates)>0
             for c in skippedStopStates:
                 reach.add(c, self.mergeCache)
         if len(reach)==0:
@@ -984,7 +982,6 @@ class ParserATNSimulator(ATNSimulator):
         for i in range(1, len(altToPred)):
             pred = altToPred[i]
             # unpredicated is indicated by SemanticContext.NONE
-            assert pred is not None
             if ambigAlts is not None and i in ambigAlts:
                 pairs.append(PredPrediction(pred, i))
             if pred is not SemanticContext.NONE:
@@ -1124,7 +1121,6 @@ class ParserATNSimulator(ATNSimulator):
         initialDepth = 0
         self.closureCheckingStopState(config, configs, closureBusy, collectPredicates,
                                  fullCtx, initialDepth, treatEofAsEpsilon)
-        assert not fullCtx or not configs.dipsIntoOuterContext


     def closureCheckingStopState(self, config:ATNConfig, configs:ATNConfigSet, closureBusy:set, collectPredicates:bool, fullCtx:bool, depth:int, treatEofAsEpsilon:bool):
@@ -1136,7 +1132,8 @@ class ParserATNSimulator(ATNSimulator):
             # run thru all possible stack tops in ctx
             if not config.context.isEmpty():
                 for i in range(0, len(config.context)):
-                    if config.context.getReturnState(i) is PredictionContext.EMPTY_RETURN_STATE:
+                    state = config.context.getReturnState(i)
+                    if state is PredictionContext.EMPTY_RETURN_STATE:
                         if fullCtx:
                             configs.add(ATNConfig(state=config.state, context=PredictionContext.EMPTY, config=config), self.mergeCache)
                             continue
@@ -1147,14 +1144,13 @@ class ParserATNSimulator(ATNSimulator):
                             self.closure_(config, configs, closureBusy, collectPredicates,
                                      fullCtx, depth, treatEofAsEpsilon)
                             continue
-                    returnState = self.atn.states[config.context.getReturnState(i)]
+                    returnState = self.atn.states[state]
                     newContext = config.context.getParent(i) # "pop" return state
                     c = ATNConfig(state=returnState, alt=config.alt, context=newContext, semantic=config.semanticContext)
                     # While we have context to pop back from, we may have
                     # gotten that context AFTER having fallen off a rule.
                     # Make sure we track that we are now out of context.
                     c.reachesIntoOuterContext = config.reachesIntoOuterContext
-                    assert depth > - 2**63
                     self.closureCheckingStopState(c, configs, closureBusy, collectPredicates, fullCtx, depth - 1, treatEofAsEpsilon)
                 return
         elif fullCtx:
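Note that the two hunks above also stop calling config.context.getReturnState(i) twice by saving the result in a local variable. Attribute and method lookups are comparatively expensive in CPython, so hoisting them out of hot loops is a common optimization; a tiny illustration (timings will vary by machine):

    # Sketch: cache a repeated method-call result in a local variable.
    import timeit

    data = list(range(1000))

    def twice():
        total = 0
        for i in range(len(data)):
            if data.__getitem__(i) is not None:
                total += data.__getitem__(i)     # second lookup + call
        return total

    def once():
        total = 0
        for i in range(len(data)):
            v = data.__getitem__(i)              # one lookup + call
            if v is not None:
                total += v
        return total

    print(timeit.timeit(twice, number=1000))
    print(timeit.timeit(once, number=1000))      # typically faster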
@@ -1188,7 +1184,6 @@ class ParserATNSimulator(ATNSimulator):
             closureBusy.add(c)
             newDepth = depth
             if isinstance( config.state, RuleStopState):
-                assert not fullCtx
                 # target fell off end of rule; mark resulting c as having dipped into outer context
                 # We can't get here if incoming config was rule stop and we had context
                 # track how far we dip into outer context. Might
@@ -1205,7 +1200,6 @@ class ParserATNSimulator(ATNSimulator):
                     c.precedenceFilterSuppressed = True
                 c.reachesIntoOuterContext += 1
                 configs.dipsIntoOuterContext = True # TODO: can remove? only care when we add to set per middle of this method
-                assert newDepth > - 2**63
                 newDepth -= 1
                 if self.debug:
                     print("dips into outer ctx: " + str(c))
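Several hunks in this commit delete assert statements from hot methods. Worth noting: CPython also strips asserts when run with the -O flag, so deleting them in source trades a safety net for speed even in default, non-optimized runs. A quick way to see the per-call cost (illustrative):

    # Sketch: an assert costs a test per call unless Python runs with -O.
    import timeit

    def with_assert(depth):
        assert depth > -2**63
        return depth - 1

    def without_assert(depth):
        return depth - 1

    print(timeit.timeit(lambda: with_assert(10), number=100000))
    print(timeit.timeit(lambda: without_assert(10), number=100000))  # a bit faster
    # Running "python -O" would remove the assert from with_assert as well.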
@@ -1222,28 +1216,30 @@ class ParserATNSimulator(ATNSimulator):
         else:
             return "<rule " + str(index) + ">"

-    def getEpsilonTarget(self, config:ATNConfig, t:Transition, collectPredicates:bool, inContext:bool, fullCtx:bool, treatEofAsEpsilon:bool):
-        tt = t.serializationType
-        if tt==Transition.RULE:
-            return self.ruleTransition(config, t)
-        elif tt==Transition.PRECEDENCE:
-            return self.precedenceTransition(config, t, collectPredicates, inContext, fullCtx)
-        elif tt==Transition.PREDICATE:
-            return self.predTransition(config, t, collectPredicates, inContext, fullCtx)
-        elif tt==Transition.ACTION:
-            return self.actionTransition(config, t)
-        elif tt==Transition.EPSILON:
-            return ATNConfig(state=t.target, config=config)
-        elif tt in [ Transition.ATOM, Transition.RANGE, Transition.SET ]:
-            # EOF transitions act like epsilon transitions after the first EOF
-            # transition is traversed
-            if treatEofAsEpsilon:
-                if t.matches(Token.EOF, 0, 1):
-                    return ATNConfig(state=t.target, config=config)
-            return None
-        else:
-            return None
+    epsilonTargetMethods = dict()
+    epsilonTargetMethods[Transition.RULE] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+        sim.ruleTransition(config, t)
+    epsilonTargetMethods[Transition.PRECEDENCE] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+        sim.precedenceTransition(config, t, collectPredicates, inContext, fullCtx)
+    epsilonTargetMethods[Transition.PREDICATE] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+        sim.predTransition(config, t, collectPredicates, inContext, fullCtx)
+    epsilonTargetMethods[Transition.ACTION] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+        sim.actionTransition(config, t)
+    epsilonTargetMethods[Transition.EPSILON] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+        ATNConfig(state=t.target, config=config)
+    epsilonTargetMethods[Transition.ATOM] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+        ATNConfig(state=t.target, config=config) if treatEofAsEpsilon and t.matches(Token.EOF, 0, 1) else None
+    epsilonTargetMethods[Transition.RANGE] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+        ATNConfig(state=t.target, config=config) if treatEofAsEpsilon and t.matches(Token.EOF, 0, 1) else None
+    epsilonTargetMethods[Transition.SET] = lambda sim, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon: \
+        ATNConfig(state=t.target, config=config) if treatEofAsEpsilon and t.matches(Token.EOF, 0, 1) else None
+
+    def getEpsilonTarget(self, config:ATNConfig, t:Transition, collectPredicates:bool, inContext:bool, fullCtx:bool, treatEofAsEpsilon:bool):
+        m = self.epsilonTargetMethods.get(t.serializationType, None)
+        if m is None:
+            return None
+        else:
+            return m(self, config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon)

     def actionTransition(self, config:ATNConfig, t:ActionTransition):
         if self.debug:
@@ -501,11 +501,11 @@ class PredictionMode(Enum):
     def getConflictingAltSubsets(cls, configs:ATNConfigSet):
         configToAlts = dict()
         for c in configs:
-            s = str(c.state.stateNumber) + "/" + str(c.context)
-            alts = configToAlts.get(s, None)
+            h = hash((c.state.stateNumber, c.context))
+            alts = configToAlts.get(h, None)
             if alts is None:
                 alts = set()
-                configToAlts[s] = alts
+                configToAlts[h] = alts
             alts.add(c.alt)
         return configToAlts.values()
@@ -137,13 +137,7 @@ class Predicate(SemanticContext):
         return parser.sempred(localctx, self.ruleIndex, self.predIndex)

     def __hash__(self):
-        with StringIO() as buf:
-            buf.write(str(self.ruleIndex))
-            buf.write("/")
-            buf.write(str(self.predIndex))
-            buf.write("/")
-            buf.write(str(self.isCtxDependent))
-            return hash(buf.getvalue())
+        return hash((self.ruleIndex, self.predIndex, self.isCtxDependent))

     def __eq__(self, other):
         if self is other:
@@ -221,7 +215,10 @@ class AND(SemanticContext):
         return self.opnds == other.opnds

     def __hash__(self):
-        return hash(str(self.opnds) + "/AND")
+        h = 0
+        for o in self.opnds:
+            h = hash((h, o))
+        return hash((h, "AND"))

     #
     # {@inheritDoc}
@@ -309,7 +306,10 @@ class OR (SemanticContext):
         return self.opnds == other.opnds

     def __hash__(self):
-        return hash(str(self.opnds) + "/OR")
+        h = 0
+        for o in self.opnds:
+            h = hash((h, o))
+        return hash((h, "OR"))

     # <p>
     # The evaluation of predicates by this context is short-circuiting, but
@@ -327,7 +327,7 @@ class OR (SemanticContext):
         for context in self.opnds:
             evaluated = context.evalPrecedence(parser, outerContext)
             differs |= evaluated is not context
-            if evaluate is SemanticContext.NONE:
+            if evaluated is SemanticContext.NONE:
                 # The OR context is true if any element is true
                 return SemanticContext.NONE
             elif evaluated is not None:
@@ -0,0 +1 @@
+__author__ = 'ericvergnaud'
@@ -0,0 +1,777 @@
+void main()
+{
+    int a=0;
+    if( 3 > 4){
+        ;
+    }
+    else if( 3 > 4){
+        ;
+    }
+    else if( 3 > 4){
+        a++;
+    }
+    /* ... the "else if( 3 > 4){ ; }" block repeats for roughly 250 further
+       branches, with an occasional "a++;" body; the sheer branch count is
+       the point of this parser stress-test input ... */
+    else if( 3 > 4){
+        ;
+    }
+}
@@ -0,0 +1,44 @@
+import sys
+sys.setrecursionlimit(4000)
+import antlr4
+from parser.cparser import CParser
+from parser.clexer import CLexer
+from datetime import datetime
+import cProfile
+
+class ErrorListener(antlr4.error.ErrorListener.ErrorListener):
+
+    def __init__(self):
+        super(ErrorListener, self).__init__()
+        self.errored_out = False
+
+    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
+        self.errored_out = True
+
+
+def sub():
+    # Parse the input file
+    input_stream = antlr4.FileStream("c.c")
+
+    lexer = CLexer(input_stream)
+    token_stream = antlr4.CommonTokenStream(lexer)
+
+    parser = CParser(token_stream)
+
+    errors = ErrorListener()
+    parser.addErrorListener(errors)
+    tree = parser.compilationUnit()
+
+def main():
+    before = datetime.now()
+    sub()
+    after = datetime.now()
+    print(str(after-before))
+#    before = after
+#    sub()
+#    after = datetime.now()
+#    print(str(after-before))
+
+if __name__ == '__main__':
+    cProfile.run("main()", sort='tottime')
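The script above sorts cProfile output by tottime, which surfaces the runtime's own hot methods — exactly the hashing and closure code patched in this commit. If saved stats are preferred over printing, something like this works (the file name is illustrative):

    # Sketch: dump cProfile stats to a file and inspect the top entries.
    import cProfile
    import pstats

    def work():
        sum(i * i for i in range(100000))

    cProfile.run("work()", "profile.out")
    stats = pstats.Stats("profile.out")
    stats.sort_stats("tottime").print_stats(10)   # ten most expensive functions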
@@ -0,0 +1 @@
+__author__ = 'ericvergnaud'
@@ -0,0 +1,799 @@
+# Generated from C.bnf by ANTLR 4.5.1
+from antlr4 import *
+from io import StringIO
+
+
+def serializedATN():
+    with StringIO() as buf:
+        buf.write("\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2s")
+        buf.write("\u04e7\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
+        # [... several hundred further buf.write lines of machine-generated,
+        #  serialized ATN data follow in the original CLexer.py; the listing
+        #  is truncated here ...]
|
||||
buf.write("\u0242\7g\2\2\u0242\u0243\7u\2\2\u0243\u0244\7v\2\2\u0244")
|
||||
buf.write("\u0245\7t\2\2\u0245\u0246\7k\2\2\u0246\u0247\7e\2\2\u0247")
|
||||
        buf.write("\u0248\7v\2\2\u0248H\3\2\2\2\u0249\u024a\7t\2\2\u024a")
        buf.write("\u024b\7g\2\2\u024b\u024c\7v\2\2\u024c\u024d\7w\2\2\u024d")
        buf.write("\u024e\7t\2\2\u024e\u024f\7p\2\2\u024fJ\3\2\2\2\u0250")
        buf.write("\u0251\7u\2\2\u0251\u0252\7j\2\2\u0252\u0253\7q\2\2\u0253")
        buf.write("\u0254\7t\2\2\u0254\u0255\7v\2\2\u0255L\3\2\2\2\u0256")
        buf.write("\u0257\7u\2\2\u0257\u0258\7k\2\2\u0258\u0259\7i\2\2\u0259")
        buf.write("\u025a\7p\2\2\u025a\u025b\7g\2\2\u025b\u025c\7f\2\2\u025c")
        buf.write("N\3\2\2\2\u025d\u025e\7u\2\2\u025e\u025f\7k\2\2\u025f")
        buf.write("\u0260\7|\2\2\u0260\u0261\7g\2\2\u0261\u0262\7q\2\2\u0262")
        buf.write("\u0263\7h\2\2\u0263P\3\2\2\2\u0264\u0265\7u\2\2\u0265")
        buf.write("\u0266\7v\2\2\u0266\u0267\7c\2\2\u0267\u0268\7v\2\2\u0268")
        buf.write("\u0269\7k\2\2\u0269\u026a\7e\2\2\u026aR\3\2\2\2\u026b")
        buf.write("\u026c\7u\2\2\u026c\u026d\7v\2\2\u026d\u026e\7t\2\2\u026e")
        buf.write("\u026f\7w\2\2\u026f\u0270\7e\2\2\u0270\u0271\7v\2\2\u0271")
        buf.write("T\3\2\2\2\u0272\u0273\7u\2\2\u0273\u0274\7y\2\2\u0274")
        buf.write("\u0275\7k\2\2\u0275\u0276\7v\2\2\u0276\u0277\7e\2\2\u0277")
        buf.write("\u0278\7j\2\2\u0278V\3\2\2\2\u0279\u027a\7v\2\2\u027a")
        buf.write("\u027b\7{\2\2\u027b\u027c\7r\2\2\u027c\u027d\7g\2\2\u027d")
        buf.write("\u027e\7f\2\2\u027e\u027f\7g\2\2\u027f\u0280\7h\2\2\u0280")
        buf.write("X\3\2\2\2\u0281\u0282\7w\2\2\u0282\u0283\7p\2\2\u0283")
        buf.write("\u0284\7k\2\2\u0284\u0285\7q\2\2\u0285\u0286\7p\2\2\u0286")
        buf.write("Z\3\2\2\2\u0287\u0288\7w\2\2\u0288\u0289\7p\2\2\u0289")
        buf.write("\u028a\7u\2\2\u028a\u028b\7k\2\2\u028b\u028c\7i\2\2\u028c")
        buf.write("\u028d\7p\2\2\u028d\u028e\7g\2\2\u028e\u028f\7f\2\2\u028f")
        buf.write("\\\3\2\2\2\u0290\u0291\7x\2\2\u0291\u0292\7q\2\2\u0292")
        buf.write("\u0293\7k\2\2\u0293\u0294\7f\2\2\u0294^\3\2\2\2\u0295")
        buf.write("\u0296\7x\2\2\u0296\u0297\7q\2\2\u0297\u0298\7n\2\2\u0298")
        buf.write("\u0299\7c\2\2\u0299\u029a\7v\2\2\u029a\u029b\7k\2\2\u029b")
        buf.write("\u029c\7n\2\2\u029c\u029d\7g\2\2\u029d`\3\2\2\2\u029e")
        buf.write("\u029f\7y\2\2\u029f\u02a0\7j\2\2\u02a0\u02a1\7k\2\2\u02a1")
        buf.write("\u02a2\7n\2\2\u02a2\u02a3\7g\2\2\u02a3b\3\2\2\2\u02a4")
        buf.write("\u02a5\7a\2\2\u02a5\u02a6\7C\2\2\u02a6\u02a7\7n\2\2\u02a7")
        buf.write("\u02a8\7k\2\2\u02a8\u02a9\7i\2\2\u02a9\u02aa\7p\2\2\u02aa")
        buf.write("\u02ab\7c\2\2\u02ab\u02ac\7u\2\2\u02acd\3\2\2\2\u02ad")
        buf.write("\u02ae\7a\2\2\u02ae\u02af\7C\2\2\u02af\u02b0\7n\2\2\u02b0")
        buf.write("\u02b1\7k\2\2\u02b1\u02b2\7i\2\2\u02b2\u02b3\7p\2\2\u02b3")
        buf.write("\u02b4\7q\2\2\u02b4\u02b5\7h\2\2\u02b5f\3\2\2\2\u02b6")
        buf.write("\u02b7\7a\2\2\u02b7\u02b8\7C\2\2\u02b8\u02b9\7v\2\2\u02b9")
        buf.write("\u02ba\7q\2\2\u02ba\u02bb\7o\2\2\u02bb\u02bc\7k\2\2\u02bc")
        buf.write("\u02bd\7e\2\2\u02bdh\3\2\2\2\u02be\u02bf\7a\2\2\u02bf")
        buf.write("\u02c0\7D\2\2\u02c0\u02c1\7q\2\2\u02c1\u02c2\7q\2\2\u02c2")
        buf.write("\u02c3\7n\2\2\u02c3j\3\2\2\2\u02c4\u02c5\7a\2\2\u02c5")
        buf.write("\u02c6\7E\2\2\u02c6\u02c7\7q\2\2\u02c7\u02c8\7o\2\2\u02c8")
        buf.write("\u02c9\7r\2\2\u02c9\u02ca\7n\2\2\u02ca\u02cb\7g\2\2\u02cb")
        buf.write("\u02cc\7z\2\2\u02ccl\3\2\2\2\u02cd\u02ce\7a\2\2\u02ce")
        buf.write("\u02cf\7I\2\2\u02cf\u02d0\7g\2\2\u02d0\u02d1\7p\2\2\u02d1")
        buf.write("\u02d2\7g\2\2\u02d2\u02d3\7t\2\2\u02d3\u02d4\7k\2\2\u02d4")
        buf.write("\u02d5\7e\2\2\u02d5n\3\2\2\2\u02d6\u02d7\7a\2\2\u02d7")
        buf.write("\u02d8\7K\2\2\u02d8\u02d9\7o\2\2\u02d9\u02da\7c\2\2\u02da")
        buf.write("\u02db\7i\2\2\u02db\u02dc\7k\2\2\u02dc\u02dd\7p\2\2\u02dd")
        buf.write("\u02de\7c\2\2\u02de\u02df\7t\2\2\u02df\u02e0\7{\2\2\u02e0")
        buf.write("p\3\2\2\2\u02e1\u02e2\7a\2\2\u02e2\u02e3\7P\2\2\u02e3")
        buf.write("\u02e4\7q\2\2\u02e4\u02e5\7t\2\2\u02e5\u02e6\7g\2\2\u02e6")
        buf.write("\u02e7\7v\2\2\u02e7\u02e8\7w\2\2\u02e8\u02e9\7t\2\2\u02e9")
        buf.write("\u02ea\7p\2\2\u02ear\3\2\2\2\u02eb\u02ec\7a\2\2\u02ec")
        buf.write("\u02ed\7U\2\2\u02ed\u02ee\7v\2\2\u02ee\u02ef\7c\2\2\u02ef")
        buf.write("\u02f0\7v\2\2\u02f0\u02f1\7k\2\2\u02f1\u02f2\7e\2\2\u02f2")
        buf.write("\u02f3\7a\2\2\u02f3\u02f4\7c\2\2\u02f4\u02f5\7u\2\2\u02f5")
        buf.write("\u02f6\7u\2\2\u02f6\u02f7\7g\2\2\u02f7\u02f8\7t\2\2\u02f8")
        buf.write("\u02f9\7v\2\2\u02f9t\3\2\2\2\u02fa\u02fb\7a\2\2\u02fb")
        buf.write("\u02fc\7V\2\2\u02fc\u02fd\7j\2\2\u02fd\u02fe\7t\2\2\u02fe")
        buf.write("\u02ff\7g\2\2\u02ff\u0300\7c\2\2\u0300\u0301\7f\2\2\u0301")
        buf.write("\u0302\7a\2\2\u0302\u0303\7n\2\2\u0303\u0304\7q\2\2\u0304")
        buf.write("\u0305\7e\2\2\u0305\u0306\7c\2\2\u0306\u0307\7n\2\2\u0307")
        buf.write("v\3\2\2\2\u0308\u0309\7*\2\2\u0309x\3\2\2\2\u030a\u030b")
        buf.write("\7+\2\2\u030bz\3\2\2\2\u030c\u030d\7]\2\2\u030d|\3\2\2")
        buf.write("\2\u030e\u030f\7_\2\2\u030f~\3\2\2\2\u0310\u0311\7}\2")
        buf.write("\2\u0311\u0080\3\2\2\2\u0312\u0313\7\177\2\2\u0313\u0082")
        buf.write("\3\2\2\2\u0314\u0315\7>\2\2\u0315\u0084\3\2\2\2\u0316")
        buf.write("\u0317\7>\2\2\u0317\u0318\7?\2\2\u0318\u0086\3\2\2\2\u0319")
        buf.write("\u031a\7@\2\2\u031a\u0088\3\2\2\2\u031b\u031c\7@\2\2\u031c")
        buf.write("\u031d\7?\2\2\u031d\u008a\3\2\2\2\u031e\u031f\7>\2\2\u031f")
        buf.write("\u0320\7>\2\2\u0320\u008c\3\2\2\2\u0321\u0322\7@\2\2\u0322")
        buf.write("\u0323\7@\2\2\u0323\u008e\3\2\2\2\u0324\u0325\7-\2\2\u0325")
        buf.write("\u0090\3\2\2\2\u0326\u0327\7-\2\2\u0327\u0328\7-\2\2\u0328")
        buf.write("\u0092\3\2\2\2\u0329\u032a\7/\2\2\u032a\u0094\3\2\2\2")
        buf.write("\u032b\u032c\7/\2\2\u032c\u032d\7/\2\2\u032d\u0096\3\2")
        buf.write("\2\2\u032e\u032f\7,\2\2\u032f\u0098\3\2\2\2\u0330\u0331")
        buf.write("\7\61\2\2\u0331\u009a\3\2\2\2\u0332\u0333\7\'\2\2\u0333")
        buf.write("\u009c\3\2\2\2\u0334\u0335\7(\2\2\u0335\u009e\3\2\2\2")
        buf.write("\u0336\u0337\7~\2\2\u0337\u00a0\3\2\2\2\u0338\u0339\7")
        buf.write("(\2\2\u0339\u033a\7(\2\2\u033a\u00a2\3\2\2\2\u033b\u033c")
        buf.write("\7~\2\2\u033c\u033d\7~\2\2\u033d\u00a4\3\2\2\2\u033e\u033f")
        buf.write("\7`\2\2\u033f\u00a6\3\2\2\2\u0340\u0341\7#\2\2\u0341\u00a8")
        buf.write("\3\2\2\2\u0342\u0343\7\u0080\2\2\u0343\u00aa\3\2\2\2\u0344")
        buf.write("\u0345\7A\2\2\u0345\u00ac\3\2\2\2\u0346\u0347\7<\2\2\u0347")
        buf.write("\u00ae\3\2\2\2\u0348\u0349\7=\2\2\u0349\u00b0\3\2\2\2")
        buf.write("\u034a\u034b\7.\2\2\u034b\u00b2\3\2\2\2\u034c\u034d\7")
        buf.write("?\2\2\u034d\u00b4\3\2\2\2\u034e\u034f\7,\2\2\u034f\u0350")
        buf.write("\7?\2\2\u0350\u00b6\3\2\2\2\u0351\u0352\7\61\2\2\u0352")
        buf.write("\u0353\7?\2\2\u0353\u00b8\3\2\2\2\u0354\u0355\7\'\2\2")
        buf.write("\u0355\u0356\7?\2\2\u0356\u00ba\3\2\2\2\u0357\u0358\7")
        buf.write("-\2\2\u0358\u0359\7?\2\2\u0359\u00bc\3\2\2\2\u035a\u035b")
        buf.write("\7/\2\2\u035b\u035c\7?\2\2\u035c\u00be\3\2\2\2\u035d\u035e")
        buf.write("\7>\2\2\u035e\u035f\7>\2\2\u035f\u0360\7?\2\2\u0360\u00c0")
        buf.write("\3\2\2\2\u0361\u0362\7@\2\2\u0362\u0363\7@\2\2\u0363\u0364")
        buf.write("\7?\2\2\u0364\u00c2\3\2\2\2\u0365\u0366\7(\2\2\u0366\u0367")
        buf.write("\7?\2\2\u0367\u00c4\3\2\2\2\u0368\u0369\7`\2\2\u0369\u036a")
        buf.write("\7?\2\2\u036a\u00c6\3\2\2\2\u036b\u036c\7~\2\2\u036c\u036d")
        buf.write("\7?\2\2\u036d\u00c8\3\2\2\2\u036e\u036f\7?\2\2\u036f\u0370")
        buf.write("\7?\2\2\u0370\u00ca\3\2\2\2\u0371\u0372\7#\2\2\u0372\u0373")
        buf.write("\7?\2\2\u0373\u00cc\3\2\2\2\u0374\u0375\7/\2\2\u0375\u0376")
        buf.write("\7@\2\2\u0376\u00ce\3\2\2\2\u0377\u0378\7\60\2\2\u0378")
        buf.write("\u00d0\3\2\2\2\u0379\u037a\7\60\2\2\u037a\u037b\7\60\2")
        buf.write("\2\u037b\u037c\7\60\2\2\u037c\u00d2\3\2\2\2\u037d\u0382")
        buf.write("\5\u00d5k\2\u037e\u0381\5\u00d5k\2\u037f\u0381\5\u00d9")
        buf.write("m\2\u0380\u037e\3\2\2\2\u0380\u037f\3\2\2\2\u0381\u0384")
        buf.write("\3\2\2\2\u0382\u0380\3\2\2\2\u0382\u0383\3\2\2\2\u0383")
        buf.write("\u00d4\3\2\2\2\u0384\u0382\3\2\2\2\u0385\u0388\5\u00d7")
        buf.write("l\2\u0386\u0388\5\u00dbn\2\u0387\u0385\3\2\2\2\u0387\u0386")
        buf.write("\3\2\2\2\u0388\u00d6\3\2\2\2\u0389\u038a\t\2\2\2\u038a")
        buf.write("\u00d8\3\2\2\2\u038b\u038c\t\3\2\2\u038c\u00da\3\2\2\2")
        buf.write("\u038d\u038e\7^\2\2\u038e\u038f\7w\2\2\u038f\u0390\3\2")
        buf.write("\2\2\u0390\u0398\5\u00ddo\2\u0391\u0392\7^\2\2\u0392\u0393")
        buf.write("\7W\2\2\u0393\u0394\3\2\2\2\u0394\u0395\5\u00ddo\2\u0395")
        buf.write("\u0396\5\u00ddo\2\u0396\u0398\3\2\2\2\u0397\u038d\3\2")
        buf.write("\2\2\u0397\u0391\3\2\2\2\u0398\u00dc\3\2\2\2\u0399\u039a")
        buf.write("\5\u00efx\2\u039a\u039b\5\u00efx\2\u039b\u039c\5\u00ef")
        buf.write("x\2\u039c\u039d\5\u00efx\2\u039d\u00de\3\2\2\2\u039e\u03a2")
        buf.write("\5\u00e1q\2\u039f\u03a2\5\u00f9}\2\u03a0\u03a2\5\u010f")
        buf.write("\u0088\2\u03a1\u039e\3\2\2\2\u03a1\u039f\3\2\2\2\u03a1")
        buf.write("\u03a0\3\2\2\2\u03a2\u00e0\3\2\2\2\u03a3\u03a5\5\u00e3")
        buf.write("r\2\u03a4\u03a6\5\u00f1y\2\u03a5\u03a4\3\2\2\2\u03a5\u03a6")
        buf.write("\3\2\2\2\u03a6\u03b0\3\2\2\2\u03a7\u03a9\5\u00e5s\2\u03a8")
        buf.write("\u03aa\5\u00f1y\2\u03a9\u03a8\3\2\2\2\u03a9\u03aa\3\2")
        buf.write("\2\2\u03aa\u03b0\3\2\2\2\u03ab\u03ad\5\u00e7t\2\u03ac")
        buf.write("\u03ae\5\u00f1y\2\u03ad\u03ac\3\2\2\2\u03ad\u03ae\3\2")
        buf.write("\2\2\u03ae\u03b0\3\2\2\2\u03af\u03a3\3\2\2\2\u03af\u03a7")
        buf.write("\3\2\2\2\u03af\u03ab\3\2\2\2\u03b0\u00e2\3\2\2\2\u03b1")
        buf.write("\u03b5\5\u00ebv\2\u03b2\u03b4\5\u00d9m\2\u03b3\u03b2\3")
        buf.write("\2\2\2\u03b4\u03b7\3\2\2\2\u03b5\u03b3\3\2\2\2\u03b5\u03b6")
        buf.write("\3\2\2\2\u03b6\u00e4\3\2\2\2\u03b7\u03b5\3\2\2\2\u03b8")
        buf.write("\u03bc\7\62\2\2\u03b9\u03bb\5\u00edw\2\u03ba\u03b9\3\2")
        buf.write("\2\2\u03bb\u03be\3\2\2\2\u03bc\u03ba\3\2\2\2\u03bc\u03bd")
        buf.write("\3\2\2\2\u03bd\u00e6\3\2\2\2\u03be\u03bc\3\2\2\2\u03bf")
        buf.write("\u03c1\5\u00e9u\2\u03c0\u03c2\5\u00efx\2\u03c1\u03c0\3")
        buf.write("\2\2\2\u03c2\u03c3\3\2\2\2\u03c3\u03c1\3\2\2\2\u03c3\u03c4")
        buf.write("\3\2\2\2\u03c4\u00e8\3\2\2\2\u03c5\u03c6\7\62\2\2\u03c6")
        buf.write("\u03c7\t\4\2\2\u03c7\u00ea\3\2\2\2\u03c8\u03c9\t\5\2\2")
        buf.write("\u03c9\u00ec\3\2\2\2\u03ca\u03cb\t\6\2\2\u03cb\u00ee\3")
        buf.write("\2\2\2\u03cc\u03cd\t\7\2\2\u03cd\u00f0\3\2\2\2\u03ce\u03d0")
        buf.write("\5\u00f3z\2\u03cf\u03d1\5\u00f5{\2\u03d0\u03cf\3\2\2\2")
        buf.write("\u03d0\u03d1\3\2\2\2\u03d1\u03de\3\2\2\2\u03d2\u03d3\5")
        buf.write("\u00f3z\2\u03d3\u03d4\5\u00f7|\2\u03d4\u03de\3\2\2\2\u03d5")
        buf.write("\u03d7\5\u00f5{\2\u03d6\u03d8\5\u00f3z\2\u03d7\u03d6\3")
        buf.write("\2\2\2\u03d7\u03d8\3\2\2\2\u03d8\u03de\3\2\2\2\u03d9\u03db")
        buf.write("\5\u00f7|\2\u03da\u03dc\5\u00f3z\2\u03db\u03da\3\2\2\2")
        buf.write("\u03db\u03dc\3\2\2\2\u03dc\u03de\3\2\2\2\u03dd\u03ce\3")
        buf.write("\2\2\2\u03dd\u03d2\3\2\2\2\u03dd\u03d5\3\2\2\2\u03dd\u03d9")
        buf.write("\3\2\2\2\u03de\u00f2\3\2\2\2\u03df\u03e0\t\b\2\2\u03e0")
        buf.write("\u00f4\3\2\2\2\u03e1\u03e2\t\t\2\2\u03e2\u00f6\3\2\2\2")
        buf.write("\u03e3\u03e4\7n\2\2\u03e4\u03e8\7n\2\2\u03e5\u03e6\7N")
        buf.write("\2\2\u03e6\u03e8\7N\2\2\u03e7\u03e3\3\2\2\2\u03e7\u03e5")
        buf.write("\3\2\2\2\u03e8\u00f8\3\2\2\2\u03e9\u03ec\5\u00fb~\2\u03ea")
        buf.write("\u03ec\5\u00fd\177\2\u03eb\u03e9\3\2\2\2\u03eb\u03ea\3")
        buf.write("\2\2\2\u03ec\u00fa\3\2\2\2\u03ed\u03ef\5\u00ff\u0080\2")
        buf.write("\u03ee\u03f0\5\u0101\u0081\2\u03ef\u03ee\3\2\2\2\u03ef")
        buf.write("\u03f0\3\2\2\2\u03f0\u03f2\3\2\2\2\u03f1\u03f3\5\u010d")
        buf.write("\u0087\2\u03f2\u03f1\3\2\2\2\u03f2\u03f3\3\2\2\2\u03f3")
        buf.write("\u03fa\3\2\2\2\u03f4\u03f5\5\u0105\u0083\2\u03f5\u03f7")
        buf.write("\5\u0101\u0081\2\u03f6\u03f8\5\u010d\u0087\2\u03f7\u03f6")
        buf.write("\3\2\2\2\u03f7\u03f8\3\2\2\2\u03f8\u03fa\3\2\2\2\u03f9")
        buf.write("\u03ed\3\2\2\2\u03f9\u03f4\3\2\2\2\u03fa\u00fc\3\2\2\2")
        buf.write("\u03fb\u03fc\5\u00e9u\2\u03fc\u03fd\5\u0107\u0084\2\u03fd")
        buf.write("\u03ff\5\u0109\u0085\2\u03fe\u0400\5\u010d\u0087\2\u03ff")
        buf.write("\u03fe\3\2\2\2\u03ff\u0400\3\2\2\2\u0400\u0408\3\2\2\2")
        buf.write("\u0401\u0402\5\u00e9u\2\u0402\u0403\5\u010b\u0086\2\u0403")
        buf.write("\u0405\5\u0109\u0085\2\u0404\u0406\5\u010d\u0087\2\u0405")
        buf.write("\u0404\3\2\2\2\u0405\u0406\3\2\2\2\u0406\u0408\3\2\2\2")
        buf.write("\u0407\u03fb\3\2\2\2\u0407\u0401\3\2\2\2\u0408\u00fe\3")
        buf.write("\2\2\2\u0409\u040b\5\u0105\u0083\2\u040a\u0409\3\2\2\2")
        buf.write("\u040a\u040b\3\2\2\2\u040b\u040c\3\2\2\2\u040c\u040d\7")
        buf.write("\60\2\2\u040d\u0412\5\u0105\u0083\2\u040e\u040f\5\u0105")
        buf.write("\u0083\2\u040f\u0410\7\60\2\2\u0410\u0412\3\2\2\2\u0411")
        buf.write("\u040a\3\2\2\2\u0411\u040e\3\2\2\2\u0412\u0100\3\2\2\2")
        buf.write("\u0413\u0415\7g\2\2\u0414\u0416\5\u0103\u0082\2\u0415")
        buf.write("\u0414\3\2\2\2\u0415\u0416\3\2\2\2\u0416\u0417\3\2\2\2")
        buf.write("\u0417\u041e\5\u0105\u0083\2\u0418\u041a\7G\2\2\u0419")
        buf.write("\u041b\5\u0103\u0082\2\u041a\u0419\3\2\2\2\u041a\u041b")
        buf.write("\3\2\2\2\u041b\u041c\3\2\2\2\u041c\u041e\5\u0105\u0083")
        buf.write("\2\u041d\u0413\3\2\2\2\u041d\u0418\3\2\2\2\u041e\u0102")
        buf.write("\3\2\2\2\u041f\u0420\t\n\2\2\u0420\u0104\3\2\2\2\u0421")
        buf.write("\u0423\5\u00d9m\2\u0422\u0421\3\2\2\2\u0423\u0424\3\2")
        buf.write("\2\2\u0424\u0422\3\2\2\2\u0424\u0425\3\2\2\2\u0425\u0106")
        buf.write("\3\2\2\2\u0426\u0428\5\u010b\u0086\2\u0427\u0426\3\2\2")
        buf.write("\2\u0427\u0428\3\2\2\2\u0428\u0429\3\2\2\2\u0429\u042a")
        buf.write("\7\60\2\2\u042a\u042f\5\u010b\u0086\2\u042b\u042c\5\u010b")
        buf.write("\u0086\2\u042c\u042d\7\60\2\2\u042d\u042f\3\2\2\2\u042e")
        buf.write("\u0427\3\2\2\2\u042e\u042b\3\2\2\2\u042f\u0108\3\2\2\2")
        buf.write("\u0430\u0432\7r\2\2\u0431\u0433\5\u0103\u0082\2\u0432")
        buf.write("\u0431\3\2\2\2\u0432\u0433\3\2\2\2\u0433\u0434\3\2\2\2")
        buf.write("\u0434\u043b\5\u0105\u0083\2\u0435\u0437\7R\2\2\u0436")
        buf.write("\u0438\5\u0103\u0082\2\u0437\u0436\3\2\2\2\u0437\u0438")
        buf.write("\3\2\2\2\u0438\u0439\3\2\2\2\u0439\u043b\5\u0105\u0083")
        buf.write("\2\u043a\u0430\3\2\2\2\u043a\u0435\3\2\2\2\u043b\u010a")
        buf.write("\3\2\2\2\u043c\u043e\5\u00efx\2\u043d\u043c\3\2\2\2\u043e")
        buf.write("\u043f\3\2\2\2\u043f\u043d\3\2\2\2\u043f\u0440\3\2\2\2")
        buf.write("\u0440\u010c\3\2\2\2\u0441\u0442\t\13\2\2\u0442\u010e")
        buf.write("\3\2\2\2\u0443\u0444\7)\2\2\u0444\u0445\5\u0111\u0089")
        buf.write("\2\u0445\u0446\7)\2\2\u0446\u045a\3\2\2\2\u0447\u0448")
        buf.write("\7N\2\2\u0448\u0449\7)\2\2\u0449\u044a\3\2\2\2\u044a\u044b")
        buf.write("\5\u0111\u0089\2\u044b\u044c\7)\2\2\u044c\u045a\3\2\2")
        buf.write("\2\u044d\u044e\7w\2\2\u044e\u044f\7)\2\2\u044f\u0450\3")
        buf.write("\2\2\2\u0450\u0451\5\u0111\u0089\2\u0451\u0452\7)\2\2")
        buf.write("\u0452\u045a\3\2\2\2\u0453\u0454\7W\2\2\u0454\u0455\7")
        buf.write(")\2\2\u0455\u0456\3\2\2\2\u0456\u0457\5\u0111\u0089\2")
        buf.write("\u0457\u0458\7)\2\2\u0458\u045a\3\2\2\2\u0459\u0443\3")
        buf.write("\2\2\2\u0459\u0447\3\2\2\2\u0459\u044d\3\2\2\2\u0459\u0453")
        buf.write("\3\2\2\2\u045a\u0110\3\2\2\2\u045b\u045d\5\u0113\u008a")
        buf.write("\2\u045c\u045b\3\2\2\2\u045d\u045e\3\2\2\2\u045e\u045c")
        buf.write("\3\2\2\2\u045e\u045f\3\2\2\2\u045f\u0112\3\2\2\2\u0460")
        buf.write("\u0463\n\f\2\2\u0461\u0463\5\u0115\u008b\2\u0462\u0460")
        buf.write("\3\2\2\2\u0462\u0461\3\2\2\2\u0463\u0114\3\2\2\2\u0464")
        buf.write("\u0469\5\u0117\u008c\2\u0465\u0469\5\u0119\u008d\2\u0466")
        buf.write("\u0469\5\u011b\u008e\2\u0467\u0469\5\u00dbn\2\u0468\u0464")
        buf.write("\3\2\2\2\u0468\u0465\3\2\2\2\u0468\u0466\3\2\2\2\u0468")
        buf.write("\u0467\3\2\2\2\u0469\u0116\3\2\2\2\u046a\u046b\7^\2\2")
        buf.write("\u046b\u046c\t\r\2\2\u046c\u0118\3\2\2\2\u046d\u046e\7")
        buf.write("^\2\2\u046e\u0479\5\u00edw\2\u046f\u0470\7^\2\2\u0470")
        buf.write("\u0471\5\u00edw\2\u0471\u0472\5\u00edw\2\u0472\u0479\3")
        buf.write("\2\2\2\u0473\u0474\7^\2\2\u0474\u0475\5\u00edw\2\u0475")
        buf.write("\u0476\5\u00edw\2\u0476\u0477\5\u00edw\2\u0477\u0479\3")
        buf.write("\2\2\2\u0478\u046d\3\2\2\2\u0478\u046f\3\2\2\2\u0478\u0473")
        buf.write("\3\2\2\2\u0479\u011a\3\2\2\2\u047a\u047b\7^\2\2\u047b")
        buf.write("\u047c\7z\2\2\u047c\u047e\3\2\2\2\u047d\u047f\5\u00ef")
        buf.write("x\2\u047e\u047d\3\2\2\2\u047f\u0480\3\2\2\2\u0480\u047e")
        buf.write("\3\2\2\2\u0480\u0481\3\2\2\2\u0481\u011c\3\2\2\2\u0482")
        buf.write("\u0484\5\u011f\u0090\2\u0483\u0482\3\2\2\2\u0483\u0484")
        buf.write("\3\2\2\2\u0484\u0485\3\2\2\2\u0485\u0487\7$\2\2\u0486")
        buf.write("\u0488\5\u0121\u0091\2\u0487\u0486\3\2\2\2\u0487\u0488")
        buf.write("\3\2\2\2\u0488\u0489\3\2\2\2\u0489\u048a\7$\2\2\u048a")
        buf.write("\u011e\3\2\2\2\u048b\u048c\7w\2\2\u048c\u048f\7:\2\2\u048d")
        buf.write("\u048f\t\16\2\2\u048e\u048b\3\2\2\2\u048e\u048d\3\2\2")
        buf.write("\2\u048f\u0120\3\2\2\2\u0490\u0492\5\u0123\u0092\2\u0491")
        buf.write("\u0490\3\2\2\2\u0492\u0493\3\2\2\2\u0493\u0491\3\2\2\2")
        buf.write("\u0493\u0494\3\2\2\2\u0494\u0122\3\2\2\2\u0495\u0498\n")
        buf.write("\17\2\2\u0496\u0498\5\u0115\u008b\2\u0497\u0495\3\2\2")
        buf.write("\2\u0497\u0496\3\2\2\2\u0498\u0124\3\2\2\2\u0499\u049b")
        buf.write("\7%\2\2\u049a\u049c\5\u0129\u0095\2\u049b\u049a\3\2\2")
        buf.write("\2\u049b\u049c\3\2\2\2\u049c\u049d\3\2\2\2\u049d\u049f")
        buf.write("\5\u00e3r\2\u049e\u04a0\5\u0129\u0095\2\u049f\u049e\3")
        buf.write("\2\2\2\u049f\u04a0\3\2\2\2\u04a0\u04a1\3\2\2\2\u04a1\u04a5")
        buf.write("\5\u011d\u008f\2\u04a2\u04a4\n\20\2\2\u04a3\u04a2\3\2")
        buf.write("\2\2\u04a4\u04a7\3\2\2\2\u04a5\u04a3\3\2\2\2\u04a5\u04a6")
        buf.write("\3\2\2\2\u04a6\u04a8\3\2\2\2\u04a7\u04a5\3\2\2\2\u04a8")
        buf.write("\u04a9\b\u0093\2\2\u04a9\u0126\3\2\2\2\u04aa\u04ac\7%")
        buf.write("\2\2\u04ab\u04ad\5\u0129\u0095\2\u04ac\u04ab\3\2\2\2\u04ac")
        buf.write("\u04ad\3\2\2\2\u04ad\u04ae\3\2\2\2\u04ae\u04af\7r\2\2")
        buf.write("\u04af\u04b0\7t\2\2\u04b0\u04b1\7c\2\2\u04b1\u04b2\7i")
        buf.write("\2\2\u04b2\u04b3\7o\2\2\u04b3\u04b4\7c\2\2\u04b4\u04b5")
        buf.write("\3\2\2\2\u04b5\u04b9\5\u0129\u0095\2\u04b6\u04b8\n\20")
        buf.write("\2\2\u04b7\u04b6\3\2\2\2\u04b8\u04bb\3\2\2\2\u04b9\u04b7")
        buf.write("\3\2\2\2\u04b9\u04ba\3\2\2\2\u04ba\u04bc\3\2\2\2\u04bb")
        buf.write("\u04b9\3\2\2\2\u04bc\u04bd\b\u0094\2\2\u04bd\u0128\3\2")
        buf.write("\2\2\u04be\u04c0\t\21\2\2\u04bf\u04be\3\2\2\2\u04c0\u04c1")
        buf.write("\3\2\2\2\u04c1\u04bf\3\2\2\2\u04c1\u04c2\3\2\2\2\u04c2")
        buf.write("\u04c3\3\2\2\2\u04c3\u04c4\b\u0095\2\2\u04c4\u012a\3\2")
        buf.write("\2\2\u04c5\u04c7\7\17\2\2\u04c6\u04c8\7\f\2\2\u04c7\u04c6")
        buf.write("\3\2\2\2\u04c7\u04c8\3\2\2\2\u04c8\u04cb\3\2\2\2\u04c9")
        buf.write("\u04cb\7\f\2\2\u04ca\u04c5\3\2\2\2\u04ca\u04c9\3\2\2\2")
        buf.write("\u04cb\u04cc\3\2\2\2\u04cc\u04cd\b\u0096\2\2\u04cd\u012c")
        buf.write("\3\2\2\2\u04ce\u04cf\7\61\2\2\u04cf\u04d0\7,\2\2\u04d0")
        buf.write("\u04d4\3\2\2\2\u04d1\u04d3\13\2\2\2\u04d2\u04d1\3\2\2")
        buf.write("\2\u04d3\u04d6\3\2\2\2\u04d4\u04d5\3\2\2\2\u04d4\u04d2")
        buf.write("\3\2\2\2\u04d5\u04d7\3\2\2\2\u04d6\u04d4\3\2\2\2\u04d7")
        buf.write("\u04d8\7,\2\2\u04d8\u04d9\7\61\2\2\u04d9\u04da\3\2\2\2")
        buf.write("\u04da\u04db\b\u0097\2\2\u04db\u012e\3\2\2\2\u04dc\u04dd")
        buf.write("\7\61\2\2\u04dd\u04de\7\61\2\2\u04de\u04e2\3\2\2\2\u04df")
        buf.write("\u04e1\n\20\2\2\u04e0\u04df\3\2\2\2\u04e1\u04e4\3\2\2")
        buf.write("\2\u04e2\u04e0\3\2\2\2\u04e2\u04e3\3\2\2\2\u04e3\u04e5")
        buf.write("\3\2\2\2\u04e4\u04e2\3\2\2\2\u04e5\u04e6\b\u0098\2\2\u04e6")
        buf.write("\u0130\3\2\2\2=\2\u0380\u0382\u0387\u0397\u03a1\u03a5")
        buf.write("\u03a9\u03ad\u03af\u03b5\u03bc\u03c3\u03d0\u03d7\u03db")
        buf.write("\u03dd\u03e7\u03eb\u03ef\u03f2\u03f7\u03f9\u03ff\u0405")
        buf.write("\u0407\u040a\u0411\u0415\u041a\u041d\u0424\u0427\u042e")
        buf.write("\u0432\u0437\u043a\u043f\u0459\u045e\u0462\u0468\u0478")
        buf.write("\u0480\u0483\u0487\u048e\u0493\u0497\u049b\u049f\u04a5")
        buf.write("\u04ac\u04b9\u04c1\u04c7\u04ca\u04d4\u04e2\3\b\2\2")
        return buf.getvalue()


class CLexer(Lexer):

    # Deserialize the serialized ATN once, at class-definition time; all
    # lexer instances share this ATN and the DFA cache built from it.
    atn = ATNDeserializer().deserialize(serializedATN())

    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    # Token type constants; T__0..T__13 are the grammar's implicit
    # (unnamed) literal tokens such as '__extension__' and '__m128'.
    T__0 = 1
    T__1 = 2
    T__2 = 3
    T__3 = 4
    T__4 = 5
    T__5 = 6
    T__6 = 7
    T__7 = 8
    T__8 = 9
    T__9 = 10
    T__10 = 11
    T__11 = 12
    T__12 = 13
    T__13 = 14
    Auto = 15
    Break = 16
    Case = 17
    Char = 18
    Const = 19
    Continue = 20
    Default = 21
    Do = 22
    Double = 23
    Else = 24
    Enum = 25
    Extern = 26
    Float = 27
    For = 28
    Goto = 29
    If = 30
    Inline = 31
    Int = 32
    Long = 33
    Register = 34
    Restrict = 35
    Return = 36
    Short = 37
    Signed = 38
    Sizeof = 39
    Static = 40
    Struct = 41
    Switch = 42
    Typedef = 43
    Union = 44
    Unsigned = 45
    Void = 46
    Volatile = 47
    While = 48
    Alignas = 49
    Alignof = 50
    Atomic = 51
    Bool = 52
    Complex = 53
    Generic = 54
    Imaginary = 55
    Noreturn = 56
    StaticAssert = 57
    ThreadLocal = 58
    LeftParen = 59
    RightParen = 60
    LeftBracket = 61
    RightBracket = 62
    LeftBrace = 63
    RightBrace = 64
    Less = 65
    LessEqual = 66
    Greater = 67
    GreaterEqual = 68
    LeftShift = 69
    RightShift = 70
    Plus = 71
    PlusPlus = 72
    Minus = 73
    MinusMinus = 74
    Star = 75
    Div = 76
    Mod = 77
    And = 78
    Or = 79
    AndAnd = 80
    OrOr = 81
    Caret = 82
    Not = 83
    Tilde = 84
    Question = 85
    Colon = 86
    Semi = 87
    Comma = 88
    Assign = 89
    StarAssign = 90
    DivAssign = 91
    ModAssign = 92
    PlusAssign = 93
    MinusAssign = 94
    LeftShiftAssign = 95
    RightShiftAssign = 96
    AndAssign = 97
    XorAssign = 98
    OrAssign = 99
    Equal = 100
    NotEqual = 101
    Arrow = 102
    Dot = 103
    Ellipsis = 104
    Identifier = 105
    Constant = 106
    StringLiteral = 107
    LineDirective = 108
    PragmaDirective = 109
    Whitespace = 110
    Newline = 111
    BlockComment = 112
    LineComment = 113

    modeNames = [ "DEFAULT_MODE" ]

    # Display names for token types defined by literals in the grammar.
    literalNames = [ "<INVALID>",
            "'__extension__'", "'__builtin_va_arg'", "'__builtin_offsetof'",
            "'__m128'", "'__m128d'", "'__m128i'", "'__typeof__'", "'__inline__'",
            "'__stdcall'", "'__declspec'", "'__asm'", "'__attribute__'",
            "'__asm__'", "'__volatile__'", "'auto'", "'break'", "'case'",
            "'char'", "'const'", "'continue'", "'default'", "'do'", "'double'",
            "'else'", "'enum'", "'extern'", "'float'", "'for'", "'goto'",
            "'if'", "'inline'", "'int'", "'long'", "'register'", "'restrict'",
            "'return'", "'short'", "'signed'", "'sizeof'", "'static'", "'struct'",
            "'switch'", "'typedef'", "'union'", "'unsigned'", "'void'",
            "'volatile'", "'while'", "'_Alignas'", "'_Alignof'", "'_Atomic'",
            "'_Bool'", "'_Complex'", "'_Generic'", "'_Imaginary'", "'_Noreturn'",
            "'_Static_assert'", "'_Thread_local'", "'('", "')'", "'['",
            "']'", "'{'", "'}'", "'<'", "'<='", "'>'", "'>='", "'<<'", "'>>'",
            "'+'", "'++'", "'-'", "'--'", "'*'", "'/'", "'%'", "'&'", "'|'",
            "'&&'", "'||'", "'^'", "'!'", "'~'", "'?'", "':'", "';'", "','",
            "'='", "'*='", "'/='", "'%='", "'+='", "'-='", "'<<='", "'>>='",
            "'&='", "'^='", "'|='", "'=='", "'!='", "'->'", "'.'", "'...'" ]

    symbolicNames = [ "<INVALID>",
            "Auto", "Break", "Case", "Char", "Const", "Continue", "Default",
            "Do", "Double", "Else", "Enum", "Extern", "Float", "For", "Goto",
            "If", "Inline", "Int", "Long", "Register", "Restrict", "Return",
            "Short", "Signed", "Sizeof", "Static", "Struct", "Switch", "Typedef",
            "Union", "Unsigned", "Void", "Volatile", "While", "Alignas",
            "Alignof", "Atomic", "Bool", "Complex", "Generic", "Imaginary",
            "Noreturn", "StaticAssert", "ThreadLocal", "LeftParen", "RightParen",
            "LeftBracket", "RightBracket", "LeftBrace", "RightBrace", "Less",
            "LessEqual", "Greater", "GreaterEqual", "LeftShift", "RightShift",
            "Plus", "PlusPlus", "Minus", "MinusMinus", "Star", "Div", "Mod",
            "And", "Or", "AndAnd", "OrOr", "Caret", "Not", "Tilde", "Question",
            "Colon", "Semi", "Comma", "Assign", "StarAssign", "DivAssign",
            "ModAssign", "PlusAssign", "MinusAssign", "LeftShiftAssign",
            "RightShiftAssign", "AndAssign", "XorAssign", "OrAssign", "Equal",
            "NotEqual", "Arrow", "Dot", "Ellipsis", "Identifier", "Constant",
            "StringLiteral", "LineDirective", "PragmaDirective", "Whitespace",
            "Newline", "BlockComment", "LineComment" ]

    # All lexer rule names, including fragment rules such as
    # IdentifierNondigit (hence more entries than token types).
    ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
            "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13",
            "Auto", "Break", "Case", "Char", "Const", "Continue",
            "Default", "Do", "Double", "Else", "Enum", "Extern", "Float",
            "For", "Goto", "If", "Inline", "Int", "Long", "Register",
            "Restrict", "Return", "Short", "Signed", "Sizeof", "Static",
            "Struct", "Switch", "Typedef", "Union", "Unsigned", "Void",
            "Volatile", "While", "Alignas", "Alignof", "Atomic", "Bool",
            "Complex", "Generic", "Imaginary", "Noreturn", "StaticAssert",
            "ThreadLocal", "LeftParen", "RightParen", "LeftBracket",
            "RightBracket", "LeftBrace", "RightBrace", "Less", "LessEqual",
            "Greater", "GreaterEqual", "LeftShift", "RightShift",
            "Plus", "PlusPlus", "Minus", "MinusMinus", "Star", "Div",
            "Mod", "And", "Or", "AndAnd", "OrOr", "Caret", "Not",
            "Tilde", "Question", "Colon", "Semi", "Comma", "Assign",
            "StarAssign", "DivAssign", "ModAssign", "PlusAssign",
            "MinusAssign", "LeftShiftAssign", "RightShiftAssign",
            "AndAssign", "XorAssign", "OrAssign", "Equal", "NotEqual",
            "Arrow", "Dot", "Ellipsis", "Identifier", "IdentifierNondigit",
            "Nondigit", "Digit", "UniversalCharacterName", "HexQuad",
            "Constant", "IntegerConstant", "DecimalConstant", "OctalConstant",
            "HexadecimalConstant", "HexadecimalPrefix", "NonzeroDigit",
            "OctalDigit", "HexadecimalDigit", "IntegerSuffix", "UnsignedSuffix",
            "LongSuffix", "LongLongSuffix", "FloatingConstant", "DecimalFloatingConstant",
            "HexadecimalFloatingConstant", "FractionalConstant", "ExponentPart",
            "Sign", "DigitSequence", "HexadecimalFractionalConstant",
            "BinaryExponentPart", "HexadecimalDigitSequence", "FloatingSuffix",
            "CharacterConstant", "CCharSequence", "CChar", "EscapeSequence",
            "SimpleEscapeSequence", "OctalEscapeSequence", "HexadecimalEscapeSequence",
            "StringLiteral", "EncodingPrefix", "SCharSequence", "SChar",
            "LineDirective", "PragmaDirective", "Whitespace", "Newline",
            "BlockComment", "LineComment" ]

    grammarFileName = "C.bnf"

    def __init__(self, input=None):
        super().__init__(input)
        self.checkVersion("4.5.1")
        self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
        self._actions = None
        self._predicates = None
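
As a quick sanity check, the generated lexer can be driven standalone. A minimal sketch, assuming the antlr4 Python 3 runtime is installed and this file is importable as CLexer.py (the C snippet is illustrative, not part of this commit):

# Smoke-test driver for the generated CLexer (illustrative only).
from antlr4 import InputStream, CommonTokenStream
from CLexer import CLexer

lexer = CLexer(InputStream("int main() { return 0; }"))
tokens = CommonTokenStream(lexer)
tokens.fill()  # tokenize the whole input eagerly
for tok in tokens.tokens:
    # Token types match the constants above, e.g. CLexer.Int == 32 and
    # CLexer.Identifier == 105; EOF is reported as type -1.
    print(tok.type, repr(tok.text))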
File diff suppressed because it is too large
@ -76,6 +76,7 @@
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <version>2.12.4</version>
        <configuration>
          <systemPropertyVariables>
            <antlr-python2-runtime>../../antlr4-python2/src</antlr-python2-runtime>
@ -98,6 +98,13 @@
            <phase>package</phase>
            <configuration>
              <dependencyReducedPomLocation>${project.build.directory}/dependency-reduced-pom.xml</dependencyReducedPomLocation>
              <transformers>
                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                  <manifestEntries>
                    <Main-Class>org.antlr.v4.Tool</Main-Class>
                  </manifestEntries>
                </transformer>
              </transformers>
            </configuration>
            <goals>
              <goal>shade</goal>
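A note on the shade configuration above: by injecting Main-Class: org.antlr.v4.Tool into the shaded jar's manifest, the ManifestResourceTransformer makes the "complete" jar directly executable, e.g. java -jar antlr4-complete.jar MyGrammar.g4 (jar and grammar file names here are illustrative).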
@ -931,6 +931,7 @@ namespace <file.genPackage> {
<endif>
<namedActions.header>
using System;
using System.Text;
using Antlr4.Runtime;
using Antlr4.Runtime.Atn;
using Antlr4.Runtime.Misc;
@ -986,8 +987,14 @@ public partial class <csIdentifier.(lexer.name)> : <superClass; null="Lexer"> {

SerializedATN(model) ::= <<
public static readonly string _serializedATN =
    "<model.serialized; wrap={"+<\n><\t>"}>";
private static string _serializedATN = _serializeATN();

private static string _serializeATN()
{
    StringBuilder sb = new StringBuilder();
    sb.Append("<model.serialized; wrap={");<\n><\t>sb.Append("}>");
    return sb.ToString();
}

public static readonly ATN _ATN =
    new ATNDeserializer().Deserialize(_serializedATN.ToCharArray());
>>
@ -134,9 +134,9 @@ var decisionsToDFA = atn.decisionToState.map( function(ds, index) { return new a

var sharedContextCache = new antlr4.PredictionContextCache();

var literalNames = [ <parser.literalNames:{t | <t>}; null="'null'", separator=", ", wrap, anchor> ];
var literalNames = [ <parser.literalNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];

var symbolicNames = [ <parser.symbolicNames:{t | <t>}; null="'null'", separator=", ", wrap, anchor> ];
var symbolicNames = [ <parser.symbolicNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];

var ruleNames = [ <parser.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor> ];
@ -825,9 +825,9 @@ function <lexer.name>(input) {

<lexer.name>.modeNames = [ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> ];

<lexer.name>.literalNames = [ <lexer.literalNames:{t | <t>}; null="'null'", separator=", ", wrap, anchor> ];
<lexer.name>.literalNames = [ <lexer.literalNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];

<lexer.name>.symbolicNames = [ <lexer.symbolicNames:{t | <t>}; null="'null'", separator=", ", wrap, anchor> ];
<lexer.name>.symbolicNames = [ <lexer.symbolicNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];

<lexer.name>.ruleNames = [ <lexer.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor> ];