forked from jasder/antlr

Merge branch 'master' into support_intel_compiler

This commit is contained in: commit 14880cb3b4

@ -3,9 +3,9 @@
set -euo pipefail

cache_dir="$HOME/Library/Caches/Antlr4"
dotnet_url='https://download.microsoft.com/download/B/9/F/B9F1AF57-C14A-4670-9973-CDF47209B5BF/dotnet-dev-osx-x64.1.0.4.pkg'
dotnet_url='https://download.microsoft.com/download/F/4/F/F4FCB6EC-5F05-4DF8-822C-FF013DF1B17F/dotnet-dev-osx-x64.1.1.4.pkg'
dotnet_file=$(basename "$dotnet_url")
dotnet_shasum='63b5d99028cd8b2454736076106c96ba7d05f0fc'
dotnet_shasum='dc46d93716db8bea8cc3c668088cc9e39384b5a4'

thisdir=$(dirname "$0")

@ -2,10 +2,16 @@ version: '4.7.1-SNAPSHOT+AppVeyor.{build}'
cache:
- '%USERPROFILE%\.m2'
- '%USERPROFILE%\.nuget\packages -> **\project.json'
image: Visual Studio 2017
build: off
build_script:
- mvn -DskipTests install --batch-mode
- msbuild runtime/CSharp/runtime/CSharp/Antlr4.vs2013.sln /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" /verbosity:detailed
- msbuild /target:restore /target:rebuild /property:Configuration=Release /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" /verbosity:detailed runtime/CSharp/runtime/CSharp/Antlr4.dotnet.sln
- msbuild ./runtime-testsuite/target/classes/CSharp/runtime/CSharp/Antlr4.vs2013.sln /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" /verbosity:detailed
after_build:
- msbuild /target:pack /property:Configuration=Release /verbosity:detailed runtime/CSharp/runtime/CSharp/Antlr4.dotnet.sln
test_script:
- mvn install -Dantlr-python2-python="C:\Python27\python.exe" -Dantlr-python3-python="C:\Python35\python.exe" -Dantlr-javascript-nodejs="C:\Program Files (x86)\nodejs\node.exe" --batch-mode
artifacts:
- path: 'runtime\**\*.nupkg'
  name: NuGet

@ -165,6 +165,7 @@ YYYY/MM/DD, github id, Full name, email
2017/08/29, Eddy Reyes, eddy@mindsight.io
2017/09/09, brauliobz, Bráulio Bezerra, brauliobezerra@gmail.com
2017/09/11, sachinjain024, Sachin Jain, sachinjain024@gmail.com
2017/09/25, kaedvann, Rostislav Listerenko, r.listerenko@gmail.com
2017/10/06, bramp, Andrew Brampton, brampton@gmail.com
2017/10/15, simkimsia, Sim Kim Sia, kimcity@gmail.com
2017/10/27, Griffon26, Maurice van der Pot, griffon26@kfk4ever.com

@ -172,3 +173,4 @@ YYYY/MM/DD, github id, Full name, email
2017/10/29, gendalph, Максим Прохоренко, Maxim\dotProhorenko@gm@il.com
2017/11/02, jasonmoo, Jason Mooberry, jason.mooberry@gmail.com
2017/11/05, ajaypanyala, Ajay Panyala, ajay.panyala@gmail.com
2017/11/24, zqlu.cn, Zhiqiang Lu, zqlu.cn@gmail.com

@ -1,8 +1,14 @@
# Parser and lexer interpreters
# Parser and Lexer Interpreters

*Since ANTLR 4.2*

For small parsing tasks it is sometimes convenient to use ANTLR in interpreted mode, rather than generating a parser in a particular target, compiling it and running it as part of your application. Here's some sample code that creates lexer and parser Grammar objects and then creates interpreters. Once we have a ParserInterpreter, we can use it to parse starting in any rule we like, given a rule index (which the Grammar can provide).
For small parsing tasks it is sometimes convenient to use ANTLR in interpreted mode, rather than generating a parser in a particular target, compiling it, and running it as part of your application. Here's some sample code that creates lexer and parser Grammar objects and then creates interpreters. Once we have a ParserInterpreter, we can use it to parse starting in any rule we like, given a rule index (which the grammar + the parser can provide).

## Action Code

Since interpreters don't use generated parsers + lexers, they cannot execute any action code (including predicates). That means the interpreter runs as if there were no predicates at all. If your grammar requires action code in order to parse correctly, you will not be able to test it using this approach.
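
For example, a rule along these lines (a hypothetical fragment, not taken from any grammar in this repository) depends on a semantic predicate, so the interpreter cannot reproduce its real behavior; the predicate is simply skipped:

```
// hypothetical rule: the predicate {inFunction}? is ignored in interpreted mode,
// so the interpreter behaves as if the first alternative were always viable
stat : {inFunction}? 'return' expr ';'
     | expr ';'
     ;
```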

## Java Target Interpreter Setup

```java
LexerGrammar lg = new LexerGrammar(

@ -76,4 +82,49 @@ ParseTree t = parse(fileName, XMLLexerGrammar, XMLParserGrammar, "document");

This is also how we will integrate instantaneous parsing into ANTLRWorks2 and development environment plug-ins.

See [TestParserInterpreter.java](https://github.com/antlr/antlr4/blob/master/tool-testsuite/test/org/antlr/v4/test/tool/TestParserInterpreter.java).
See [TestParserInterpreter.java](../tool-testsuite/test/org/antlr/v4/test/tool/TestParserInterpreter.java).

## Non-Java Target Interpreter Setup

The ANTLR4 runtimes do not contain any grammar parsing classes (they are in the ANTLR4 tool jar). Hence we cannot use `LexerGrammar` and `Grammar` to parse grammars for the interpreter. Instead we directly instantiate `LexerInterpreter` and `ParserInterpreter` objects. They require some data (namely symbol information and the ATNs) which only the ANTLR4 tool can give us. However, on each generation run ANTLR not only produces your parser + lexer files but also interpreter data files (*.interp) which contain all you need to feed the interpreters.
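
To give a rough idea of what such a file contains (the token and rule names below are hypothetical; only the section headers and the overall layout are taken from the tool's output format), a lexer `.interp` file looks roughly like this:

```
token literal names:
null
'*'
'+'
...

token symbolic names:
null
MUL
ADD
...

rule names:
MUL
ADD
WS
...

channel names:
DEFAULT_TOKEN_CHANNEL
HIDDEN

mode names:
DEFAULT_MODE

atn:
[<a single line of comma-separated integers>]
```

A parser `.interp` file has the same layout but omits the channel and mode name sections.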

A support class (`InterpreterDataReader`) is used to load the data for you, which makes this very easy to use. Even the Java target can go this route instead of using the non-runtime classes `Grammar` and `LexerGrammar`; sometimes it might not be feasible to use the tool jar at all.

Here's how the setup looks (C++ example):

```cpp
/**
 * sourceFileName - name of the file with content to parse
 * lexerName - the name of your lexer (arbitrary, that's what is used in error messages)
 * parserName - ditto for the parser
 * lexerDataFileName - the lexer interpreter data file name (e.g. `<path>/ExprLexer.interp`)
 * parserDataFileName - ditto for the parser (e.g. `<path>/Expr.interp`)
 * startRule - the name of the rule to start parsing at
 */
void parse(std::string const& sourceFileName,
           std::string const& lexerName, std::string const& parserName,
           std::string const& lexerDataFileName, std::string const& parserDataFileName,
           std::string const& startRule) {

  InterpreterData lexerData = InterpreterDataReader::parseFile(lexerDataFileName);
  InterpreterData parserData = InterpreterDataReader::parseFile(parserDataFileName);

  ANTLRFileStream input(sourceFileName);
  LexerInterpreter lexEngine(lexerName, lexerData.vocabulary, lexerData.ruleNames,
                             lexerData.channels, lexerData.modes, lexerData.atn, &input);
  CommonTokenStream tokens(&lexEngine);

  /* Remove comment to print the tokens.
  tokens.fill();
  std::cout << "INPUT:" << std::endl;
  for (auto token : tokens.getTokens()) {
    std::cout << token->toString() << std::endl;
  }
  */

  ParserInterpreter parser(parserName, parserData.vocabulary, parserData.ruleNames,
                           parserData.atn, &tokens);
  tree::ParseTree *tree = parser.parse(parser.getRuleIndex(startRule));

  std::cout << "parse tree: " << tree->toStringTree(&parser) << std::endl;
}
```

@ -32,8 +32,7 @@ Edit the repository looking for 4.5 or whatever and update it. Bump version in t
* runtime/Python3/setup.py
* runtime/Python3/src/antlr4/Recognizer.py
* runtime/CSharp/runtime/CSharp/Antlr4.Runtime/Properties/AssemblyInfo.cs
* runtime/CSharp/build/version.ps1
* runtime/CSharp/runtime/CSharp/Package.nuspec
* runtime/CSharp/runtime/CSharp/Antlr4.Runtime/Antlr4.Runtime.dotnet.csproj
* runtime/JavaScript/src/antlr4/package.json
* runtime/JavaScript/src/antlr4/Recognizer.js
* runtime/Cpp/VERSION
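
As an aside (this command is not part of the checklist above, and the version string is only an example), `git grep` is a quick way to catch any version references the list misses:

```bash
# hypothetical helper: list files that still mention the previous version number
git grep -n '4\.7\.1' -- runtime tool doc
```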

@ -245,75 +244,55 @@ popd

### CSharp

*Publishing to Nuget from Linux/MacOSX*
*Publishing to Nuget from Windows*

**Install the pre-requisites**

Of course you need Mono and `nuget` to be installed. On mac:

- mono - on mac, `brew install mono`
- nuget - on mac, `brew install nuget` or you can [download nuget.exe](https://dist.nuget.org/win-x86-commandline/latest/nuget.exe)
- .NET build tools - can be loaded from [here](https://www.visualstudio.com/downloads/)
- nuget - download [nuget.exe](https://www.nuget.org/downloads)
- dotnet - follow [the instructions here](https://www.microsoft.com/net/core)

From the shell on mac, you can check all is ok by typing
Alternatively, you can install Visual Studio 2017 and make sure the .NET Core SDK boxes are checked.

```bash
nuget
You also need to enable .NET Framework 3.5 support in Windows "Programs and Features".

If everything is ok, the following command will restore nuget packages, build Antlr for .NET Standard and .NET 3.5, and create the nuget package:

```PS
msbuild /target:restore /target:rebuild /target:pack /property:Configuration=Release .\Antlr4.dotnet.sln /verbosity:minimal
```

This should display the nuget help.
This should display something like this:

**Creating and packaging the assembly**

```bash
$ cd runtime/CSharp/runtime/CSharp
$ ./build-nuget-package.sh
...
Build succeeded.
0 Warning(s)
0 Error(s)
Attempting to build package from 'Package.nuspec'.
Successfully created package '/path/to/antlr/.../Antlr4.Runtime.Standard.4.7.0.nupkg'.
```
Microsoft (R) Build Engine version 15.4.8.50001 for .NET Framework
Copyright (C) Microsoft Corporation. All rights reserved.

This should display: Successfully created package *<package-path>*

Alternately, you may want to build ANTLR using Xamarin Studio Community (free).
Restoring packages for C:\Code\antlr4-fork\runtime\CSharp\runtime\CSharp\Antlr4.Runtime\Antlr4.Runtime.dotnet.csproj...
Generating MSBuild file C:\Code\antlr4-fork\runtime\CSharp\runtime\CSharp\Antlr4.Runtime\obj\Antlr4.Runtime.dotnet.csproj.nuget.g.props.
Generating MSBuild file C:\Code\antlr4-fork\runtime\CSharp\runtime\CSharp\Antlr4.Runtime\obj\Antlr4.Runtime.dotnet.csproj.nuget.g.targets.
Restore completed in 427.62 ms for C:\Code\antlr4-fork\runtime\CSharp\runtime\CSharp\Antlr4.Runtime\Antlr4.Runtime.dotnet.csproj.
Antlr4.Runtime.dotnet -> C:\Code\antlr4-fork\runtime\CSharp\runtime\CSharp\Antlr4.Runtime\lib\Release\netstandard1.3\Antlr4.Runtime.Standard.dll
Antlr4.Runtime.dotnet -> C:\Code\antlr4-fork\runtime\CSharp\runtime\CSharp\Antlr4.Runtime\lib\Release\net35\Antlr4.Runtime.Standard.dll
Successfully created package 'C:\Code\antlr4-fork\runtime\CSharp\runtime\CSharp\Antlr4.Runtime\lib\Release\Antlr4.Runtime.Standard.4.7.2.nupkg'.
```

**Publishing to NuGet**

You need to be a NuGet owner for "ANTLR 4 Standard Runtime"
As a registered NuGet user, you can then manually upload the package spec here (`runtime/CSharp/runtime/CSharp/Package.nuspec`): [https://www.nuget.org/packages/manage/upload](https://www.nuget.org/packages/manage/upload)
As a registered NuGet user, you can then manually upload the package here: [https://www.nuget.org/packages/manage/upload](https://www.nuget.org/packages/manage/upload)

Alternately, you can publish from the cmd line. You need to get your NuGet key from [https://www.nuget.org/account#](https://www.nuget.org/account#) and then, from the cmd line, type:

```bash
```cmd
nuget push Antlr4.Runtime.Standard.<version>.nupkg <your-key> -Source https://www.nuget.org/api/v2/package
```

**Creating DLLs**

```bash
cd ~/antlr/code/antlr4/runtime/CSharp/runtime/CSharp
# kill previous ones manually as "xbuild /t:Clean" didn't seem to do it
rm Antlr4.Runtime/bin/net20/Release/Antlr4.Runtime.dll
rm Antlr4.Runtime/obj/net20/Release/Antlr4.Runtime.dll
# build
xbuild /p:Configuration=Release Antlr4.Runtime/Antlr4.Runtime.mono.csproj
# zip it up to get a version number on zip filename
zip --junk-paths /tmp/antlr-csharp-runtime-4.7.zip Antlr4.Runtime/obj/net20/Release/Antlr4.Runtime.Standard.dll
cp /tmp/antlr-csharp-runtime-4.7.zip ~/antlr/sites/website-antlr4/download
```

Move target to website

```bash
pushd ~/antlr/sites/website-antlr4/download
git add antlr-csharp-runtime-4.7.zip
git commit -a -m 'update C# runtime'
git push origin gh-pages
popd
```
Nuget packages are also accessible as artifacts of [AppVeyor builds](https://ci.appveyor.com/project/parrt/antlr4/build/artifacts).

### Python
@ -10,6 +10,7 @@ import org.antlr.v4.runtime.CommonTokenStream;
|
|||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.TokenSource;
|
||||
import org.antlr.v4.runtime.WritableToken;
|
||||
import org.antlr.v4.runtime.misc.Utils;
|
||||
import org.antlr.v4.test.runtime.ErrorQueue;
|
||||
import org.antlr.v4.test.runtime.RuntimeTestSupport;
|
||||
import org.antlr.v4.test.runtime.StreamVacuum;
|
||||
|
@ -337,7 +338,9 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
|
|||
}
|
||||
|
||||
public String execRecognizer() {
|
||||
compile();
|
||||
boolean success = compile();
|
||||
assertTrue(success);
|
||||
|
||||
String output = execTest();
|
||||
if ( output!=null && output.length()==0 ) {
|
||||
output = null;
|
||||
|
@ -354,6 +357,7 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
|
|||
return false;
|
||||
return true;
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace(System.err);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -362,6 +366,7 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
|
|||
try {
|
||||
return buildDotnetProject();
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace(System.err);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -390,10 +395,15 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
|
|||
stdoutVacuum.join();
|
||||
stderrVacuum.join();
|
||||
// xbuild sends errors to output, so check exit code
|
||||
boolean success = process.exitValue()==0;
|
||||
int exitValue = process.exitValue();
|
||||
boolean success = (exitValue == 0);
|
||||
if ( !success ) {
|
||||
this.stderrDuringParse = stdoutVacuum.toString();
|
||||
System.err.println("buildProject stderrVacuum: "+ this.stderrDuringParse);
|
||||
String stderrString = stderrVacuum.toString();
|
||||
System.err.println("buildProject command: " + Utils.join(args, " "));
|
||||
System.err.println("buildProject exitValue: " + exitValue);
|
||||
System.err.println("buildProject stdout: " + stderrDuringParse);
|
||||
System.err.println("buildProject stderr: " + stderrString);
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
@ -505,6 +515,7 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
|
|||
runtimeProjPath
|
||||
};
|
||||
boolean success = runProcess(args, tmpdir);
|
||||
assertTrue(success);
|
||||
|
||||
// restore project
|
||||
args = new String[] {
|
||||
|
@ -514,6 +525,7 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
|
|||
"--no-dependencies"
|
||||
};
|
||||
success = runProcess(args, tmpdir);
|
||||
assertTrue(success);
|
||||
|
||||
// build test
|
||||
args = new String[] {
|
||||
|
@ -525,6 +537,7 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
|
|||
"--no-dependencies"
|
||||
};
|
||||
success = runProcess(args, tmpdir);
|
||||
assertTrue(success);
|
||||
}
|
||||
catch(Exception e) {
|
||||
e.printStackTrace(System.err);
|
||||
|
@ -535,6 +548,10 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
|
|||
}
|
||||
|
||||
private boolean runProcess(String[] args, String path) throws Exception {
|
||||
return runProcess(args, path, 0);
|
||||
}
|
||||
|
||||
private boolean runProcess(String[] args, String path, int retries) throws Exception {
|
||||
ProcessBuilder pb = new ProcessBuilder(args);
|
||||
pb.directory(new File(path));
|
||||
Process process = pb.start();
|
||||
|
@ -545,10 +562,28 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
|
|||
process.waitFor();
|
||||
stdoutVacuum.join();
|
||||
stderrVacuum.join();
|
||||
boolean success = process.exitValue()==0;
|
||||
int exitValue = process.exitValue();
|
||||
boolean success = (exitValue == 0);
|
||||
if ( !success ) {
|
||||
this.stderrDuringParse = stderrVacuum.toString();
|
||||
System.err.println("runProcess stderrVacuum: "+ this.stderrDuringParse);
|
||||
System.err.println("runProcess command: " + Utils.join(args, " "));
|
||||
System.err.println("runProcess exitValue: " + exitValue);
|
||||
System.err.println("runProcess stdoutVacuum: " + stdoutVacuum.toString());
|
||||
System.err.println("runProcess stderrVacuum: " + stderrDuringParse);
|
||||
}
|
||||
if (exitValue == 132) {
|
||||
// Retry after SIGILL. We are seeing this intermittently on
|
||||
// macOS (issue #2078).
|
||||
if (retries < 3) {
|
||||
System.err.println("runProcess retrying; " + retries +
|
||||
" retries so far");
|
||||
return runProcess(args, path, retries + 1);
|
||||
}
|
||||
else {
|
||||
System.err.println("runProcess giving up after " + retries +
|
||||
" retries");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
@ -577,9 +612,28 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
|
|||
ProcessBuilder pb = new ProcessBuilder(args);
|
||||
pb.directory(tmpdirFile);
|
||||
Process process = pb.start();
|
||||
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
|
||||
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
|
||||
stdoutVacuum.start();
|
||||
stderrVacuum.start();
|
||||
process.waitFor();
|
||||
stdoutVacuum.join();
|
||||
stderrVacuum.join();
|
||||
String writtenOutput = TestOutputReading.read(output);
|
||||
this.stderrDuringParse = TestOutputReading.read(errorOutput);
|
||||
int exitValue = process.exitValue();
|
||||
String stdoutString = stdoutVacuum.toString().trim();
|
||||
String stderrString = stderrVacuum.toString().trim();
|
||||
if (exitValue != 0) {
|
||||
System.err.println("execTest command: " + Utils.join(args, " "));
|
||||
System.err.println("execTest exitValue: " + exitValue);
|
||||
}
|
||||
if (!stdoutString.isEmpty()) {
|
||||
System.err.println("execTest stdoutVacuum: " + stdoutString);
|
||||
}
|
||||
if (!stderrString.isEmpty()) {
|
||||
System.err.println("execTest stderrVacuum: " + stderrString);
|
||||
}
|
||||
return writtenOutput;
|
||||
}
|
||||
catch (Exception e) {
|
||||
|
|
|
@ -281,7 +281,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
"\n" +
|
||||
"do {\n" +
|
||||
"let args = CommandLine.arguments\n" +
|
||||
"let input = ANTLRFileStream(args[1])\n" +
|
||||
"let input = try ANTLRFileStream(args[1])\n" +
|
||||
"let lex = <lexerName>(input)\n" +
|
||||
"let tokens = CommonTokenStream(lex)\n" +
|
||||
"<createParser>\n" +
|
||||
|
@ -327,7 +327,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
|
||||
"setbuf(stdout, nil)\n" +
|
||||
"let args = CommandLine.arguments\n" +
|
||||
"let input = ANTLRFileStream(args[1])\n" +
|
||||
"let input = try ANTLRFileStream(args[1])\n" +
|
||||
"let lex = <lexerName>(input)\n" +
|
||||
"let tokens = CommonTokenStream(lex)\n" +
|
||||
|
||||
|
|
|
@ -1,20 +1,20 @@
|
|||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<Company>The ANTLR Organization</Company>
|
||||
<Version>4.7.1</Version>
|
||||
<TargetFramework>netstandard1.3</TargetFramework>
|
||||
<DefineConstants>$(DefineConstants);DOTNETCORE;NET35PLUS;NET40PLUS;NET45PLUS</DefineConstants>
|
||||
<Version>4.7.2</Version>
|
||||
<NeutralLanguage>en-US</NeutralLanguage>
|
||||
<TargetFrameworks>netstandard1.3;net35</TargetFrameworks>
|
||||
<NoWarn>$(NoWarn);CS1591;CS1574;CS1580</NoWarn>
|
||||
<GenerateDocumentationFile>true</GenerateDocumentationFile>
|
||||
<AssemblyName>Antlr4.Runtime.Core</AssemblyName>
|
||||
<AssemblyName>Antlr4.Runtime.Standard</AssemblyName>
|
||||
<AssemblyOriginatorKeyFile>../../Antlr4.snk</AssemblyOriginatorKeyFile>
|
||||
<SignAssembly>true</SignAssembly>
|
||||
<PublicSign Condition=" '$(OS)' != 'Windows_NT' ">true</PublicSign>
|
||||
<PackageId>Antlr4.Runtime.Core</PackageId>
|
||||
<Title>ANTLR 4 .NET Core Runtime</Title>
|
||||
<PackageId>Antlr4.Runtime.Standard</PackageId>
|
||||
<Title>ANTLR 4 .NET Standard Runtime</Title>
|
||||
<Authors>Eric Vergnaud, Terence Parr, Sam Harwell</Authors>
|
||||
<Description>The .NET Core C# ANTLR 4 runtime from the ANTLR Organization</Description>
|
||||
<Summary>The runtime library for parsers generated by the C# target of the standard ANTLR 4 tool.</Summary>
|
||||
<Copyright>Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.</Copyright>
|
||||
<PackageRequireLicenseAcceptance>true</PackageRequireLicenseAcceptance>
|
||||
<PackageLicenseUrl>https://github.com/antlr/antlr4/blob/master/LICENSE.txt</PackageLicenseUrl>
|
||||
|
@ -32,6 +32,7 @@
|
|||
<GenerateAssemblyVersionAttribute>false</GenerateAssemblyVersionAttribute>
|
||||
<GenerateAssemblyFileVersionAttribute>false</GenerateAssemblyFileVersionAttribute>
|
||||
<GenerateAssemblyInformationalVersionAttribute>false</GenerateAssemblyInformationalVersionAttribute>
|
||||
<RootNamespace>Antlr4.Runtime</RootNamespace>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
|
||||
<DebugSymbols>true</DebugSymbols>
|
||||
|
@ -43,5 +44,10 @@
|
|||
<Optimize>true</Optimize>
|
||||
<OutputPath>lib\Release</OutputPath>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(TargetFramework)'=='netstandard1.3'">
|
||||
<DefineConstants>DOTNETCORE;NET35PLUS;NET40PLUS;NET45PLUS</DefineConstants>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(TargetFramework)'=='net35'">
|
||||
<DefineConstants>NET35PLUS</DefineConstants>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
|
|
|
@ -1,25 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<package xmlns="http://schemas.microsoft.com/packaging/2011/08/nuspec.xsd">
|
||||
<metadata>
|
||||
<id>Antlr4.Runtime.Standard</id>
|
||||
<version>4.7.1</version>
|
||||
<language>en-us</language>
|
||||
<title>ANTLR 4 Standard Runtime</title>
|
||||
<description>The standard C# ANTLR 4 runtime from the ANTLR Organization</description>
|
||||
<summary>The runtime library for parsers generated by the C# target of the standard ANTLR 4 tool.</summary>
|
||||
<authors>Eric Vergnaud, Terence Parr, Sam Harwell</authors>
|
||||
<owners>The ANTLR Organization</owners>
|
||||
<releaseNotes>https://github.com/antlr/antlr4/releases</releaseNotes>
|
||||
<requireLicenseAcceptance>true</requireLicenseAcceptance>
|
||||
<licenseUrl>https://github.com/antlr/antlr4/blob/master/LICENSE.txt</licenseUrl>
|
||||
<projectUrl>https://github.com/antlr/antlr4</projectUrl>
|
||||
<iconUrl>https://raw.github.com/antlr/website-antlr4/master/images/icons/antlr.png</iconUrl>
|
||||
<copyright>Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.</copyright>
|
||||
<tags>antlr parsing grammar</tags>
|
||||
<dependencies />
|
||||
</metadata>
|
||||
<files>
|
||||
<file src="Antlr4.Runtime/lib/Release/Antlr4.Runtime.Standard.dll" target="lib/net35/"/>
|
||||
<file src="Antlr4.Runtime/lib/Release/netstandard1.3/Antlr4.Runtime.Core.dll" target="lib/netstandard/"/>
|
||||
</files>
|
||||
</package>
|
|
@ -1,14 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
# Build a .NET 3.5 compatible DLL using mono
|
||||
# This step can be done by the `dotnet` cli once https://github.com/Microsoft/msbuild/issues/1333 is resolved.
|
||||
echo "Step 1: Building .NET 3.5 DLL"
|
||||
xbuild /p:Configuration=Release Antlr4.mono.sln
|
||||
|
||||
# Build a .NET core DLL using the `dotnet` cli from microsoft
|
||||
echo "Step 2: Building .NET Core DLL"
|
||||
dotnet restore Antlr4.dotnet.sln
|
||||
dotnet build -c Release Antlr4.dotnet.sln
|
||||
|
||||
echo "Step 3: Packaging both DLLs into a single nuget package"
|
||||
nuget pack Package.nuspec
|
|
@ -426,6 +426,7 @@
|
|||
<ClCompile Include="src\LexerInterpreter.cpp" />
|
||||
<ClCompile Include="src\LexerNoViableAltException.cpp" />
|
||||
<ClCompile Include="src\ListTokenSource.cpp" />
|
||||
<ClCompile Include="src\misc\InterpreterDataReader.cpp" />
|
||||
<ClCompile Include="src\misc\Interval.cpp" />
|
||||
<ClCompile Include="src\misc\IntervalSet.cpp" />
|
||||
<ClCompile Include="src\misc\MurmurHash.cpp" />
|
||||
|
@ -580,6 +581,7 @@
|
|||
<ClInclude Include="src\LexerInterpreter.h" />
|
||||
<ClInclude Include="src\LexerNoViableAltException.h" />
|
||||
<ClInclude Include="src\ListTokenSource.h" />
|
||||
<ClInclude Include="src\misc\InterpreterDataReader.h" />
|
||||
<ClInclude Include="src\misc\Interval.h" />
|
||||
<ClInclude Include="src\misc\IntervalSet.h" />
|
||||
<ClInclude Include="src\misc\MurmurHash.h" />
|
||||
|
|
|
@ -537,6 +537,9 @@
|
|||
<ClInclude Include="src\tree\IterativeParseTreeWalker.h">
|
||||
<Filter>Header Files\tree</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="src\misc\InterpreterDataReader.h">
|
||||
<Filter>Header Files</Filter>
|
||||
</ClInclude>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="src\ANTLRFileStream.cpp">
|
||||
|
@ -938,6 +941,9 @@
|
|||
<ClCompile Include="src\tree\IterativeParseTreeWalker.cpp">
|
||||
<Filter>Source Files\tree</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="src\misc\InterpreterDataReader.cpp">
|
||||
<Filter>Source Files</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="src\ANTLRErrorListener.cpp">
|
||||
<Filter>Source Files</Filter>
|
||||
</ClCompile>
|
||||
|
|
|
@ -848,6 +848,12 @@
|
|||
27B36AC91DACE7AF0069C868 /* RuleContextWithAltNum.h in Headers */ = {isa = PBXBuildFile; fileRef = 27B36AC51DACE7AF0069C868 /* RuleContextWithAltNum.h */; };
|
||||
27B36ACA1DACE7AF0069C868 /* RuleContextWithAltNum.h in Headers */ = {isa = PBXBuildFile; fileRef = 27B36AC51DACE7AF0069C868 /* RuleContextWithAltNum.h */; };
|
||||
27B36ACB1DACE7AF0069C868 /* RuleContextWithAltNum.h in Headers */ = {isa = PBXBuildFile; fileRef = 27B36AC51DACE7AF0069C868 /* RuleContextWithAltNum.h */; };
|
||||
27C375841EA1059C00B5883C /* InterpreterDataReader.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 27C375821EA1059C00B5883C /* InterpreterDataReader.cpp */; };
|
||||
27C375851EA1059C00B5883C /* InterpreterDataReader.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 27C375821EA1059C00B5883C /* InterpreterDataReader.cpp */; };
|
||||
27C375861EA1059C00B5883C /* InterpreterDataReader.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 27C375821EA1059C00B5883C /* InterpreterDataReader.cpp */; };
|
||||
27C375871EA1059C00B5883C /* InterpreterDataReader.h in Headers */ = {isa = PBXBuildFile; fileRef = 27C375831EA1059C00B5883C /* InterpreterDataReader.h */; };
|
||||
27C375881EA1059C00B5883C /* InterpreterDataReader.h in Headers */ = {isa = PBXBuildFile; fileRef = 27C375831EA1059C00B5883C /* InterpreterDataReader.h */; };
|
||||
27C375891EA1059C00B5883C /* InterpreterDataReader.h in Headers */ = {isa = PBXBuildFile; fileRef = 27C375831EA1059C00B5883C /* InterpreterDataReader.h */; };
|
||||
27D414521DEB0D3D00D0F3F9 /* IterativeParseTreeWalker.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 27D414501DEB0D3D00D0F3F9 /* IterativeParseTreeWalker.cpp */; };
|
||||
27D414531DEB0D3D00D0F3F9 /* IterativeParseTreeWalker.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 27D414501DEB0D3D00D0F3F9 /* IterativeParseTreeWalker.cpp */; };
|
||||
27D414541DEB0D3D00D0F3F9 /* IterativeParseTreeWalker.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 27D414501DEB0D3D00D0F3F9 /* IterativeParseTreeWalker.cpp */; };
|
||||
|
@ -1204,6 +1210,8 @@
|
|||
27AC52CF1CE773A80093AAAB /* antlr4-runtime.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "antlr4-runtime.h"; sourceTree = "<group>"; };
|
||||
27B36AC41DACE7AF0069C868 /* RuleContextWithAltNum.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = RuleContextWithAltNum.cpp; sourceTree = "<group>"; };
|
||||
27B36AC51DACE7AF0069C868 /* RuleContextWithAltNum.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RuleContextWithAltNum.h; sourceTree = "<group>"; };
|
||||
27C375821EA1059C00B5883C /* InterpreterDataReader.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = InterpreterDataReader.cpp; sourceTree = "<group>"; };
|
||||
27C375831EA1059C00B5883C /* InterpreterDataReader.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = InterpreterDataReader.h; sourceTree = "<group>"; };
|
||||
27D414501DEB0D3D00D0F3F9 /* IterativeParseTreeWalker.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = IterativeParseTreeWalker.cpp; sourceTree = "<group>"; };
|
||||
27D414511DEB0D3D00D0F3F9 /* IterativeParseTreeWalker.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = IterativeParseTreeWalker.h; sourceTree = "<group>"; };
|
||||
27DB448B1D045537007E790B /* XPath.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = XPath.cpp; sourceTree = "<group>"; wrapsLines = 0; };
|
||||
|
@ -1523,6 +1531,8 @@
|
|||
276E5CC91CDB57AA003FF4B4 /* misc */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
27C375821EA1059C00B5883C /* InterpreterDataReader.cpp */,
|
||||
27C375831EA1059C00B5883C /* InterpreterDataReader.h */,
|
||||
276E5CCA1CDB57AA003FF4B4 /* Interval.cpp */,
|
||||
276E5CCB1CDB57AA003FF4B4 /* Interval.h */,
|
||||
276E5CCC1CDB57AA003FF4B4 /* IntervalSet.cpp */,
|
||||
|
@ -1710,6 +1720,7 @@
|
|||
276E5DA51CDB57AA003FF4B4 /* BlockEndState.h in Headers */,
|
||||
276E5E831CDB57AA003FF4B4 /* ProfilingATNSimulator.h in Headers */,
|
||||
276E5D991CDB57AA003FF4B4 /* BasicBlockStartState.h in Headers */,
|
||||
27C375891EA1059C00B5883C /* InterpreterDataReader.h in Headers */,
|
||||
276E5E9B1CDB57AA003FF4B4 /* RuleTransition.h in Headers */,
|
||||
276E60031CDB57AA003FF4B4 /* ParseTreeProperty.h in Headers */,
|
||||
276E5D8D1CDB57AA003FF4B4 /* ATNType.h in Headers */,
|
||||
|
@ -1859,6 +1870,7 @@
|
|||
276E5E2B1CDB57AA003FF4B4 /* LL1Analyzer.h in Headers */,
|
||||
27DB44BA1D0463DA007E790B /* XPathElement.h in Headers */,
|
||||
276E5D7A1CDB57AA003FF4B4 /* ATNSerializer.h in Headers */,
|
||||
27C375881EA1059C00B5883C /* InterpreterDataReader.h in Headers */,
|
||||
276E5EAC1CDB57AA003FF4B4 /* SingletonPredictionContext.h in Headers */,
|
||||
276E5E191CDB57AA003FF4B4 /* LexerPushModeAction.h in Headers */,
|
||||
276E5ECA1CDB57AA003FF4B4 /* Transition.h in Headers */,
|
||||
|
@ -2035,6 +2047,7 @@
|
|||
276E5E811CDB57AA003FF4B4 /* ProfilingATNSimulator.h in Headers */,
|
||||
276E5D971CDB57AA003FF4B4 /* BasicBlockStartState.h in Headers */,
|
||||
276E5E991CDB57AA003FF4B4 /* RuleTransition.h in Headers */,
|
||||
27C375871EA1059C00B5883C /* InterpreterDataReader.h in Headers */,
|
||||
276E60011CDB57AA003FF4B4 /* ParseTreeProperty.h in Headers */,
|
||||
276E5D8B1CDB57AA003FF4B4 /* ATNType.h in Headers */,
|
||||
276E5FFB1CDB57AA003FF4B4 /* ParseTreeListener.h in Headers */,
|
||||
|
@ -2284,6 +2297,7 @@
|
|||
276E5F551CDB57AA003FF4B4 /* LexerNoViableAltException.cpp in Sources */,
|
||||
2793DCB81F08099C00A84290 /* LexerAction.cpp in Sources */,
|
||||
276E5E561CDB57AA003FF4B4 /* PlusBlockStartState.cpp in Sources */,
|
||||
27C375861EA1059C00B5883C /* InterpreterDataReader.cpp in Sources */,
|
||||
276E5E1D1CDB57AA003FF4B4 /* LexerSkipAction.cpp in Sources */,
|
||||
276E5EBC1CDB57AA003FF4B4 /* StarLoopEntryState.cpp in Sources */,
|
||||
276E5D721CDB57AA003FF4B4 /* ATNDeserializer.cpp in Sources */,
|
||||
|
@ -2437,6 +2451,7 @@
|
|||
276E5F541CDB57AA003FF4B4 /* LexerNoViableAltException.cpp in Sources */,
|
||||
2793DCB71F08099C00A84290 /* LexerAction.cpp in Sources */,
|
||||
276E5E551CDB57AA003FF4B4 /* PlusBlockStartState.cpp in Sources */,
|
||||
27C375851EA1059C00B5883C /* InterpreterDataReader.cpp in Sources */,
|
||||
276E5E1C1CDB57AA003FF4B4 /* LexerSkipAction.cpp in Sources */,
|
||||
276E5EBB1CDB57AA003FF4B4 /* StarLoopEntryState.cpp in Sources */,
|
||||
276E5D711CDB57AA003FF4B4 /* ATNDeserializer.cpp in Sources */,
|
||||
|
@ -2590,6 +2605,7 @@
|
|||
276E5DD01CDB57AA003FF4B4 /* ErrorInfo.cpp in Sources */,
|
||||
2793DCB61F08099C00A84290 /* LexerAction.cpp in Sources */,
|
||||
276E5F531CDB57AA003FF4B4 /* LexerNoViableAltException.cpp in Sources */,
|
||||
27C375841EA1059C00B5883C /* InterpreterDataReader.cpp in Sources */,
|
||||
276E5E541CDB57AA003FF4B4 /* PlusBlockStartState.cpp in Sources */,
|
||||
276E5E1B1CDB57AA003FF4B4 /* LexerSkipAction.cpp in Sources */,
|
||||
276E5EBA1CDB57AA003FF4B4 /* StarLoopEntryState.cpp in Sources */,
|
||||
|
|
|
@ -38,7 +38,7 @@ LexerInterpreter::LexerInterpreter(const std::string &grammarFileName, const dfa
|
|||
for (size_t i = 0; i < atn.getNumberOfDecisions(); ++i) {
|
||||
_decisionToDFA.push_back(dfa::DFA(_atn.getDecisionState(i), i));
|
||||
}
|
||||
_interpreter = new atn::LexerATNSimulator(_atn, _decisionToDFA, _sharedContextCache); /* mem-check: deleted in d-tor */
|
||||
_interpreter = new atn::LexerATNSimulator(this, _atn, _decisionToDFA, _sharedContextCache); /* mem-check: deleted in d-tor */
|
||||
}
|
||||
|
||||
LexerInterpreter::~LexerInterpreter()
|
||||
|
|
|
@ -39,7 +39,7 @@ namespace antlr4 {
|
|||
// @deprecated
|
||||
std::vector<std::string> _tokenNames;
|
||||
const std::vector<std::string> &_ruleNames;
|
||||
const std::vector<std::string> &_channelNames;
|
||||
const std::vector<std::string> &_channelNames;
|
||||
const std::vector<std::string> &_modeNames;
|
||||
std::vector<dfa::DFA> _decisionToDFA;
|
||||
|
||||
|
|
|
@ -54,8 +54,10 @@ namespace antlr4 {
|
|||
// UTF-32 encoded.
|
||||
#if defined(_MSC_VER) && _MSC_VER == 1900
|
||||
i32string _data; // Custom type for VS 2015.
|
||||
typedef __int32 storage_type;
|
||||
#else
|
||||
std::u32string _data;
|
||||
typedef char32_t storage_type;
|
||||
#endif
|
||||
|
||||
/// <summary>
|
||||
|
|
|
@ -123,6 +123,7 @@
|
|||
#include "dfa/DFASerializer.h"
|
||||
#include "dfa/DFAState.h"
|
||||
#include "dfa/LexerDFASerializer.h"
|
||||
#include "misc/InterpreterDataReader.h"
|
||||
#include "misc/Interval.h"
|
||||
#include "misc/IntervalSet.h"
|
||||
#include "misc/MurmurHash.h"
|
||||
|
|
|
@ -0,0 +1,124 @@
|
|||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
|
||||
#include "atn/ATN.h"
|
||||
#include "atn/ATNDeserializer.h"
|
||||
#include "Vocabulary.h"
|
||||
|
||||
#include "misc/InterpreterDataReader.h"
|
||||
|
||||
using namespace antlr4::dfa;
|
||||
using namespace antlr4::atn;
|
||||
using namespace antlr4::misc;
|
||||
|
||||
InterpreterData::InterpreterData(std::vector<std::string> const& literalNames, std::vector<std::string> const& symbolicNames)
|
||||
: vocabulary(literalNames, symbolicNames) {
|
||||
}
|
||||
|
||||
InterpreterData InterpreterDataReader::parseFile(std::string const& fileName) {
|
||||
// The structure of the data file is very simple. Everything is line based with empty lines
|
||||
// separating the different parts. For lexers the layout is:
|
||||
// token literal names:
|
||||
// ...
|
||||
//
|
||||
// token symbolic names:
|
||||
// ...
|
||||
//
|
||||
// rule names:
|
||||
// ...
|
||||
//
|
||||
// channel names:
|
||||
// ...
|
||||
//
|
||||
// mode names:
|
||||
// ...
|
||||
//
|
||||
// atn:
|
||||
// <a single line with comma separated int values> enclosed in a pair of squared brackets.
|
||||
//
|
||||
// Data for a parser does not contain channel and mode names.
|
||||
|
||||
std::ifstream input(fileName);
|
||||
if (!input.good())
|
||||
return {};
|
||||
|
||||
std::vector<std::string> literalNames;
|
||||
std::vector<std::string> symbolicNames;
|
||||
|
||||
std::string line;
|
||||
|
||||
std::getline(input, line, '\n');
|
||||
assert(line == "token literal names:");
|
||||
while (true) {
|
||||
std::getline(input, line, '\n');
|
||||
if (line.empty())
|
||||
break;
|
||||
|
||||
literalNames.push_back(line == "null" ? "" : line);
|
||||
};
|
||||
|
||||
std::getline(input, line, '\n');
|
||||
assert(line == "token symbolic names:");
|
||||
while (true) {
|
||||
std::getline(input, line, '\n');
|
||||
if (line.empty())
|
||||
break;
|
||||
|
||||
symbolicNames.push_back(line == "null" ? "" : line);
|
||||
};
|
||||
InterpreterData result(literalNames, symbolicNames);
|
||||
|
||||
std::getline(input, line, '\n');
|
||||
assert(line == "rule names:");
|
||||
while (true) {
|
||||
std::getline(input, line, '\n');
|
||||
if (line.empty())
|
||||
break;
|
||||
|
||||
result.ruleNames.push_back(line);
|
||||
};
|
||||
|
||||
std::getline(input, line, '\n');
|
||||
if (line == "channel names:") {
|
||||
while (true) {
|
||||
std::getline(input, line, '\n');
|
||||
if (line.empty())
|
||||
break;
|
||||
|
||||
result.channels.push_back(line);
|
||||
};
|
||||
|
||||
std::getline(input, line, '\n');
|
||||
assert(line == "mode names:");
|
||||
while (true) {
|
||||
std::getline(input, line, '\n');
|
||||
if (line.empty())
|
||||
break;
|
||||
|
||||
result.modes.push_back(line);
|
||||
};
|
||||
}
|
||||
|
||||
std::vector<uint16_t> serializedATN;
|
||||
|
||||
std::getline(input, line, '\n');
|
||||
assert(line == "atn:");
|
||||
std::getline(input, line, '\n');
|
||||
std::stringstream tokenizer(line);
|
||||
std::string value;
|
||||
while (tokenizer.good()) {
|
||||
std::getline(tokenizer, value, ',');
|
||||
unsigned long number;
|
||||
if (value[0] == '[')
|
||||
number = std::strtoul(&value[1], nullptr, 10);
|
||||
else
|
||||
number = std::strtoul(value.c_str(), nullptr, 10);
|
||||
serializedATN.push_back(static_cast<uint16_t>(number));
|
||||
}
|
||||
|
||||
ATNDeserializer deserializer;
|
||||
result.atn = deserializer.deserialize(serializedATN);
|
||||
return result;
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "antlr4-common.h"
|
||||
|
||||
namespace antlr4 {
|
||||
namespace misc {
|
||||
|
||||
struct InterpreterData {
|
||||
atn::ATN atn;
|
||||
dfa::Vocabulary vocabulary;
|
||||
std::vector<std::string> ruleNames;
|
||||
std::vector<std::string> channels; // Only valid for lexer grammars.
|
||||
std::vector<std::string> modes; // ditto
|
||||
|
||||
InterpreterData() {}; // For invalid content.
|
||||
InterpreterData(std::vector<std::string> const& literalNames, std::vector<std::string> const& symbolicNames);
|
||||
};
|
||||
|
||||
// A class to read plain text interpreter data produced by ANTLR.
|
||||
class ANTLR4CPP_PUBLIC InterpreterDataReader {
|
||||
public:
|
||||
static InterpreterData parseFile(std::string const& fileName);
|
||||
};
|
||||
|
||||
} // namespace atn
|
||||
} // namespace antlr4
|
|
@ -52,6 +52,7 @@ namespace antlr4 {
|
|||
class WritableToken;
|
||||
|
||||
namespace misc {
|
||||
class InterpreterDataReader;
|
||||
class Interval;
|
||||
class IntervalSet;
|
||||
class MurmurHash;
|
||||
|
|
|
@ -0,0 +1,141 @@
|
|||
/*
|
||||
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
|
||||
package org.antlr.v4.runtime.misc;
|
||||
|
||||
import org.antlr.v4.runtime.Vocabulary;
|
||||
import org.antlr.v4.runtime.VocabularyImpl;
|
||||
import org.antlr.v4.runtime.atn.ATN;
|
||||
import org.antlr.v4.runtime.atn.ATNDeserializer;
|
||||
import org.antlr.v4.runtime.dfa.DFA;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.ArrayList;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.FileReader;
|
||||
|
||||
// A class to read plain text interpreter data produced by ANTLR.
|
||||
public class InterpreterDataReader {
|
||||
|
||||
public static class InterpreterData {
|
||||
ATN atn;
|
||||
Vocabulary vocabulary;
|
||||
List<String> ruleNames;
|
||||
List<String> channels; // Only valid for lexer grammars.
|
||||
List<String> modes; // ditto
|
||||
};
|
||||
|
||||
/**
|
||||
* The structure of the data file is very simple. Everything is line based with empty lines
|
||||
* separating the different parts. For lexers the layout is:
|
||||
* token literal names:
|
||||
* ...
|
||||
*
|
||||
* token symbolic names:
|
||||
* ...
|
||||
*
|
||||
* rule names:
|
||||
* ...
|
||||
*
|
||||
* channel names:
|
||||
* ...
|
||||
*
|
||||
* mode names:
|
||||
* ...
|
||||
*
|
||||
* atn:
|
||||
* <a single line with comma separated int values> enclosed in a pair of squared brackets.
|
||||
*
|
||||
* Data for a parser does not contain channel and mode names.
|
||||
*/
|
||||
public static InterpreterData parseFile(String fileName) {
|
||||
InterpreterData result = new InterpreterData();
|
||||
result.ruleNames = new ArrayList<String>();
|
||||
|
||||
try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
|
||||
String line;
|
||||
List<String> literalNames = new ArrayList<String>();
|
||||
List<String> symbolicNames = new ArrayList<String>();
|
||||
|
||||
line = br.readLine();
|
||||
if ( !line.equals("token literal names:") )
|
||||
throw new RuntimeException("Unexpected data entry");
|
||||
while ((line = br.readLine()) != null) {
|
||||
if ( line.isEmpty() )
|
||||
break;
|
||||
literalNames.add(line.equals("null") ? "" : line);
|
||||
}
|
||||
|
||||
line = br.readLine();
|
||||
if ( !line.equals("token symbolic names:") )
|
||||
throw new RuntimeException("Unexpected data entry");
|
||||
while ((line = br.readLine()) != null) {
|
||||
if ( line.isEmpty() )
|
||||
break;
|
||||
symbolicNames.add(line.equals("null") ? "" : line);
|
||||
}
|
||||
|
||||
result.vocabulary = new VocabularyImpl(literalNames.toArray(new String[0]), symbolicNames.toArray(new String[0]));
|
||||
|
||||
line = br.readLine();
|
||||
if ( !line.equals("rule names:") )
|
||||
throw new RuntimeException("Unexpected data entry");
|
||||
while ((line = br.readLine()) != null) {
|
||||
if ( line.isEmpty() )
|
||||
break;
|
||||
result.ruleNames.add(line);
|
||||
}
|
||||
|
||||
if ( line.equals("channel names:") ) { // Additional lexer data.
|
||||
result.channels = new ArrayList<String>();
|
||||
while ((line = br.readLine()) != null) {
|
||||
if ( line.isEmpty() )
|
||||
break;
|
||||
result.channels.add(line);
|
||||
}
|
||||
|
||||
line = br.readLine();
|
||||
if ( !line.equals("mode names:") )
|
||||
throw new RuntimeException("Unexpected data entry");
|
||||
result.modes = new ArrayList<String>();
|
||||
while ((line = br.readLine()) != null) {
|
||||
if ( line.isEmpty() )
|
||||
break;
|
||||
result.modes.add(line);
|
||||
}
|
||||
}
|
||||
|
||||
line = br.readLine();
|
||||
if ( !line.equals("atn:") )
|
||||
throw new RuntimeException("Unexpected data entry");
|
||||
line = br.readLine();
|
||||
String[] elements = line.split(",");
|
||||
char[] serializedATN = new char[elements.length];
|
||||
|
||||
for (int i = 0; i < elements.length; ++i) {
|
||||
int value;
|
||||
String element = elements[i];
|
||||
if ( element.startsWith("[") )
|
||||
value = Integer.parseInt(element.substring(1).trim());
|
||||
else if ( element.endsWith("]") )
|
||||
value = Integer.parseInt(element.substring(0, element.length() - 1).trim());
|
||||
else
|
||||
value = Integer.parseInt(element.trim());
|
||||
serializedATN[i] = (char)value;
|
||||
}
|
||||
|
||||
ATNDeserializer deserializer = new ATNDeserializer();
|
||||
result.atn = deserializer.deserialize(serializedATN);
|
||||
}
|
||||
catch (java.io.IOException e) {
|
||||
// We just swallow the error and return empty objects instead.
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
|
@ -267,7 +267,7 @@ DefaultErrorStrategy.prototype.reportNoViableAlternative = function(recognizer,
|
|||
if (e.startToken.type===Token.EOF) {
|
||||
input = "<EOF>";
|
||||
} else {
|
||||
input = tokens.getText(new Interval(e.startToken, e.offendingToken));
|
||||
input = tokens.getText(new Interval(e.startToken.tokenIndex, e.offendingToken.tokenIndex));
|
||||
}
|
||||
} else {
|
||||
input = "<unknown input>";
|
||||
|
@ -753,4 +753,4 @@ BailErrorStrategy.prototype.sync = function(recognizer) {
|
|||
};
|
||||
|
||||
exports.BailErrorStrategy = BailErrorStrategy;
|
||||
exports.DefaultErrorStrategy = DefaultErrorStrategy;
|
||||
exports.DefaultErrorStrategy = DefaultErrorStrategy;
|
||||
|
|
|
@ -9,30 +9,18 @@
|
|||
import Foundation
|
||||
|
||||
public class ANTLRFileStream: ANTLRInputStream {
|
||||
internal var fileName: String
|
||||
private let fileName: String
|
||||
|
||||
public convenience override init(_ fileName: String) {
|
||||
self.init(fileName, nil)
|
||||
}
|
||||
|
||||
public init(_ fileName: String, _ encoding: String.Encoding?) {
|
||||
public init(_ fileName: String, _ encoding: String.Encoding? = nil) throws {
|
||||
self.fileName = fileName
|
||||
super.init()
|
||||
load(fileName, encoding)
|
||||
}
|
||||
|
||||
public func load(_ fileName: String, _ encoding: String.Encoding?) {
|
||||
if encoding != nil {
|
||||
data = Utils.readFile(fileName, encoding!)
|
||||
} else {
|
||||
data = Utils.readFile(fileName)
|
||||
}
|
||||
self.n = data.count
|
||||
let fileContents = try String(contentsOfFile: fileName, encoding: encoding ?? .utf8)
|
||||
data = Array(fileContents)
|
||||
n = data.count
|
||||
}
|
||||
|
||||
override
|
||||
public func getSourceName() -> String {
|
||||
return fileName
|
||||
}
|
||||
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
|
@ -34,19 +34,6 @@ public class Utils {
|
|||
}
|
||||
|
||||
|
||||
public static func readFile(_ path: String, _ encoding: String.Encoding = String.Encoding.utf8) -> [Character] {
|
||||
|
||||
var fileContents: String
|
||||
|
||||
do {
|
||||
fileContents = try String(contentsOfFile: path, encoding: encoding)
|
||||
} catch {
|
||||
return [Character]()
|
||||
}
|
||||
|
||||
return Array(fileContents.characters)
|
||||
}
|
||||
|
||||
public static func toMap(_ keys: [String]) -> Dictionary<String, Int> {
|
||||
var m = Dictionary<String, Int>()
|
||||
for (index,v) in keys.enumerated() {
|
||||
|
|
|
@ -1,41 +0,0 @@
|
|||
///
|
||||
/// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
/// Use of this file is governed by the BSD 3-clause license that
|
||||
/// can be found in the LICENSE.txt file in the project root.
|
||||
///
|
||||
|
||||
//
|
||||
// NSUUIDExtension.swift
|
||||
// objc2swiftwithswith
|
||||
//
|
||||
// Created by janyou on 15/9/8.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
|
||||
|
||||
extension UUID {
|
||||
|
||||
public init(mostSigBits: Int64, leastSigBits: Int64) {
|
||||
let uuid: String = ""
|
||||
self.init(uuidString: uuid)!
|
||||
}
|
||||
|
||||
private func toUUID(_ mostSigBits: Int64, _ leastSigBits: Int64) -> String {
|
||||
|
||||
return (digits(mostSigBits >> 32, 8) + "-" +
|
||||
digits(mostSigBits >> 16, 4) + "-" +
|
||||
digits(mostSigBits, 4) + "-" +
|
||||
digits(leastSigBits >> 48, 4) + "-" +
|
||||
digits(leastSigBits, 12))
|
||||
}
|
||||
|
||||
private func digits(_ val: Int64, _ digits: Int) -> String {
|
||||
let hi = Int64(1) << Int64(digits * 4)
|
||||
let intLiteral = hi | (val & (hi - 1))
|
||||
let s: String = String(Character(UnicodeScalar(UInt32(intLiteral))!))
|
||||
return s[1 ..< s.length]
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,23 @@
|
|||
///
|
||||
/// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
/// Use of this file is governed by the BSD 3-clause license that
|
||||
/// can be found in the LICENSE.txt file in the project root.
|
||||
///
|
||||
|
||||
import Foundation
|
||||
|
||||
|
||||
extension UUID {
|
||||
public init(mostSigBits: Int64, leastSigBits: Int64) {
|
||||
let bytes = UnsafeMutablePointer<UInt8>.allocate(capacity: 16)
|
||||
defer {
|
||||
bytes.deallocate(capacity: 16)
|
||||
}
|
||||
bytes.withMemoryRebound(to: Int64.self, capacity: 2) {
|
||||
$0.pointee = leastSigBits
|
||||
$0.advanced(by: 1).pointee = mostSigBits
|
||||
}
|
||||
let u = NSUUID(uuidBytes: bytes)
|
||||
self.init(uuidString: u.uuidString)!
|
||||
}
|
||||
}
|
|
@ -17,5 +17,12 @@
|
|||
/// regular tokens of the text surrounding the tags.
|
||||
///
|
||||
|
||||
public class Chunk {
|
||||
public class Chunk: Equatable {
|
||||
public static func ==(lhs: Chunk, rhs: Chunk) -> Bool {
|
||||
return lhs.isEqual(rhs)
|
||||
}
|
||||
|
||||
public func isEqual(_ other: Chunk) -> Bool {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
|
|
@ -387,7 +387,7 @@ public class ParseTreePatternMatcher {
|
|||
|
||||
let ntags = starts.count
|
||||
for i in 0..<ntags {
|
||||
if starts[i] != stops[i] {
|
||||
if starts[i] >= stops[i] {
|
||||
throw ANTLRError.illegalArgument(msg: "tag delimiters out of order in pattern: " + pattern)
|
||||
}
|
||||
}
|
||||
|
@ -408,7 +408,7 @@ public class ParseTreePatternMatcher {
|
|||
// copy inside of <tag>
|
||||
let tag = pattern[starts[i] + start.length ..< stops[i]]
|
||||
var ruleOrToken = tag
|
||||
var label = ""
|
||||
var label: String?
|
||||
let colon = tag.indexOf(":")
|
||||
if colon >= 0 {
|
||||
label = tag[0 ..< colon]
|
||||
|
@ -417,7 +417,7 @@ public class ParseTreePatternMatcher {
|
|||
chunks.append(try TagChunk(label, ruleOrToken))
|
||||
if i + 1 < ntags {
|
||||
// copy from end of <tag> to start of next
|
||||
let text = pattern[stops[i] + stop.length ..< starts[i] + 1]
|
||||
let text = pattern[stops[i] + stop.length ..< starts[i + 1]]
|
||||
chunks.append(TextChunk(text))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -87,10 +87,19 @@ public class TagChunk: Chunk, CustomStringConvertible {
|
|||
/// returned as just the tag name.
|
||||
///
|
||||
public var description: String {
|
||||
if label != nil {
|
||||
return label! + ":" + tag
|
||||
if let label = label {
|
||||
return "\(label):\(tag)"
|
||||
}
|
||||
else {
|
||||
return tag
|
||||
}
|
||||
}
|
||||
|
||||
return tag
|
||||
|
||||
override public func isEqual(_ other: Chunk) -> Bool {
|
||||
guard let other = other as? TagChunk else {
|
||||
return false
|
||||
}
|
||||
return tag == other.tag && label == other.label
|
||||
}
|
||||
}
|
||||
|
|
|
@ -36,15 +36,19 @@ public class TextChunk: Chunk, CustomStringConvertible {
|
|||
return text
|
||||
}
|
||||
|
||||
///
|
||||
///
|
||||
///
|
||||
///
|
||||
/// The implementation for _org.antlr.v4.runtime.tree.pattern.TextChunk_ returns the result of
|
||||
/// _#getText()_ in single quotes.
|
||||
///
|
||||
|
||||
|
||||
///
|
||||
public var description: String {
|
||||
return "'" + text + "'"
|
||||
return "'\(text)'"
|
||||
}
|
||||
|
||||
|
||||
override public func isEqual(_ other: Chunk) -> Bool {
|
||||
guard let other = other as? TextChunk else {
|
||||
return false
|
||||
}
|
||||
return text == other.text
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
/// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
/// Use of this file is governed by the BSD 3-clause license that
|
||||
/// can be found in the LICENSE.txt file in the project root.
|
||||
|
||||
import Foundation
|
||||
import XCTest
|
||||
import Antlr4
|
||||
|
||||
class ParseTreePatternMatcherTests: XCTestCase {
|
||||
|
||||
func testSplit() throws {
|
||||
try doSplitTest("", [TextChunk("")])
|
||||
try doSplitTest("Foo", [TextChunk("Foo")])
|
||||
try doSplitTest("<ID> = <e:expr> ;",
|
||||
[TagChunk("ID"), TextChunk(" = "), TagChunk("e", "expr"), TextChunk(" ;")])
|
||||
try doSplitTest("\\<ID\\> = <e:expr> ;",
|
||||
[TextChunk("<ID> = "), TagChunk("e", "expr"), TextChunk(" ;")])
|
||||
}
|
||||
}
|
||||
|
||||
private func doSplitTest(_ input: String, _ expected: [Chunk]) throws {
|
||||
let matcher = try makeMatcher()
|
||||
XCTAssertEqual(try matcher.split(input), expected)
|
||||
}
|
||||
|
||||
private func makeMatcher() throws -> ParseTreePatternMatcher {
|
||||
// The lexer and parser here aren't actually used. They're just here
|
||||
// so that ParseTreePatternMatcher can be constructed, but in this file
|
||||
// we're currently only testing methods that don't depend on them.
|
||||
let lexer = Lexer()
|
||||
let ts = BufferedTokenStream(lexer)
|
||||
let parser = try Parser(ts)
|
||||
return ParseTreePatternMatcher(lexer, parser)
|
||||
}
|
|
@ -27,6 +27,8 @@ import org.antlr.v4.parse.ToolANTLRParser;
|
|||
import org.antlr.v4.parse.v3TreeGrammarException;
|
||||
import org.antlr.v4.runtime.RuntimeMetaData;
|
||||
import org.antlr.v4.runtime.misc.LogManager;
|
||||
import org.antlr.v4.runtime.misc.IntegerList;
|
||||
import org.antlr.v4.runtime.atn.ATNSerializer;
|
||||
import org.antlr.v4.semantics.SemanticPipeline;
|
||||
import org.antlr.v4.tool.ANTLRMessage;
|
||||
import org.antlr.v4.tool.ANTLRToolListener;
|
||||
|
@ -118,25 +120,25 @@ public class Tool {
|
|||
public boolean exact_output_dir = false;
|
||||
|
||||
public static Option[] optionDefs = {
|
||||
new Option("outputDirectory", "-o", OptionArgType.STRING, "specify output directory where all output is generated"),
|
||||
new Option("libDirectory", "-lib", OptionArgType.STRING, "specify location of grammars, tokens files"),
|
||||
new Option("generate_ATN_dot", "-atn", "generate rule augmented transition network diagrams"),
|
||||
new Option("grammarEncoding", "-encoding", OptionArgType.STRING, "specify grammar file encoding; e.g., euc-jp"),
|
||||
new Option("msgFormat", "-message-format", OptionArgType.STRING, "specify output style for messages in antlr, gnu, vs2005"),
|
||||
new Option("longMessages", "-long-messages", "show exception details when available for errors and warnings"),
|
||||
new Option("gen_listener", "-listener", "generate parse tree listener (default)"),
|
||||
new Option("gen_listener", "-no-listener", "don't generate parse tree listener"),
|
||||
new Option("gen_visitor", "-visitor", "generate parse tree visitor"),
|
||||
new Option("gen_visitor", "-no-visitor", "don't generate parse tree visitor (default)"),
|
||||
new Option("genPackage", "-package", OptionArgType.STRING, "specify a package/namespace for the generated code"),
|
||||
new Option("gen_dependencies", "-depend", "generate file dependencies"),
|
||||
new Option("", "-D<option>=value", "set/override a grammar-level option"),
|
||||
new Option("warnings_are_errors", "-Werror", "treat warnings as errors"),
|
||||
new Option("launch_ST_inspector", "-XdbgST", "launch StringTemplate visualizer on generated code"),
|
||||
new Option("outputDirectory", "-o", OptionArgType.STRING, "specify output directory where all output is generated"),
|
||||
new Option("libDirectory", "-lib", OptionArgType.STRING, "specify location of grammars, tokens files"),
|
||||
new Option("generate_ATN_dot", "-atn", "generate rule augmented transition network diagrams"),
|
||||
new Option("grammarEncoding", "-encoding", OptionArgType.STRING, "specify grammar file encoding; e.g., euc-jp"),
|
||||
new Option("msgFormat", "-message-format", OptionArgType.STRING, "specify output style for messages in antlr, gnu, vs2005"),
|
||||
new Option("longMessages", "-long-messages", "show exception details when available for errors and warnings"),
|
||||
new Option("gen_listener", "-listener", "generate parse tree listener (default)"),
|
||||
new Option("gen_listener", "-no-listener", "don't generate parse tree listener"),
|
||||
new Option("gen_visitor", "-visitor", "generate parse tree visitor"),
|
||||
new Option("gen_visitor", "-no-visitor", "don't generate parse tree visitor (default)"),
|
||||
new Option("genPackage", "-package", OptionArgType.STRING, "specify a package/namespace for the generated code"),
|
||||
new Option("gen_dependencies", "-depend", "generate file dependencies"),
|
||||
new Option("", "-D<option>=value", "set/override a grammar-level option"),
|
||||
new Option("warnings_are_errors", "-Werror", "treat warnings as errors"),
|
||||
new Option("launch_ST_inspector", "-XdbgST", "launch StringTemplate visualizer on generated code"),
|
||||
new Option("ST_inspector_wait_for_close", "-XdbgSTWait", "wait for STViz to close before continuing"),
|
||||
new Option("force_atn", "-Xforce-atn", "use the ATN simulator for all predictions"),
|
||||
new Option("log", "-Xlog", "dump lots of logging info to antlr-timestamp.log"),
|
||||
new Option("exact_output_dir", "-Xexact-output-dir", "all output goes into -o dir regardless of paths/package"),
|
||||
new Option("force_atn", "-Xforce-atn", "use the ATN simulator for all predictions"),
|
||||
new Option("log", "-Xlog", "dump lots of logging info to antlr-timestamp.log"),
|
||||
new Option("exact_output_dir", "-Xexact-output-dir", "all output goes into -o dir regardless of paths/package"),
|
||||
};
|
||||
|
||||
// helper vars for option management
|
||||
|
@ -395,6 +397,8 @@ public class Tool {
|
|||
|
||||
if ( generate_ATN_dot ) generateATNs(g);
|
||||
|
||||
if ( g.tool.getNumErrors()==0 ) generateInterpreterData(g);
|
||||
|
||||
// PERFORM GRAMMAR ANALYSIS ON ATN: BUILD DECISION DFAs
|
||||
AnalysisPipeline anal = new AnalysisPipeline(g);
|
||||
anal.process();
|
||||
|
@ -698,6 +702,64 @@ public class Tool {
|
|||
}
|
||||
}
|
||||
|
||||
private void generateInterpreterData(Grammar g) {
|
||||
StringBuilder content = new StringBuilder();
|
||||
|
||||
content.append("token literal names:\n");
|
||||
String[] names = g.getTokenLiteralNames();
|
||||
for (String name : names) {
|
||||
content.append(name + "\n");
|
||||
}
|
||||
content.append("\n");
|
||||
|
||||
content.append("token symbolic names:\n");
|
||||
names = g.getTokenSymbolicNames();
|
||||
for (String name : names) {
|
||||
content.append(name + "\n");
|
||||
}
|
||||
content.append("\n");
|
||||
|
||||
content.append("rule names:\n");
|
||||
names = g.getRuleNames();
|
||||
for (String name : names) {
|
||||
content.append(name + "\n");
|
||||
}
|
||||
content.append("\n");
|
||||
|
||||
if ( g.isLexer() ) {
|
||||
content.append("channel names:\n");
|
||||
content.append("DEFAULT_TOKEN_CHANNEL\n");
|
||||
content.append("HIDDEN\n");
|
||||
for (String channel : g.channelValueToNameList) {
|
||||
content.append(channel + "\n");
|
||||
}
|
||||
content.append("\n");
|
||||
|
||||
content.append("mode names:\n");
|
||||
for (String mode : ((LexerGrammar)g).modes.keySet()) {
|
||||
content.append(mode + "\n");
|
||||
}
|
||||
}
|
||||
content.append("\n");
|
||||
|
||||
IntegerList serializedATN = ATNSerializer.getSerialized(g.atn);
|
||||
content.append("atn:\n");
|
||||
content.append(serializedATN.toString());
|
||||
|
||||
try {
|
||||
Writer fw = getOutputFileWriter(g, g.name + ".interp");
|
||||
try {
|
||||
fw.write(content.toString());
|
||||
}
|
||||
finally {
|
||||
fw.close();
|
||||
}
|
||||
}
|
||||
catch (IOException ioe) {
|
||||
errMgr.toolError(ErrorType.CANNOT_WRITE_FILE, ioe);
|
||||
}
|
||||
}
|
||||
|
||||
/** This method is used by all code generators to create new output
|
||||
* files. If the outputDir set by -o is not present it will be created.
|
||||
* The final filename is sensitive to the output directory and
|
||||
|
|