This commit is contained in:
Jan Mikkelsen 2017-10-28 16:08:54 +11:00
commit 0f6082eb15
133 changed files with 3407 additions and 3139 deletions

View File

@ -1,5 +1,8 @@
root = true
[*]
tab_width = 4
[*.{java,stg}]
charset = utf-8
insert_final_newline = true

View File

@ -2,30 +2,26 @@ sudo: true
language: java
cache:
directories:
- $HOME/.m2
- $HOME/Library/Caches/Homebrew
stages:
- smoke-test
- main-test
- extended-test
matrix:
include:
- os: linux
compiler: clang
jdk: oraclejdk7
jdk: openjdk7
env:
- TARGET=cpp
- CXX=g++-5
- GROUP=ALL
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-5
- uuid-dev
- clang-3.7
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=cpp
- GROUP=LEXER
stage: main-test
addons:
apt:
sources:
@ -35,106 +31,150 @@ matrix:
- g++-5
- uuid-dev
- clang-3.7
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=cpp
- GROUP=PARSER
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-5
- uuid-dev
- clang-3.7
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=cpp
- GROUP=RECURSION
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-5
- uuid-dev
- clang-3.7
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=swift
- GROUP=LEXER
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=swift
- GROUP=PARSER
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=swift
- GROUP=RECURSION
- os: linux
compiler: clang
jdk: openjdk7
env:
- TARGET=cpp
- CXX=g++-5
- GROUP=PARSER
stage: main-test
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-5
- uuid-dev
- clang-3.7
- os: linux
compiler: clang
jdk: openjdk7
env:
- TARGET=cpp
- CXX=g++-5
- GROUP=RECURSION
stage: main-test
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-5
- uuid-dev
- clang-3.7
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=cpp
- GROUP=LEXER
stage: extended-test
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=cpp
- GROUP=PARSER
stage: extended-test
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=cpp
- GROUP=RECURSION
stage: extended-test
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=swift
- GROUP=LEXER
stage: main-test
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=swift
- GROUP=PARSER
stage: main-test
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=swift
- GROUP=RECURSION
stage: main-test
- os: linux
dist: trusty
compiler: clang
env:
- TARGET=swift
- GROUP=ALL
stage: extended-test
- os: osx
osx_image: xcode8.2
osx_image: xcode9
env:
- TARGET=dotnet
- GROUP=LEXER
stage: extended-test
- os: osx
osx_image: xcode8.2
osx_image: xcode9
env:
- TARGET=dotnet
- GROUP=PARSER
stage: extended-test
- os: osx
osx_image: xcode8.2
osx_image: xcode9
env:
- TARGET=dotnet
- GROUP=RECURSION
stage: extended-test
- os: linux
jdk: oraclejdk7
jdk: openjdk7
env: TARGET=java
stage: extended-test
- os: linux
jdk: openjdk8
env: TARGET=java
stage: extended-test
- os: linux
jdk: oraclejdk8
env: TARGET=java
stage: smoke-test
- os: linux
jdk: oraclejdk7
jdk: openjdk7
env: TARGET=csharp
stage: extended-test
- os: linux
jdk: oraclejdk8
dist: trusty
env:
- TARGET=dotnet
- GROUP=LEXER
stage: main-test
- os: linux
jdk: oraclejdk8
jdk: openjdk8
dist: trusty
env:
- TARGET=dotnet
- GROUP=PARSER
stage: main-test
- os: linux
jdk: oraclejdk8
dist: trusty
env:
- TARGET=dotnet
- GROUP=RECURSION
stage: main-test
- os: linux
jdk: oraclejdk7
jdk: openjdk7
env: TARGET=python2
stage: extended-test
- os: linux
jdk: oraclejdk7
jdk: openjdk7
env: TARGET=python3
addons:
apt:
@ -142,16 +182,20 @@ matrix:
- deadsnakes # source required so it finds the package definition below
packages:
- python3.5
stage: main-test
- os: linux
jdk: oraclejdk7
dist: trusty
jdk: openjdk8
env: TARGET=javascript
stage: main-test
- os: linux
jdk: oraclejdk7
dist: trusty
jdk: openjdk8
env: TARGET=go
stage: main-test
before_install:
- ./.travis/before-install-$TRAVIS_OS_NAME-$TARGET.sh
- f="./.travis/before-install-$TRAVIS_OS_NAME-$TARGET.sh"; ! [ -x "$f" ] || "$f"
script:
- cd runtime-testsuite; ../.travis/run-tests-$TARGET.sh
- cd runtime-testsuite; travis_wait 40 ../.travis/run-tests-$TARGET.sh

View File

@ -1,14 +1,12 @@
set -euo pipefail
# make sure we use trusty repositories (travis by default uses precise)
curl https://repogen.simplylinux.ch/txt/trusty/sources_c4aa56bd26c0f54f391d8fae3e687ef5f6e97c26.txt | sudo tee /etc/apt/sources.list
# install dependencies
# some packages below will be update, swift assumes newer versions
# of, for example, sqlite3 and libicu, without the update some
# tools will not work
sudo apt-get update
sudo apt-get install clang libicu-dev libxml2 sqlite3
sudo apt-get install clang-3.6 libxml2
sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-3.6 100
# This would fix a know linker issue mentioned in:
# https://bugs.swift.org/browse/SR-2299

View File

@ -1,13 +0,0 @@
#!/bin/bash
set -euo pipefail
thisdir=$(dirname "$0")
brew update
brew install cmake
# Work around apparent rvm bug that is in Travis's Xcode image.
# https://github.com/direnv/direnv/issues/210
# https://github.com/travis-ci/travis-ci/issues/6307
shell_session_update() { :; }

View File

@ -4,9 +4,7 @@ set -euo pipefail
thisdir=$(dirname "$0")
# pre-requisites for dotnet core
brew update
brew install openssl
# OpenSSL setup for dotnet core
mkdir -p /usr/local/lib
ln -s /usr/local/opt/openssl/lib/libcrypto.1.0.0.dylib /usr/local/lib/
ln -s /usr/local/opt/openssl/lib/libssl.1.0.0.dylib /usr/local/lib/
@ -19,9 +17,3 @@ sudo installer -pkg /tmp/dotnet-dev-osx-x64.1.0.4.pkg -target /
# make the link
ln -s /usr/local/share/dotnet/dotnet /usr/local/bin/
# Work around apparent rvm bug that is in Travis's Xcode image.
# https://github.com/direnv/direnv/issues/210
# https://github.com/travis-ci/travis-ci/issues/6307
shell_session_update() { :; }

View File

@ -1,12 +0,0 @@
#!/bin/bash
set -euo pipefail
thisdir=$(dirname "$0")
brew update
# Work around apparent rvm bug that is in Travis's Xcode image.
# https://github.com/direnv/direnv/issues/210
# https://github.com/travis-ci/travis-ci/issues/6307
shell_session_update() { :; }

View File

@ -4,7 +4,7 @@
# here since environment variables doesn't pass
# across scripts
if [ $TRAVIS_OS_NAME == "linux" ]; then
export SWIFT_VERSION=swift-3.1.1
export SWIFT_VERSION=swift-4.0
export SWIFT_HOME=$(pwd)/swift/$SWIFT_VERSION-RELEASE-ubuntu14.04/usr/bin/
export PATH=$SWIFT_HOME:$PATH

View File

@ -1,8 +1,8 @@
version: '4.6-SNAPSHOT+AppVeyor.{build}'
os: Windows Server 2012
version: '4.7.1-SNAPSHOT+AppVeyor.{build}'
build: off
build_script:
- mvn -DskipTests install -q --batch-mode
- mvn -DskipTests install --batch-mode
- msbuild runtime/CSharp/runtime/CSharp/Antlr4.vs2013.sln /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" /verbosity:detailed
- msbuild ./runtime-testsuite/target/classes/CSharp/runtime/CSharp/Antlr4.vs2013.sln /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" /verbosity:detailed
test_script:
- mvn install -q -Dantlr-python2-python="C:\Python27\python.exe" -Dantlr-python3-python="C:\Python35\python.exe" -Dantlr-javascript-nodejs="C:\Program Files (x86)\nodejs\node.exe" --batch-mode
build:
verbosity: minimal
- mvn install -Dantlr-python2-python="C:\Python27\python.exe" -Dantlr-python3-python="C:\Python35\python.exe" -Dantlr-javascript-nodejs="C:\Program Files (x86)\nodejs\node.exe" --batch-mode

View File

@ -151,6 +151,21 @@ YYYY/MM/DD, github id, Full name, email
2017/06/11, erikbra, Erik A. Brandstadmoen, erik@brandstadmoen.net
2017/06/10, jm-mikkelsen, Jan Martin Mikkelsen, janm@transactionware.com
2017/06/25, alimg, Alim Gökkaya, alim.gokkaya@gmail.com
2017/06/28, jBugman, Sergey Parshukov, codedby@bugman.me
2017/07/09, neatnerd, Mike Arshinskiy, neatnerd@users.noreply.github.com
2017/07/11, dhalperi, Daniel Halperin, daniel@halper.in
2017/07/17, vaibhavaingankar09, Vaibhav Vaingankar, vbhvvaingankar9@gmail.com
2017/07/23, venkatperi, Venkat Peri, venkatperi@gmail.com
2017/07/27, shirou, WAKAYAMA Shirou, shirou.faw@gmail.com
2017/07/09, neatnerd, Mike Arshinskiy, neatnerd@users.noreply.github.com
2017/07/09, neatnerd, Mike Arshinskiy, neatnerd@users.noreply.github.com
2017/07/27, matthauck, Matt Hauck, matthauck@gmail.com
2017/07/27, shirou, WAKAYAMA Shirou, shirou.faw@gmail.com
2017/08/20, tiagomazzutti, Tiago Mazzutti, tiagomzt@gmail.com
2017/08/20, milanaleksic, Milan Aleksic, milanaleksic@gmail.com
2017/08/29, Eddy Reyes, eddy@mindsight.io
2017/09/09, brauliobz, Bráulio Bezerra, brauliobezerra@gmail.com
2017/09/11, sachinjain024, Sachin Jain, sachinjain024@gmail.com
2017/10/06, bramp, Andrew Brampton, brampton@gmail.com
2017/10/15, simkimsia, Sim Kim Sia, kimcity@gmail.com
2017/10/27, Griffon26, Maurice van der Pot, griffon26@kfk4ever.com
2017/05/29, rlfnb, Ralf Neeb, rlfnb@rlfnb.de

View File

@ -6,7 +6,7 @@ Hi and welcome to the version 4 release of ANTLR! It's named after the fearless
ANTLR is really two things: a tool that translates your grammar to a parser/lexer in Java (or other target language) and the runtime needed by the generated parsers/lexers. Even if you are using the ANTLR Intellij plug-in or ANTLRWorks to run the ANTLR tool, the generated code will still need the runtime library.
The first thing you should do is probably download and install a development tool plug-in. Even if you only use such tools for editing, they are great. Then, follow the instructions below to get the runtime environment available to your system to run generated parsers/lexers. In what follows, I talk about antlr-4.5.3-complete.jar, which has the tool and the runtime and any other support libraries (e.g., ANTLR v4 is written in v3).
The first thing you should do is probably download and install a development tool plug-in. Even if you only use such tools for editing, they are great. Then, follow the instructions below to get the runtime environment available to your system to run generated parsers/lexers. In what follows, I talk about antlr-4.7-complete.jar, which has the tool and the runtime and any other support libraries (e.g., ANTLR v4 is written in v3).
If you are going to integrate ANTLR into your existing build system using mvn, ant, or want to get ANTLR into your IDE such as eclipse or intellij, see Integrating ANTLR into Development Systems.
@ -16,19 +16,21 @@ If you are going to integrate ANTLR into your existing build system using mvn, a
1. Download
```
$ cd /usr/local/lib
$ curl -O http://www.antlr.org/download/antlr-4.5.3-complete.jar
$ curl -O http://www.antlr.org/download/antlr-4.7-complete.jar
```
Or just download in browser from website:
[http://www.antlr.org/download.html](http://www.antlr.org/download.html)
and put it somewhere rational like `/usr/local/lib`.
2. Add `antlr-4.5.3-complete.jar` to your `CLASSPATH`:
2. Add `antlr-4.7-complete.jar` to your `CLASSPATH`:
```
$ export CLASSPATH=".:/usr/local/lib/antlr-4.5.3-complete.jar:$CLASSPATH"
$ export CLASSPATH=".:/usr/local/lib/antlr-4.7-complete.jar:$CLASSPATH"
```
It's also a good idea to put this in your `.bash_profile` or whatever your startup script is.
3. Create aliases for the ANTLR Tool, and `TestRig`.
```
$ alias antlr4='java -Xmx500M -cp "/usr/local/lib/antlr-4.5.3-complete.jar:$CLASSPATH" org.antlr.v4.Tool'
$ alias antlr4='java -Xmx500M -cp "/usr/local/lib/antlr-4.7-complete.jar:$CLASSPATH" org.antlr.v4.Tool'
$ alias grun='java org.antlr.v4.gui.TestRig'
```
@ -39,11 +41,11 @@ $ alias grun='java org.antlr.v4.gui.TestRig'
0. Install Java (version 1.6 or higher)
1. Download antlr-4.5.3-complete.jar (or whatever version) from [http://www.antlr.org/download/](http://www.antlr.org/download/)
Save to your directory for 3rd party Java libraries, say `C:\Javalib`
2. Add `antlr-4.5-complete.jar` to CLASSPATH, either:
2. Add `antlr-4.5.3-complete.jar` to CLASSPATH, either:
* Permanently: Using System Properties dialog > Environment variables > Create or append to `CLASSPATH` variable
* Temporarily, at command line:
```
SET CLASSPATH=.;C:\Javalib\antlr-4.5.3-complete.jar;%CLASSPATH%
SET CLASSPATH=.;C:\Javalib\antlr-4.7-complete.jar;%CLASSPATH%
```
3. Create short convenient commands for the ANTLR Tool, and TestRig, using batch files or doskey commands:
* Batch files (in directory in system PATH) antlr4.bat and grun.bat
@ -65,7 +67,7 @@ Either launch org.antlr.v4.Tool directly:
```
$ java org.antlr.v4.Tool
ANTLR Parser Generator Version 4.5.3
ANTLR Parser Generator Version 4.7
-o ___ specify output directory where all output is generated
-lib ___ specify location of .tokens files
...
@ -74,8 +76,8 @@ ANTLR Parser Generator Version 4.5.3
or use -jar option on java:
```
$ java -jar /usr/local/lib/antlr-4.5.3-complete.jar
ANTLR Parser Generator Version 4.5.3
$ java -jar /usr/local/lib/antlr-4.7-complete.jar
ANTLR Parser Generator Version 4.7
-o ___ specify output directory where all output is generated
-lib ___ specify location of .tokens files
...

Binary file not shown.

After

Width:  |  Height:  |  Size: 379 KiB

View File

@ -1,9 +1,15 @@
# ANTLR4 Language Target, Runtime for Swift
## Performance Note
To use ANTLR4 Swift target in production environment, make sure to turn on compiler optimizations by following [these instructions](https://github.com/apple/swift-package-manager/blob/master/Documentation/Usage.md#build-configurations) if you use SwiftPM to build your project. If you are using Xcode to build your project, it's unlikely you will not use `release` build for production build.
Conclusion is, you need to turn on `release` mode (which will have all the optimization pre configured for you) so the ANTLR4 Swift target can have reasonable parsing speed.
## Install ANTLR4
Make sure you have the ANTLR
installed.[The getting started guide](getting-started.md) should get
installed. [The getting started guide](getting-started.md) should get
you started.
## Create a Swift lexer or parser
@ -18,82 +24,120 @@ For a full list of antlr4 tool options, please visit the
## Build your Swift project with ANTLR runtime
The following instructions are assuming Xcode as the IDE:
### Note
* __Add parser/lexer to project__. Make sure the parsers/lexers
We use __boot.py__ script located at the root of the Swift runtime folder
`antlr4/runtime/Swift` to provide additional support for both Xcode-based
projects and SPM-based projects. Below sections are organized for both of
the flavors. If you want to quickly get started, try:
```
python boot.py --help
```
for information about this script.
### Xcode Projects
Note that even if you are otherwise using ANTLR from a binary distribution,
you should compile the ANTLR Swift runtime from source, because the Swift
language does not yet have a stable ABI.
ANTLR uses Swift Package Manager to generate Xcode project files. Note that
Swift Package Manager does not currently support iOS, watchOS, or tvOS, so
if you wish to use those platforms, you will need to alter the project build
settings manually as appropriate.
#### Download source code for ANTLR
```
git clone https://github.com/antlr/antlr4
```
#### Generate Xcode project for ANTLR runtime
The `boot.py` script includes a wrapper around `swift package
generate-xcodeproj`. Use this to generate `Antlr4.xcodeproj` for the ANTLR
Swift runtime. (using _swift package generate-xcodeproj_ is not recommended)
since the project is dependent on some parser files generated by _boot.py_.
```
cd antlr4/runtime/Swift
python boot.py --gen-xcodeproj
```
#### Import ANTLR Swift runtime into your project
Open your own project in Xcode.
Open Finder in the `runtime/Swift` directory:
```
# From antlr4/runtime/Swift
open .
```
Drag `Antlr4.xcodeproj` into your project.
After this is done, your Xcode project navigator will be something like the
screenshot below. In this example, your own project is "Smalltalk", and you
will be able to see `Antlr4.xcodeproj` shown as a contained project.
<img src=images/xcodenav.png width="300">
#### Edit the build settings if necessary
Swift Package Manager currently does not support iOS, watchOS, or tvOS. If
you wish to build for those platforms, you will need to alter the project
build settings manually.
#### Add generated parser and lexer to project
Make sure the parsers/lexers
generated in __step 2__ are added to the project. To do this, you can
drag the generated files from Finder to the Xcode IDE. Remember to
check __Copy items if needed__ to make sure the files are actually
moved into the project folder instead of symbolic links (see the
screenshot below). After moving you will be able to see your files in
the project navigator. But when you open one of the files, you will
see Xcode complaining the module "Antlr4" could not be found at the
import statement. This is expected, since we still need the ANTLR
Swift runtime for those missing symbols.
the project navigator. Make sure that the Target Membership settings
are correct for your project.
<img src=images/dragfile.png width="500">
* __Download ANTLR runtime__. Due to unstable ABI of Swift language,
there will not be a single "library" for the Swift ANTLR runtime for
now. To get Swift ANTLR runtime, clone the ANTLR repository. Open it
in finder. From the root directory of the repo, go to runtime/Swift
folder. You will see the Xcode project manifest file:
__Antlr4.xcodeproj__.
#### Add the ANTLR Swift runtime as a dependency
* __Import ANTLR Swift runtime into project__. Drag Antlr4.xcodeproj
into your project, after this is done, your Xcode project navigator
will be something like the screenshot below. In this case, your own
project is "Smalltalk", and you will be able to see the
Antlr4.xcodeproj shown as a contained project. The error message will
still be there, that's because we still need to tell Xcode how to find
the runtime.
<img src=images/xcodenav.png width="300">
* __Build ANTLR runtime__. By expanding the "Products" folder in the
inner project (Antlr4.xcodeproj), you will see two Antlr4.framework
files. ".framework" file is the swift version of ".jar", ".a" as in
JAVA, C/C++ Initially those two files should be red, that's because
they are not built. To build, click the "target selection" button
right next to your Xcode run button. And in the drop down select the
target you want to build. And you will see the two Antlr4.framework
files are for iOS and OSX, as shown below. After target selection,
press "CMD+B", and Xcode will build the framework for you. Then you
will see one of the frameworks become black.
<img src=images/targetselection.png width="500">
* __Add dependencies__. Simply adding ANTLR Swift runtime and build
the artifact is not enough. You still need to specify
dependencies. Click your own project (Smalltalk), and you will see
project setting page. Go to "Build Phase", and inside it make sure
your ANTLR Swift runtime framework is added to both "__Target
Dependencies__" and "__Link Binary With Libraries__" sections, as
shown below. After correctly added dependencies, the error message for
importing library will be gone.
Select your own project in Xcode and go to the Build Phases settings panel.
Add the ANTLR runtime under __Target Dependencies__ and __Link Binary With
Libraries__.
<img src=images/xcodedep.png width="800">
## Example playground
#### Build your project
The Swift runtime includes an Xcode playground to get started with.
The runtime and generated grammar should now build correctly.
First go to the ANTLR4 repository, and open
`runtime/Swift/Antlr4.xcworkspace` in Xcode. Select "Antlr4 OSX > My
Mac" as the build target, and build the project as normal. The
playground should then be active.
### Swift Package Manager Projects
The playground includes a simple grammar called "Hello", and an
example for walking the parse tree. You should see in the playground
output that it is printing messages for each node in the parse tree as
it walks.
Since we cannot have a separate repository for Swift target (see issue [#1774](https://github.com/antlr/antlr4/issues/1774)),
and Swift is currently not ABI stable. We currently support support SPM-based
projects by creating temporary local repository.
The grammar is defined in the playground's `Resources/Hello.g4`. The
parser was generated from the grammar using ANTLR like this:
For people using [Swift Package Manager](https://swift.org/package-manager/),
the __boot.py__ script supports generating local repository that can be used
as a dependency to your project. Simply run:
```
antlr4 -Dlanguage=Swift -visitor -o ../Sources/Autogen Hello.g4
```
python boot.py --gen-spm-module
```
The example tree walker is in Sources/HelloWalker.swift.
The prompt will show something like below:
<img src=images/gen_spm_module.png width="800">
Put the SPM directive that contains the url to temporary repository to your
project's Package.swift. And run `swift build` in your project.
The project is generated in your system's `/tmp/` directory, if you find it
inconvenient, consider copy that generated ANTLR repository to some place
that won't be cleaned automatically and update `url` parameter in your
`Package.swift` file.

View File

@ -72,7 +72,7 @@ TokenStartColumnEquals(i) ::= <%self._tokenStartCharPositionInLine == <i>%>
ImportListener(X) ::= ""
GetExpectedTokenNames() ::= "try self.getExpectedTokens().toString(self.tokenNames)"
GetExpectedTokenNames() ::= "try self.getExpectedTokens().toString(self.getVocabulary())"
RuleInvocationStack() ::= "getRuleInvocationStack().description.replacingOccurrences(of: \"\\\"\", with: \"\")"

View File

@ -23,6 +23,7 @@ public class TestCodePointCharStream {
CodePointCharStream s = CharStreams.fromString("");
assertEquals(0, s.size());
assertEquals(0, s.index());
assertEquals("", s.toString());
}
@Test

View File

@ -145,7 +145,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
String projectName = "testcase-" + System.currentTimeMillis();
String projectDir = getTmpDir() + "/" + projectName;
buildProject(projectDir);
buildProject(projectDir, projectName);
return execTest(projectDir, projectName);
}
@ -183,12 +183,12 @@ public class BaseSwiftTest implements RuntimeTestSupport {
Collections.addAll(this.sourceFiles, files);
}
private void buildProject(String projectDir) {
private void buildProject(String projectDir, String projectName) {
mkdir(projectDir);
fastFailRunProcess(projectDir, SWIFT_CMD, "package", "init", "--type", "executable");
for (String sourceFile: sourceFiles) {
String absPath = getTmpDir() + "/" + sourceFile;
fastFailRunProcess(getTmpDir(), "mv", "-f", absPath, projectDir + "/Sources/");
fastFailRunProcess(getTmpDir(), "mv", "-f", absPath, projectDir + "/Sources/" + projectName);
}
fastFailRunProcess(getTmpDir(), "mv", "-f", "input", projectDir);
@ -201,7 +201,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
"-Xlinker", "-rpath",
"-Xlinker", dylibPath);
if (buildResult.b.length() > 0) {
throw new RuntimeException("unit test build failed: " + buildResult.b);
throw new RuntimeException("unit test build failed: " + buildResult.a + "\n" + buildResult.b);
}
} catch (IOException | InterruptedException e) {
e.printStackTrace();
@ -251,7 +251,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
addSourceFiles("main.swift");
String projectName = "testcase-" + System.currentTimeMillis();
String projectDir = getTmpDir() + "/" + projectName;
buildProject(projectDir);
buildProject(projectDir, projectName);
return execTest(projectDir, projectName);
}

View File

@ -1092,7 +1092,10 @@ nextTransition_continue: ;
protected internal Guid ReadUUID()
{
byte[] d = BitConverter.GetBytes (ReadLong ());
Array.Reverse(d);
if(BitConverter.IsLittleEndian)
{
Array.Reverse(d);
}
short c = (short)ReadInt();
short b = (short)ReadInt();
int a = ReadInt32();

View File

@ -33,6 +33,7 @@ endif()
if(CMAKE_VERSION VERSION_EQUAL "3.3.0" OR
CMAKE_VERSION VERSION_GREATER "3.3.0")
CMAKE_POLICY(SET CMP0059 OLD)
CMAKE_POLICY(SET CMP0054 OLD)
endif()
if(CMAKE_SYSTEM_NAME MATCHES "Linux")
@ -61,7 +62,11 @@ if (WITH_DEMO)
endif()
endif(WITH_DEMO)
set(MY_CXX_WARNING_FLAGS " -Wall -pedantic -W")
if (MSVC_VERSION)
set(MY_CXX_WARNING_FLAGS " /W4")
else()
set(MY_CXX_WARNING_FLAGS " -Wall -pedantic -W")
endif()
# Initialize CXXFLAGS.
if("${CMAKE_VERSION}" VERSION_GREATER 3.1.0)
@ -75,11 +80,18 @@ else()
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -std=c++11")
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 -g ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_MINSIZEREL "${CMAKE_CXX_FLAGS_MINSIZEREL} -Os -DNDEBUG ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3 -DNDEBUG ${MY_CXX_WARNING_FLGAS}")
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O2 -g ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${MY_CXX_WARNING_FLAGS}")
if (MSVC_VERSION)
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /Od /Zi /MP ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_MINSIZEREL "${CMAKE_CXX_FLAGS_MINSIZEREL} /O1 /Oi /Ob2 /Gy /MP /DNDEBUG ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /O2 /Oi /Ob2 /Gy /MP /DNDEBUG ${MY_CXX_WARNING_FLGAS}")
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} /O2 /Oi /Ob2 /Gy /MP /Zi ${MY_CXX_WARNING_FLAGS}")
else()
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 -g ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_MINSIZEREL "${CMAKE_CXX_FLAGS_MINSIZEREL} -Os -DNDEBUG ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3 -DNDEBUG ${MY_CXX_WARNING_FLGAS}")
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O2 -g ${MY_CXX_WARNING_FLAGS}")
endif()
# Compiler-specific C++11 activation.
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
@ -101,6 +113,8 @@ elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang" AND CMAKE_SYSTEM_NAME MATCHES
if (WITH_LIBCXX)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
endif()
elseif ( MSVC_VERSION GREATER 1800 OR MSVC_VERSION EQUAL 1800 )
# Visual Studio 2012+ supports c++11 features
else ()
message(FATAL_ERROR "Your C++ compiler does not support C++11.")
endif ()

View File

@ -44,7 +44,11 @@ elseif(APPLE)
target_link_libraries(antlr4_static ${COREFOUNDATION_LIBRARY})
endif()
set(disabled_compile_warnings "-Wno-overloaded-virtual")
if (MSVC_VERSION)
set(disabled_compile_warnings "/wd4251")
else()
set(disabled_compile_warnings "-Wno-overloaded-virtual")
endif()
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
set(disabled_compile_warnings "${disabled_compile_warnings} -Wno-dollar-in-identifier-extension -Wno-four-char-constants")
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
@ -57,6 +61,15 @@ if (WIN32)
set(extra_share_compile_flags "-DANTLR4CPP_EXPORTS")
set(extra_static_compile_flags "-DANTLR4CPP_STATIC")
endif(WIN32)
if (MSVC_VERSION)
target_compile_options(antlr4_shared PRIVATE "/MD$<$<CONFIG:Debug>:d>")
target_compile_options(antlr4_static PRIVATE "/MT$<$<CONFIG:Debug>:d>")
endif()
set(static_lib_suffix "")
if (MSVC_VERSION)
set(static_lib_suffix "-static")
endif()
set_target_properties(antlr4_shared
PROPERTIES VERSION ${ANTLR_VERSION}
@ -72,7 +85,7 @@ set_target_properties(antlr4_shared
set_target_properties(antlr4_static
PROPERTIES VERSION ${ANTLR_VERSION}
SOVERSION ${ANTLR_VERSION}
OUTPUT_NAME antlr4-runtime
OUTPUT_NAME "antlr4-runtime${static_lib_suffix}"
ARCHIVE_OUTPUT_DIRECTORY ${LIB_OUTPUT_DIR}
COMPILE_FLAGS "${disabled_compile_warnings} ${extra_static_compile_flags}")

View File

@ -101,7 +101,7 @@ size_t LexerActionExecutor::generateHashCode() const {
for (auto lexerAction : _lexerActions) {
hash = MurmurHash::update(hash, lexerAction);
}
MurmurHash::finish(hash, _lexerActions.size());
hash = MurmurHash::finish(hash, _lexerActions.size());
return hash;
}

View File

@ -15,7 +15,7 @@ namespace atn {
* utility methods for analyzing configuration sets for conflicts and/or
* ambiguities.
*/
enum class ANTLR4CPP_PUBLIC PredictionMode {
enum class PredictionMode {
/**
* The SLL(*) prediction mode. This prediction mode ignores the current
* parser context when making predictions. This is the fastest prediction

View File

@ -0,0 +1,154 @@
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
package antlr
import (
"testing"
)
type commonTokenStreamTestLexer struct {
*BaseLexer
tokens []Token
i int
}
func (l *commonTokenStreamTestLexer) NextToken() Token {
tmp := l.tokens[l.i]
l.i++
return tmp
}
func TestCommonTokenStreamOffChannel(t *testing.T) {
assert := assertNew(t)
lexEngine := &commonTokenStreamTestLexer{
tokens: []Token{
newTestCommonToken(1, " ", LexerHidden), // 0
newTestCommonToken(1, "x", LexerDefaultTokenChannel), // 1
newTestCommonToken(1, " ", LexerHidden), // 2
newTestCommonToken(1, "=", LexerDefaultTokenChannel), // 3
newTestCommonToken(1, "34", LexerDefaultTokenChannel), // 4
newTestCommonToken(1, " ", LexerHidden), // 5
newTestCommonToken(1, " ", LexerHidden), // 6
newTestCommonToken(1, ";", LexerDefaultTokenChannel), // 7
newTestCommonToken(1, "\n", LexerHidden), // 9
newTestCommonToken(TokenEOF, "", LexerDefaultTokenChannel), // 10
},
}
tokens := NewCommonTokenStream(lexEngine, TokenDefaultChannel)
assert.Equal("x", tokens.LT(1).GetText()) // must skip first off channel token
tokens.Consume()
assert.Equal("=", tokens.LT(1).GetText())
assert.Equal("x", tokens.LT(-1).GetText())
tokens.Consume()
assert.Equal("34", tokens.LT(1).GetText())
assert.Equal("=", tokens.LT(-1).GetText())
tokens.Consume()
assert.Equal(";", tokens.LT(1).GetText())
assert.Equal("34", tokens.LT(-1).GetText())
tokens.Consume()
assert.Equal(TokenEOF, tokens.LT(1).GetTokenType())
assert.Equal(";", tokens.LT(-1).GetText())
assert.Equal("34", tokens.LT(-2).GetText())
assert.Equal("=", tokens.LT(-3).GetText())
assert.Equal("x", tokens.LT(-4).GetText())
}
func TestCommonTokenStreamFetchOffChannel(t *testing.T) {
assert := assertNew(t)
lexEngine := &commonTokenStreamTestLexer{
tokens: []Token{
newTestCommonToken(1, " ", LexerHidden), // 0
newTestCommonToken(1, "x", LexerDefaultTokenChannel), // 1
newTestCommonToken(1, " ", LexerHidden), // 2
newTestCommonToken(1, "=", LexerDefaultTokenChannel), // 3
newTestCommonToken(1, "34", LexerDefaultTokenChannel), // 4
newTestCommonToken(1, " ", LexerHidden), // 5
newTestCommonToken(1, " ", LexerHidden), // 6
newTestCommonToken(1, ";", LexerDefaultTokenChannel), // 7
newTestCommonToken(1, " ", LexerHidden), // 8
newTestCommonToken(1, "\n", LexerHidden), // 9
newTestCommonToken(TokenEOF, "", LexerDefaultTokenChannel), // 10
},
}
tokens := NewCommonTokenStream(lexEngine, TokenDefaultChannel)
tokens.Fill()
assert.Nil(tokens.getHiddenTokensToLeft(0, -1))
assert.Nil(tokens.getHiddenTokensToRight(0, -1))
assert.Equal("[[@0,0:0=' ',<1>,channel=1,0:-1]]", tokensToString(tokens.getHiddenTokensToLeft(1, -1)))
assert.Equal("[[@2,0:0=' ',<1>,channel=1,0:-1]]", tokensToString(tokens.getHiddenTokensToRight(1, -1)))
assert.Nil(tokens.getHiddenTokensToLeft(2, -1))
assert.Nil(tokens.getHiddenTokensToRight(2, -1))
assert.Equal("[[@2,0:0=' ',<1>,channel=1,0:-1]]", tokensToString(tokens.getHiddenTokensToLeft(3, -1)))
assert.Nil(tokens.getHiddenTokensToRight(3, -1))
assert.Nil(tokens.getHiddenTokensToLeft(4, -1))
assert.Equal("[[@5,0:0=' ',<1>,channel=1,0:-1], [@6,0:0=' ',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToRight(4, -1)))
assert.Nil(tokens.getHiddenTokensToLeft(5, -1))
assert.Equal("[[@6,0:0=' ',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToRight(5, -1)))
assert.Equal("[[@5,0:0=' ',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToLeft(6, -1)))
assert.Nil(tokens.getHiddenTokensToRight(6, -1))
assert.Equal("[[@5,0:0=' ',<1>,channel=1,0:-1], [@6,0:0=' ',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToLeft(7, -1)))
assert.Equal("[[@8,0:0=' ',<1>,channel=1,0:-1], [@9,0:0='\\n',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToRight(7, -1)))
assert.Nil(tokens.getHiddenTokensToLeft(8, -1))
assert.Equal("[[@9,0:0='\\n',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToRight(8, -1)))
assert.Equal("[[@8,0:0=' ',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToLeft(9, -1)))
assert.Nil(tokens.getHiddenTokensToRight(9, -1))
}
// commonTokenStreamTestLexerSingleEOF is a test double whose NextToken
// always yields EOF, simulating an empty input stream.
type commonTokenStreamTestLexerSingleEOF struct {
	*BaseLexer
	tokens []Token // appears unused by this lexer; kept for parity with sibling test lexers — TODO confirm
	i int // appears unused by this lexer; kept for parity with sibling test lexers — TODO confirm
}
// NextToken always returns a freshly built EOF token with empty text on
// the default channel.
func (l *commonTokenStreamTestLexerSingleEOF) NextToken() Token {
	return newTestCommonToken(TokenEOF, "", LexerDefaultTokenChannel)
}
// TestCommonTokenStreamSingleEOF verifies that filling a stream whose
// lexer produces only EOF yields exactly one token, with the lookahead
// reporting EOF and the cursor still at index 0.
func TestCommonTokenStreamSingleEOF(t *testing.T) {
	a := assertNew(t)
	source := &commonTokenStreamTestLexerSingleEOF{}
	stream := NewCommonTokenStream(source, TokenDefaultChannel)
	stream.Fill()

	a.Equal(TokenEOF, stream.LA(1))
	a.Equal(0, stream.index)
	a.Equal(1, stream.Size())
}
// TestCommonTokenStreamCannotConsumeEOF verifies that once the stream is
// positioned at EOF, calling Consume panics instead of advancing.
func TestCommonTokenStreamCannotConsumeEOF(t *testing.T) {
	a := assertNew(t)
	source := &commonTokenStreamTestLexerSingleEOF{}
	stream := NewCommonTokenStream(source, TokenDefaultChannel)
	stream.Fill()

	a.Equal(TokenEOF, stream.LA(1))
	a.Equal(0, stream.index)
	a.Equal(1, stream.Size())

	// Consuming EOF is an error; it must panic rather than move past it.
	a.Panics(stream.Consume)
}

View File

@ -21,11 +21,11 @@ type Lexer interface {
Emit() Token
setChannel(int)
pushMode(int)
popMode() int
setType(int)
setMode(int)
SetChannel(int)
PushMode(int)
PopMode() int
SetType(int)
SetMode(int)
}
type BaseLexer struct {
@ -150,7 +150,7 @@ func (b *BaseLexer) GetSourceName() string {
return b.GrammarFileName
}
func (b *BaseLexer) setChannel(v int) {
func (b *BaseLexer) SetChannel(v int) {
b.channel = v
}
@ -250,11 +250,11 @@ func (b *BaseLexer) More() {
b.thetype = LexerMore
}
func (b *BaseLexer) setMode(m int) {
func (b *BaseLexer) SetMode(m int) {
b.mode = m
}
func (b *BaseLexer) pushMode(m int) {
func (b *BaseLexer) PushMode(m int) {
if LexerATNSimulatorDebug {
fmt.Println("pushMode " + strconv.Itoa(m))
}
@ -262,7 +262,7 @@ func (b *BaseLexer) pushMode(m int) {
b.mode = m
}
func (b *BaseLexer) popMode() int {
func (b *BaseLexer) PopMode() int {
if len(b.modeStack) == 0 {
panic("Empty Stack")
}
@ -331,7 +331,7 @@ func (b *BaseLexer) GetType() int {
return b.thetype
}
func (b *BaseLexer) setType(t int) {
func (b *BaseLexer) SetType(t int) {
b.thetype = t
}
@ -361,7 +361,7 @@ func (b *BaseLexer) GetATN() *ATN {
// Return a list of all Token objects in input char stream.
// Forces load of all tokens. Does not include EOF token.
// /
func (b *BaseLexer) getAllTokens() []Token {
func (b *BaseLexer) GetAllTokens() []Token {
vl := b.Virt
tokens := make([]Token, 0)
t := vl.NextToken()

View File

@ -101,7 +101,7 @@ func NewLexerTypeAction(thetype int) *LexerTypeAction {
}
func (l *LexerTypeAction) execute(lexer Lexer) {
lexer.setType(l.thetype)
lexer.SetType(l.thetype)
}
func (l *LexerTypeAction) hash() int {
@ -145,7 +145,7 @@ func NewLexerPushModeAction(mode int) *LexerPushModeAction {
// <p>This action is implemented by calling {@link Lexer//pushMode} with the
// value provided by {@link //getMode}.</p>
func (l *LexerPushModeAction) execute(lexer Lexer) {
lexer.pushMode(l.mode)
lexer.PushMode(l.mode)
}
func (l *LexerPushModeAction) hash() int {
@ -190,7 +190,7 @@ var LexerPopModeActionINSTANCE = NewLexerPopModeAction()
// <p>This action is implemented by calling {@link Lexer//popMode}.</p>
func (l *LexerPopModeAction) execute(lexer Lexer) {
lexer.popMode()
lexer.PopMode()
}
func (l *LexerPopModeAction) String() string {
@ -242,7 +242,7 @@ func NewLexerModeAction(mode int) *LexerModeAction {
// <p>This action is implemented by calling {@link Lexer//mode} with the
// value provided by {@link //getMode}.</p>
func (l *LexerModeAction) execute(lexer Lexer) {
lexer.setMode(l.mode)
lexer.SetMode(l.mode)
}
func (l *LexerModeAction) hash() int {
@ -341,7 +341,7 @@ func NewLexerChannelAction(channel int) *LexerChannelAction {
// <p>This action is implemented by calling {@link Lexer//setChannel} with the
// value provided by {@link //getChannel}.</p>
func (l *LexerChannelAction) execute(lexer Lexer) {
lexer.setChannel(l.channel)
lexer.SetChannel(l.channel)
}
func (l *LexerChannelAction) hash() int {

View File

@ -0,0 +1,98 @@
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
// These assert functions are borrowed from https://github.com/stretchr/testify/ (MIT License)
package antlr
import (
"fmt"
"reflect"
"testing"
)
// assert bundles a *testing.T so the helper methods below can report
// failures with a testify-like API (these helpers are borrowed from
// stretchr/testify, per the file header).
type assert struct {
	t *testing.T
}
// assertNew wraps t in an assert helper for use inside a single test.
func assertNew(t *testing.T) *assert {
	return &assert{t: t}
}
// Equal reports a test failure unless expected and actual compare equal
// under objectsAreEqual; it returns true when they match.
func (a *assert) Equal(expected, actual interface{}) bool {
	if objectsAreEqual(expected, actual) {
		return true
	}
	msg := fmt.Sprintf("Not equal:\n"+
		"expected: %#v\n"+
		" actual: %#v\n", expected, actual)
	return a.Fail(msg)
}
// objectsAreEqual reports whether expected and actual are deeply equal.
// Two nil interfaces are equal; nil compared against non-nil is not.
func objectsAreEqual(expected, actual interface{}) bool {
	if expected != nil && actual != nil {
		return reflect.DeepEqual(expected, actual)
	}
	// At least one side is nil: equal only when both are.
	return expected == actual
}
// Nil reports a test failure unless object is nil (as defined by isNil);
// it returns true when object is nil.
func (a *assert) Nil(object interface{}) bool {
	if !isNil(object) {
		return a.Fail(fmt.Sprintf("Expected nil, but got: %#v", object))
	}
	return true
}
// NotNil reports a test failure if object is nil (as defined by isNil);
// it returns true when object is non-nil.
func (a *assert) NotNil(object interface{}) bool {
	if isNil(object) {
		return a.Fail("Expected value not to be nil.")
	}
	return true
}
// isNil checks if a specified object is nil or not, without failing.
// It treats both a nil interface and a typed nil of a nilable kind
// (chan, func, interface, map, pointer, slice) as nil.
func isNil(object interface{}) bool {
	if object == nil {
		return true
	}
	v := reflect.ValueOf(object)
	switch v.Kind() {
	// These are exactly the kinds in the contiguous Chan..Slice range
	// for which Value.IsNil is defined.
	case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
		return v.IsNil()
	}
	return false
}
// Panics reports a test failure unless calling f panics; it returns true
// when f did panic.
func (a *assert) Panics(f func()) bool {
	funcDidPanic, panicValue := didPanic(f)
	if funcDidPanic {
		return true
	}
	return a.Fail(fmt.Sprintf("func %#v should panic\n\r\tPanic value:\t%v", f, panicValue))
}
// Fail reports a failure through the underlying *testing.T and always
// returns false, so helpers can end with `return a.Fail(...)`.
func (a *assert) Fail(failureMessage string) bool {
	a.t.Errorf("%s", failureMessage)
	return false
}
// didPanic returns true if the function passed to it panics. Otherwise, it
// returns false. The recovered value, if any, is returned as the second
// result.
//
// The flag starts as true and is cleared only if f returns normally, so a
// panic whose recovered value is nil (e.g. panic(nil) on Go versions that
// do not wrap it) is still correctly reported as a panic. The original
// implementation set the flag from recover()'s value and missed that case.
func didPanic(f func()) (bool, interface{}) {
	panicked := true
	var message interface{}
	func() {
		defer func() {
			message = recover()
		}()
		// call the target function
		f()
		// Only reached when f did not panic.
		panicked = false
	}()
	return panicked, message
}

View File

@ -0,0 +1,107 @@
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
package antlr
/*
LexerB is a lexer for testing purposes.
This file is generated from the grammar below.
lexer grammar LexerB;
ID : 'a'..'z'+;
INT : '0'..'9'+;
SEMI : ';';
ASSIGN : '=';
PLUS : '+';
MULT : '*';
WS : ' '+;
*/
// lexerB_serializedLexerAtn is the ANTLR-tool-generated serialized ATN for
// LexerB; it is deserialized once at package init time (see below).
var lexerB_serializedLexerAtn = []uint16{
	3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 9, 40, 8,
	1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9,
	7, 4, 8, 9, 8, 3, 2, 6, 2, 19, 10, 2, 13, 2, 14, 2, 20, 3, 3, 6, 3, 24,
	10, 3, 13, 3, 14, 3, 25, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7, 3, 7,
	3, 8, 6, 8, 37, 10, 8, 13, 8, 14, 8, 38, 2, 2, 9, 3, 3, 5, 4, 7, 5, 9,
	6, 11, 7, 13, 8, 15, 9, 3, 2, 2, 2, 42, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2,
	2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2,
	2, 2, 2, 15, 3, 2, 2, 2, 3, 18, 3, 2, 2, 2, 5, 23, 3, 2, 2, 2, 7, 27, 3,
	2, 2, 2, 9, 29, 3, 2, 2, 2, 11, 31, 3, 2, 2, 2, 13, 33, 3, 2, 2, 2, 15,
	36, 3, 2, 2, 2, 17, 19, 4, 99, 124, 2, 18, 17, 3, 2, 2, 2, 19, 20, 3, 2,
	2, 2, 20, 18, 3, 2, 2, 2, 20, 21, 3, 2, 2, 2, 21, 4, 3, 2, 2, 2, 22, 24,
	4, 50, 59, 2, 23, 22, 3, 2, 2, 2, 24, 25, 3, 2, 2, 2, 25, 23, 3, 2, 2,
	2, 25, 26, 3, 2, 2, 2, 26, 6, 3, 2, 2, 2, 27, 28, 7, 61, 2, 2, 28, 8, 3,
	2, 2, 2, 29, 30, 7, 63, 2, 2, 30, 10, 3, 2, 2, 2, 31, 32, 7, 45, 2, 2,
	32, 12, 3, 2, 2, 2, 33, 34, 7, 44, 2, 2, 34, 14, 3, 2, 2, 2, 35, 37, 7,
	34, 2, 2, 36, 35, 3, 2, 2, 2, 37, 38, 3, 2, 2, 2, 38, 36, 3, 2, 2, 2, 38,
	39, 3, 2, 2, 2, 39, 16, 3, 2, 2, 2, 6, 2, 20, 25, 38, 2,
}
// lexerB_lexerDeserializer/lexerB_lexerAtn: the ATN decoded from the table
// above at package load time.
var lexerB_lexerDeserializer = NewATNDeserializer(nil)
var lexerB_lexerAtn = lexerB_lexerDeserializer.DeserializeFromUInt16(lexerB_serializedLexerAtn)
// Generated name tables: channels, modes, literal/symbolic token names and
// rule names, indexed by token type (index 0 is unused padding).
var lexerB_lexerChannelNames = []string{
	"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}
var lexerB_lexerModeNames = []string{
	"DEFAULT_MODE",
}
var lexerB_lexerLiteralNames = []string{
	"", "", "", "';'", "'='", "'+'", "'*'",
}
var lexerB_lexerSymbolicNames = []string{
	"", "ID", "INT", "SEMI", "ASSIGN", "PLUS", "MULT", "WS",
}
var lexerB_lexerRuleNames = []string{
	"ID", "INT", "SEMI", "ASSIGN", "PLUS", "MULT", "WS",
}
// LexerB is the generated lexer for the LexerB grammar shown in the
// comment at the top of this file; it exists for test use.
type LexerB struct {
	*BaseLexer
	channelNames []string
	modeNames []string
	// TODO: EOF string
}
// lexerB_lexerDecisionToDFA caches one DFA per ATN decision state; the
// slots are populated at package init time.
var lexerB_lexerDecisionToDFA = make([]*DFA, len(lexerB_lexerAtn.DecisionToState))
func init() {
	// Build a DFA for each decision state of the deserialized ATN.
	for index, ds := range lexerB_lexerAtn.DecisionToState {
		lexerB_lexerDecisionToDFA[index] = NewDFA(ds, index)
	}
}
// NewLexerB builds a LexerB reading from input, wiring up the generated
// ATN simulator, the shared DFA cache and the generated name tables.
func NewLexerB(input CharStream) *LexerB {
	l := new(LexerB)
	l.BaseLexer = NewBaseLexer(input)
	l.Interpreter = NewLexerATNSimulator(l, lexerB_lexerAtn, lexerB_lexerDecisionToDFA, NewPredictionContextCache())
	l.channelNames = lexerB_lexerChannelNames
	l.modeNames = lexerB_lexerModeNames
	l.RuleNames = lexerB_lexerRuleNames
	l.LiteralNames = lexerB_lexerLiteralNames
	l.SymbolicNames = lexerB_lexerSymbolicNames
	l.GrammarFileName = "LexerB.g4"
	// TODO: l.EOF = TokenEOF
	return l
}
// LexerB tokens. These values match the token types in the serialized ATN
// and index into the literal/symbolic name tables above.
const (
	LexerBID = 1
	LexerBINT = 2
	LexerBSEMI = 3
	LexerBASSIGN = 4
	LexerBPLUS = 5
	LexerBMULT = 6
	LexerBWS = 7
)

View File

@ -0,0 +1,30 @@
package antlr
import (
"fmt"
"strings"
)
// newTestCommonToken builds a CommonToken with the given token type, text
// and channel; line is fixed at 0 and column at -1.
// notice: test purpose only
func newTestCommonToken(tokenType int, text string, channel int) *CommonToken {
	t := new(CommonToken)
	t.BaseToken = new(BaseToken)
	t.tokenType = tokenType
	t.channel = channel
	t.text = text
	t.line = 0
	t.column = -1
	return t
}
// tokensToString renders tokens as a bracketed, comma-separated list,
// e.g. "[tokenA, tokenB]"; each token is formatted with %v.
// notice: test purpose only
func tokensToString(tokens []Token) string {
	parts := make([]string, 0, len(tokens))
	for _, tok := range tokens {
		parts = append(parts, fmt.Sprintf("%v", tok))
	}
	return "[" + strings.Join(parts, ", ") + "]"
}

View File

@ -353,12 +353,11 @@ func PrintArrayJavaStyle(sa []string) string {
return buffer.String()
}
// murmur hash
const (
c1_32 = 0xCC9E2D51
c2_32 = 0x1B873593
n1_32 = 0xE6546B64
c1_32 uint = 0xCC9E2D51
c2_32 uint = 0x1B873593
n1_32 uint = 0xE6546B64
)
func murmurInit(seed int) int {
@ -366,23 +365,25 @@ func murmurInit(seed int) int {
}
func murmurUpdate(h1 int, k1 int) int {
k1 *= c1_32
k1 = (k1 << 15) | (k1 >> 17) // rotl32(k1, 15)
k1 *= c2_32
var k1u uint
k1u = uint(k1) * c1_32
k1u = (k1u << 15) | (k1u >> 17) // rotl32(k1u, 15)
k1u *= c2_32
h1 ^= k1
h1 = (h1 << 13) | (h1 >> 19) // rotl32(h1, 13)
h1 = h1*5 + 0xe6546b64
return h1
var h1u = uint(h1) ^ k1u
h1u = (h1u << 13) | (h1u >> 19) // rotl32(h1u, 13)
h1u = h1u*5 + 0xe6546b64
return int(h1u)
}
func murmurFinish(h1 int, numberOfWords int) int {
h1 ^= (numberOfWords * 4)
h1 ^= h1 >> 16
h1 *= 0x85ebca6b
h1 ^= h1 >> 13
h1 *= 0xc2b2ae35
h1 ^= h1 >> 16
var h1u uint = uint(h1)
h1u ^= uint(numberOfWords * 4)
h1u ^= h1u >> 16
h1u *= uint(0x85ebca6b)
h1u ^= h1u >> 13
h1u *= 0xc2b2ae35
h1u ^= h1u >> 16
return h1
return int(h1u)
}

View File

@ -27,6 +27,7 @@
<plugin> <!-- create src jar -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.0.1</version>
<executions>
<execution>
<goals>

View File

@ -151,8 +151,8 @@ public abstract class CodePointCharStream implements CharStream {
/** Return the UTF-16 encoded string for the given interval */
@Override
public String getText(Interval interval) {
int startIdx = Math.min(interval.a, size - 1);
int len = Math.min(interval.b - interval.a + 1, size);
int startIdx = Math.min(interval.a, size);
int len = Math.min(interval.b - interval.a + 1, size - startIdx);
// We know the maximum code point in byteArray is U+00FF,
// so we can treat this as if it were ISO-8859-1, aka Latin-1,

View File

@ -270,7 +270,7 @@ public class ParserATNSimulator extends ATNSimulator {
public static final boolean retry_debug = false;
/** Just in case this optimization is bad, add an ENV variable to turn it off */
public static final boolean TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT = Boolean.parseBoolean(System.getenv("TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT"));
public static final boolean TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT = Boolean.parseBoolean(getSafeEnv("TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT"));
protected final Parser parser;
@ -1541,11 +1541,6 @@ public class ParserATNSimulator extends ATNSimulator {
ATNConfig c = getEpsilonTarget(config, t, continueCollecting,
depth == 0, fullCtx, treatEofAsEpsilon);
if ( c!=null ) {
if (!t.isEpsilon() && !closureBusy.add(c)) {
// avoid infinite recursion for EOF* and EOF+
continue;
}
int newDepth = depth;
if ( config.state instanceof RuleStopState) {
assert !fullCtx;
@ -1555,11 +1550,6 @@ public class ParserATNSimulator extends ATNSimulator {
// come in handy and we avoid evaluating context dependent
// preds if this is > 0.
if (!closureBusy.add(c)) {
// avoid infinite recursion for right-recursive rules
continue;
}
if (_dfa != null && _dfa.isPrecedenceDfa()) {
int outermostPrecedenceReturn = ((EpsilonTransition)t).outermostPrecedenceReturn();
if (outermostPrecedenceReturn == _dfa.atnStartState.ruleIndex) {
@ -1568,15 +1558,28 @@ public class ParserATNSimulator extends ATNSimulator {
}
c.reachesIntoOuterContext++;
if (!closureBusy.add(c)) {
// avoid infinite recursion for right-recursive rules
continue;
}
configs.dipsIntoOuterContext = true; // TODO: can remove? only care when we add to set per middle of this method
assert newDepth > Integer.MIN_VALUE;
newDepth--;
if ( debug ) System.out.println("dips into outer ctx: "+c);
}
else if (t instanceof RuleTransition) {
// latch when newDepth goes negative - once we step out of the entry context we can't return
if (newDepth >= 0) {
newDepth++;
else {
if (!t.isEpsilon() && !closureBusy.add(c)) {
// avoid infinite recursion for EOF* and EOF+
continue;
}
if (t instanceof RuleTransition) {
// latch when newDepth goes negative - once we step out of the entry context we can't return
if (newDepth >= 0) {
newDepth++;
}
}
}
@ -2178,4 +2181,14 @@ public class ParserATNSimulator extends ATNSimulator {
public Parser getParser() {
return parser;
}
public static String getSafeEnv(String envName) {
try {
return System.getenv(envName);
}
catch(SecurityException e) {
// use the default value
}
return null;
}
}

View File

@ -401,11 +401,11 @@ DoubleDict.prototype.set = function (a, b, o) {
function escapeWhitespace(s, escapeSpaces) {
s = s.replace("\t", "\\t");
s = s.replace("\n", "\\n");
s = s.replace("\r", "\\r");
s = s.replace(/\t/g, "\\t")
.replace(/\n/g, "\\n")
.replace(/\r/g, "\\r");
if (escapeSpaces) {
s = s.replace(" ", "\u00B7");
s = s.replace(/ /g, "\u00B7");
}
return s;
}
@ -443,4 +443,4 @@ exports.hashStuff = hashStuff;
exports.escapeWhitespace = escapeWhitespace;
exports.arrayToString = arrayToString;
exports.titleCase = titleCase;
exports.equalArrays = equalArrays;
exports.equalArrays = equalArrays;

View File

@ -218,6 +218,13 @@ class Parser (Recognizer):
self._ctx.exitRule(listener)
listener.exitEveryRule(self._ctx)
# Gets the number of syntax errors reported during parsing. This value is
# incremented each time {@link #notifyErrorListeners} is called.
#
# @see #notifyErrorListeners
#
def getNumberOfSyntaxErrors(self):
return self._syntaxErrors
def getTokenFactory(self):
return self._input.tokenSource._factory

View File

@ -36,14 +36,13 @@ class RuleTagToken(Token):
self.tokenIndex = -1 # from 0..n-1 of the token object in the input stream
self.line = 0 # line=1..n of the 1st character
self.column = -1 # beginning of the line at which it occurs, 0..n-1
self.label = label
self.label = unicode(label)
self._text = self.getText() # text of the token.
self.ruleName = ruleName
self.ruleName = unicode(ruleName)
def getText(self):
if self.label is None:
return "<" + self.ruleName + ">"
return u"<" + self.ruleName + u">"
else:
return "<" + self.label + ":" + self.ruleName + ">"
return u"<" + self.label + ":" + self.ruleName + u">"

View File

@ -24,8 +24,8 @@ class TokenTagToken(CommonToken):
#
def __init__(self, tokenName, type, label=None):
super(TokenTagToken, self).__init__(type=type)
self.tokenName = tokenName
self.label = label
self.tokenName = unicode(tokenName)
self.label = unicode(label)
self._text = self.getText()
#
@ -36,9 +36,9 @@ class TokenTagToken(CommonToken):
#
def getText(self):
if self.label is None:
return "<" + self.tokenName + ">"
return u"<" + self.tokenName + u">"
else:
return "<" + self.label + ":" + self.tokenName + ">"
return u"<" + self.label + u":" + self.tokenName + u">"
# <p>The implementation for {@link TokenTagToken} returns a string of the form
# {@code tokenName:type}.</p>

View File

@ -108,13 +108,13 @@ class TerminalNodeImpl(TerminalNode):
return visitor.visitTerminal(self)
def getText(self):
return self.symbol.text
return unicode(self.symbol.text)
def __unicode__(self):
if self.symbol.type == Token.EOF:
return "<EOF>"
return u"<EOF>"
else:
return self.symbol.text
return unicode(self.symbol.text)
# Represents a token that was consumed during resynchronization
# rather than during a valid match operation. For example,

View File

@ -227,6 +227,14 @@ class Parser (Recognizer):
listener.exitEveryRule(self._ctx)
# Gets the number of syntax errors reported during parsing. This value is
# incremented each time {@link #notifyErrorListeners} is called.
#
# @see #notifyErrorListeners
#
def getNumberOfSyntaxErrors(self):
return self._syntaxErrors
def getTokenFactory(self):
return self._input.tokenSource._factory

View File

@ -12,7 +12,7 @@ from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.PredictionContext import PredictionContextCache
from antlr4.ParserRuleContext import ParserRuleContext
from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from antlr4.error.ErrorStrategy import BailErrorStrategy

View File

@ -1 +1,4 @@
.build/
Antlr4.xcodeproj/
Tests/Antlr4Tests/gen/
xcuserdata/

View File

@ -1,3 +1,4 @@
// swift-tools-version:4.0
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
@ -5,15 +6,19 @@
import PackageDescription
let package = Package(
name: "Antlr4"
)
products.append(
Product(
name: "Antlr4",
type: .Library(.Dynamic),
modules: [
"Antlr4"
]
)
name: "Antlr4",
products: [
.library(
name: "Antlr4",
type: .dynamic,
targets: ["Antlr4"]),
],
targets: [
.target(
name: "Antlr4",
dependencies: []),
.testTarget(
name: "Antlr4Tests",
dependencies: ["Antlr4"]),
]
)

View File

@ -39,12 +39,12 @@ public protocol ANTLRErrorListener: class {
/// the parser was able to recover in line without exiting the
/// surrounding rule.
///
func syntaxError<T:ATNSimulator>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
func syntaxError<T>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
)
///
@ -93,7 +93,7 @@ public protocol ANTLRErrorListener: class {
_ stopIndex: Int,
_ exact: Bool,
_ ambigAlts: BitSet,
_ configs: ATNConfigSet) throws
_ configs: ATNConfigSet)
///
/// This method is called when an SLL conflict occurs and the parser is about
@ -123,7 +123,7 @@ public protocol ANTLRErrorListener: class {
_ startIndex: Int,
_ stopIndex: Int,
_ conflictingAlts: BitSet?,
_ configs: ATNConfigSet) throws
_ configs: ATNConfigSet)
///
/// This method is called by the parser when a full-context prediction has a
@ -168,5 +168,5 @@ public protocol ANTLRErrorListener: class {
_ startIndex: Int,
_ stopIndex: Int,
_ prediction: Int,
_ configs: ATNConfigSet) throws
_ configs: ATNConfigSet)
}

View File

@ -63,7 +63,7 @@ public protocol ANTLRErrorStrategy {
/// - throws: _RecognitionException_ if the error strategy could not recover from
/// the recognition exception
///
func recover(_ recognizer: Parser, _ e: AnyObject) throws
func recover(_ recognizer: Parser, _ e: RecognitionException) throws
///
/// This method provides the error handler with an opportunity to handle
@ -115,5 +115,5 @@ public protocol ANTLRErrorStrategy {
/// - parameter recognizer: the parser instance
/// - parameter e: the recognition exception to report
///
func reportError(_ recognizer: Parser, _ e: AnyObject)
func reportError(_ recognizer: Parser, _ e: RecognitionException)
}

View File

@ -40,14 +40,14 @@ public class BailErrorStrategy: DefaultErrorStrategy {
/// rule function catches. Use _Exception#getCause()_ to get the
/// original _org.antlr.v4.runtime.RecognitionException_.
///
override public func recover(_ recognizer: Parser, _ e: AnyObject) throws {
var context: ParserRuleContext? = recognizer.getContext()
while let contextWrap = context{
override public func recover(_ recognizer: Parser, _ e: RecognitionException) throws {
var context = recognizer.getContext()
while let contextWrap = context {
contextWrap.exception = e
context = (contextWrap.getParent() as? ParserRuleContext)
}
throw ANTLRException.recognition(e: e)
throw ANTLRException.recognition(e: e)
}
///
@ -56,15 +56,14 @@ public class BailErrorStrategy: DefaultErrorStrategy {
///
override
public func recoverInline(_ recognizer: Parser) throws -> Token {
let e: InputMismatchException = try InputMismatchException(recognizer)
var context: ParserRuleContext? = recognizer.getContext()
let e = InputMismatchException(recognizer)
var context = recognizer.getContext()
while let contextWrap = context {
contextWrap.exception = e
context = (contextWrap.getParent() as? ParserRuleContext)
}
throw ANTLRException.recognition(e: e)
throw ANTLRException.recognition(e: e)
}
///

View File

@ -17,12 +17,12 @@ open class BaseErrorListener: ANTLRErrorListener {
public init() {
}
open func syntaxError<T:ATNSimulator>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
open func syntaxError<T>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
) {
}
@ -33,7 +33,7 @@ open class BaseErrorListener: ANTLRErrorListener {
_ stopIndex: Int,
_ exact: Bool,
_ ambigAlts: BitSet,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
}
@ -42,7 +42,7 @@ open class BaseErrorListener: ANTLRErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ conflictingAlts: BitSet?,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
}
@ -51,6 +51,6 @@ open class BaseErrorListener: ANTLRErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ prediction: Int,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
}
}

View File

@ -176,7 +176,7 @@ public class BufferedTokenStream: TokenStream {
let index = tokens.count - 1
throw ANTLRError.indexOutOfBounds(msg: "token index \(i) out of range 0..\(index)")
}
return tokens[i] //tokens[i]
return tokens[i]
}
///
@ -202,7 +202,6 @@ public class BufferedTokenStream: TokenStream {
return subset
}
//TODO: LT(i)!.getType();
public func LA(_ i: Int) throws -> Int {
return try LT(i)!.getType()
}
@ -273,11 +272,11 @@ public class BufferedTokenStream: TokenStream {
fetchedEOF = false
}
public func getTokens() -> Array<Token> {
public func getTokens() -> [Token] {
return tokens
}
public func getTokens(_ start: Int, _ stop: Int) throws -> Array<Token>? {
public func getTokens(_ start: Int, _ stop: Int) throws -> [Token]? {
return try getTokens(start, stop, nil)
}
@ -286,40 +285,36 @@ public class BufferedTokenStream: TokenStream {
/// the token type BitSet. Return null if no tokens were found. This
/// method looks at both on and off channel tokens.
///
public func getTokens(_ start: Int, _ stop: Int, _ types: Set<Int>?) throws -> Array<Token>? {
public func getTokens(_ start: Int, _ stop: Int, _ types: Set<Int>?) throws -> [Token]? {
try lazyInit()
if start < 0 || stop >= tokens.count ||
stop < 0 || start >= tokens.count {
throw ANTLRError.indexOutOfBounds(msg: "start \(start) or stop \(stop) not in 0..\(tokens.count - 1)")
if start < 0 || start >= tokens.count ||
stop < 0 || stop >= tokens.count {
throw ANTLRError.indexOutOfBounds(msg: "start \(start) or stop \(stop) not in 0...\(tokens.count - 1)")
}
if start > stop {
return nil
}
var filteredTokens: Array<Token> = Array<Token>()
var filteredTokens = [Token]()
for i in start...stop {
let t: Token = tokens[i]
if let types = types , !types.contains(t.getType()) {
}else {
let t = tokens[i]
if let types = types, !types.contains(t.getType()) {
}
else {
filteredTokens.append(t)
}
}
if filteredTokens.isEmpty {
return nil
//filteredTokens = nil;
}
return filteredTokens
}
public func getTokens(_ start: Int, _ stop: Int, _ ttype: Int) throws -> Array<Token>? {
//TODO Set<Int> initialCapacity
var s: Set<Int> = Set<Int>()
public func getTokens(_ start: Int, _ stop: Int, _ ttype: Int) throws -> [Token]? {
var s = Set<Int>()
s.insert(ttype)
//s.append(ttype);
return try getTokens(start, stop, s)
return try getTokens(start, stop, s)
}
///
@ -464,7 +459,7 @@ public class BufferedTokenStream: TokenStream {
}
}
}
if hidden.count == 0 {
if hidden.isEmpty {
return nil
}
return hidden

View File

@ -19,12 +19,11 @@ public protocol CharStream: IntStream {
/// - parameter interval: an interval within the stream
/// - returns: the text of the specified interval
///
/// - throws: _ANTLRError.nullPointer_ if `interval` is `null`
/// - throws: _ANTLRError.illegalArgument_ if `interval.a < 0`, or if
/// `interval.b < interval.a - 1`, or if `interval.b` lies at or
/// past the end of the stream
/// - throws: _ANTLRError.unsupportedOperation_ if the stream does not support
/// getting the text of the specified interval
///
func getText(_ interval: Interval) -> String
func getText(_ interval: Interval) throws -> String
}

View File

@ -7,12 +7,6 @@
public class CommonToken: WritableToken {
///
/// An empty _org.antlr.v4.runtime.misc.Pair_ which is used as the default value of
/// _#source_ for tokens that do not have a source.
///
internal static let EMPTY_SOURCE: (TokenSource?, CharStream?) = (nil, nil)
///
/// This is the backing field for _#getType_ and _#setType_.
///
@ -21,20 +15,20 @@ public class CommonToken: WritableToken {
///
/// This is the backing field for _#getLine_ and _#setLine_.
///
internal var line: Int = 0
internal var line = 0
///
/// This is the backing field for _#getCharPositionInLine_ and
/// _#setCharPositionInLine_.
///
internal var charPositionInLine: Int = -1
internal var charPositionInLine = -1
// set to invalid position
///
/// This is the backing field for _#getChannel_ and
/// _#setChannel_.
///
internal var channel: Int = DEFAULT_CHANNEL
internal var channel = DEFAULT_CHANNEL
///
/// This is the backing field for _#getTokenSource_ and
@ -47,7 +41,7 @@ public class CommonToken: WritableToken {
/// _org.antlr.v4.runtime.misc.Pair_ containing these values.
///
internal var source: (TokenSource?, CharStream?)
internal let source: TokenSourceAndStream
///
/// This is the backing field for _#getText_ when the token text is
@ -61,19 +55,19 @@ public class CommonToken: WritableToken {
/// This is the backing field for _#getTokenIndex_ and
/// _#setTokenIndex_.
///
internal var index: Int = -1
internal var index = -1
///
/// This is the backing field for _#getStartIndex_ and
/// _#setStartIndex_.
///
internal var start: Int = 0
internal var start = 0
///
/// This is the backing field for _#getStopIndex_ and
/// _#setStopIndex_.
///
internal var stop: Int = 0
internal var stop = 0
///
/// Constructs a new _org.antlr.v4.runtime.CommonToken_ with the specified token type.
@ -85,16 +79,16 @@ public class CommonToken: WritableToken {
public init(_ type: Int) {
self.type = type
self.source = CommonToken.EMPTY_SOURCE
self.source = TokenSourceAndStream.EMPTY
}
public init(_ source: (TokenSource?, CharStream?), _ type: Int, _ channel: Int, _ start: Int, _ stop: Int) {
public init(_ source: TokenSourceAndStream, _ type: Int, _ channel: Int, _ start: Int, _ stop: Int) {
self.source = source
self.type = type
self.channel = channel
self.start = start
self.stop = stop
if let tsource = source.0 {
if let tsource = source.tokenSource {
self.line = tsource.getLine()
self.charPositionInLine = tsource.getCharPositionInLine()
}
@ -111,20 +105,12 @@ public class CommonToken: WritableToken {
self.type = type
self.channel = CommonToken.DEFAULT_CHANNEL
self.text = text
self.source = CommonToken.EMPTY_SOURCE
self.source = TokenSourceAndStream.EMPTY
}
///
/// Constructs a new _org.antlr.v4.runtime.CommonToken_ as a copy of another _org.antlr.v4.runtime.Token_.
///
///
/// If `oldToken` is also a _org.antlr.v4.runtime.CommonToken_ instance, the newly
/// constructed token will share a reference to the _#text_ field and
/// the _org.antlr.v4.runtime.misc.Pair_ stored in _#source_. Otherwise, _#text_ will
/// be assigned the result of calling _#getText_, and _#source_
/// will be constructed from the result of _org.antlr.v4.runtime.Token#getTokenSource_ and
/// _org.antlr.v4.runtime.Token#getInputStream_.
///
///
/// - parameter oldToken: The token to copy.
///
public init(_ oldToken: Token) {
@ -135,14 +121,8 @@ public class CommonToken: WritableToken {
channel = oldToken.getChannel()
start = oldToken.getStartIndex()
stop = oldToken.getStopIndex()
if oldToken is CommonToken {
text = (oldToken as! CommonToken).text
source = (oldToken as! CommonToken).source
} else {
text = oldToken.getText()
source = (oldToken.getTokenSource(), oldToken.getInputStream())
}
text = oldToken.getText()
source = oldToken.getTokenSourceAndStream()
}
@ -157,14 +137,19 @@ public class CommonToken: WritableToken {
public func getText() -> String? {
if text != nil {
return text!
if let text = text {
return text
}
if let input = getInputStream() {
let n: Int = input.size()
let n = input.size()
if start < n && stop < n {
return input.getText(Interval.of(start, stop))
do {
return try input.getText(Interval.of(start, stop))
}
catch {
return nil
}
} else {
return "<EOF>"
}
@ -247,12 +232,16 @@ public class CommonToken: WritableToken {
public func getTokenSource() -> TokenSource? {
return source.0
return source.tokenSource
}
public func getInputStream() -> CharStream? {
return source.1
return source.stream
}
public func getTokenSourceAndStream() -> TokenSourceAndStream {
return source
}
public var description: String {
@ -260,10 +249,8 @@ public class CommonToken: WritableToken {
}
public func toString(_ r: Recognizer<ATNSimulator>?) -> String {
var channelStr: String = ""
if channel > 0 {
channelStr = ",channel=\(channel)"
}
let channelStr = (channel > 0 ? ",channel=\(channel)" : "")
var txt: String
if let tokenText = getText() {
txt = tokenText.replacingOccurrences(of: "\n", with: "\\n")
@ -272,12 +259,16 @@ public class CommonToken: WritableToken {
} else {
txt = "<no text>"
}
var typeString = "\(type)"
let typeString: String
if let r = r {
typeString = r.getVocabulary().getDisplayName(type);
typeString = r.getVocabulary().getDisplayName(type)
}
else {
typeString = "\(type)"
}
return "[@\(getTokenIndex()),\(start):\(stop)='\(txt)',<\(typeString)>\(channelStr),\(line):\(getCharPositionInLine())]"
}
public var visited: Bool {
get {
return _visited

View File

@ -65,18 +65,17 @@ public class CommonTokenFactory: TokenFactory {
}
public func create(_ source: (TokenSource?, CharStream?), _ type: Int, _ text: String?,
public func create(_ source: TokenSourceAndStream, _ type: Int, _ text: String?,
_ channel: Int, _ start: Int, _ stop: Int,
_ line: Int, _ charPositionInLine: Int) -> Token {
let t: CommonToken = CommonToken(source, type, channel, start, stop)
let t = CommonToken(source, type, channel, start, stop)
t.setLine(line)
t.setCharPositionInLine(charPositionInLine)
if text != nil {
t.setText(text!)
} else {
if let cStream = source.1 , copyText {
t.setText(cStream.getText(Interval.of(start, stop)))
}
if let text = text {
t.setText(text)
}
else if let cStream = source.stream, copyText {
t.setText(try! cStream.getText(Interval.of(start, stop)))
}
return t

View File

@ -39,7 +39,7 @@ public class CommonTokenStream: BufferedTokenStream {
/// The default value is _org.antlr.v4.runtime.Token#DEFAULT_CHANNEL_, which matches the
/// default channel assigned to tokens created by the lexer.
///
internal var channel: Int = CommonToken.DEFAULT_CHANNEL
internal var channel = CommonToken.DEFAULT_CHANNEL
///
/// Constructs a new _org.antlr.v4.runtime.CommonTokenStream_ using the specified token
@ -77,8 +77,8 @@ public class CommonTokenStream: BufferedTokenStream {
return nil
}
var i: Int = p
var n: Int = 1
var i = p
var n = 1
// find k good tokens looking backwards
while n <= k {
// skip off-channel tokens
@ -101,8 +101,8 @@ public class CommonTokenStream: BufferedTokenStream {
if k < 0 {
return try LB(-k)
}
var i: Int = p
var n: Int = 1 // we know tokens[p] is a good one
var i = p
var n = 1 // we know tokens[p] is a good one
// find k good tokens
while n < k {
// skip off-channel tokens, but make sure to not look past EOF
@ -119,11 +119,11 @@ public class CommonTokenStream: BufferedTokenStream {
/// Count EOF just once.
///
public func getNumberOfOnChannelTokens() throws -> Int {
var n: Int = 0
var n = 0
try fill()
let length = tokens.count
for i in 0..<length {
let t: Token = tokens[i]
let t = tokens[i]
if t.getChannel() == channel {
n += 1
}

View File

@ -25,12 +25,12 @@ public class ConsoleErrorListener: BaseErrorListener {
/// line __line__:__charPositionInLine__ __msg__
///
///
override public func syntaxError<T:ATNSimulator>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
override public func syntaxError<T>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
) {
if Parser.ConsoleError {
errPrint("line \(line):\(charPositionInLine) \(msg)")

View File

@ -90,7 +90,7 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// the exception
///
public func reportError(_ recognizer: Parser,
_ e: AnyObject) {
_ e: RecognitionException) {
// if we've already reported an error and have not matched a token
// yet successfully, don't report any errors.
if inErrorRecoveryMode(recognizer) {
@ -98,20 +98,18 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
return // don't report spurious errors
}
beginErrorCondition(recognizer)
if (e is NoViableAltException) {
try! reportNoViableAlternative(recognizer, e as! NoViableAltException);
} else {
if (e is InputMismatchException) {
reportInputMismatch(recognizer, e as! InputMismatchException);
} else {
if (e is FailedPredicateException) {
reportFailedPredicate(recognizer, e as! FailedPredicateException);
} else {
errPrint("unknown recognition error type: " + String(describing: type(of: e)));
let re = (e as! RecognitionException<ParserATNSimulator>)
recognizer.notifyErrorListeners(re.getOffendingToken(), re.message ?? "", e);
}
}
if let nvae = e as? NoViableAltException {
reportNoViableAlternative(recognizer, nvae)
}
else if let ime = e as? InputMismatchException {
reportInputMismatch(recognizer, ime)
}
else if let fpe = e as? FailedPredicateException {
reportFailedPredicate(recognizer, fpe)
}
else {
errPrint("unknown recognition error type: " + String(describing: type(of: e)))
recognizer.notifyErrorListeners(e.getOffendingToken(), e.message ?? "", e)
}
}
@ -120,7 +118,7 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// until we find one in the resynchronization set--loosely the set of tokens
/// that can follow the current rule.
///
public func recover(_ recognizer: Parser, _ e: AnyObject) throws {
public func recover(_ recognizer: Parser, _ e: RecognitionException) throws {
// print("recover in "+recognizer.getRuleInvocationStack()+
// " index="+getTokenStream(recognizer).index()+
// ", lastErrorIndex="+
@ -140,10 +138,10 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
}
lastErrorIndex = getTokenStream(recognizer).index()
if lastErrorStates == nil {
lastErrorStates = try IntervalSet()
lastErrorStates = IntervalSet()
}
try lastErrorStates!.add(recognizer.getState())
let followSet: IntervalSet = try getErrorRecoverySet(recognizer)
let followSet = getErrorRecoverySet(recognizer)
try consumeUntil(recognizer, followSet)
}
@ -195,24 +193,19 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
public func sync(_ recognizer: Parser) throws {
let s: ATNState = recognizer.getInterpreter().atn.states[recognizer.getState()]!
let s = recognizer.getInterpreter().atn.states[recognizer.getState()]!
// errPrint("sync @ "+s.stateNumber+"="+s.getClass().getSimpleName());
// If already recovering, don't try to sync
if inErrorRecoveryMode(recognizer) {
return
}
let tokens: TokenStream = getTokenStream(recognizer)
let la: Int = try tokens.LA(1)
let tokens = getTokenStream(recognizer)
let la = try tokens.LA(1)
// try cheaper subset first; might get lucky. seems to shave a wee bit off
//let set : IntervalSet = recognizer.getATN().nextTokens(s)
if try recognizer.getATN().nextTokens(s).contains(CommonToken.EPSILON) {
return
}
if try recognizer.getATN().nextTokens(s).contains(la) {
let nextToks = recognizer.getATN().nextTokens(s)
if nextToks.contains(CommonToken.EPSILON) || nextToks.contains(la) {
return
}
@ -225,15 +218,14 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
if try singleTokenDeletion(recognizer) != nil {
return
}
throw try ANTLRException.recognition(e: InputMismatchException(recognizer))
throw ANTLRException.recognition(e: InputMismatchException(recognizer))
case ATNState.PLUS_LOOP_BACK: fallthrough
case ATNState.STAR_LOOP_BACK:
// errPrint("at loop back: "+s.getClass().getSimpleName());
try reportUnwantedToken(recognizer)
let expecting: IntervalSet = try recognizer.getExpectedTokens()
let whatFollowsLoopIterationOrRule: IntervalSet =
try expecting.or(try getErrorRecoverySet(recognizer)) as! IntervalSet
reportUnwantedToken(recognizer)
let expecting = try recognizer.getExpectedTokens()
let whatFollowsLoopIterationOrRule = expecting.or(getErrorRecoverySet(recognizer)) as! IntervalSet
try consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
break
@ -253,19 +245,21 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// - parameter e: the recognition exception
///
internal func reportNoViableAlternative(_ recognizer: Parser,
_ e: NoViableAltException) throws {
let tokens: TokenStream? = getTokenStream(recognizer)
_ e: NoViableAltException) {
let tokens = getTokenStream(recognizer)
var input: String
if let tokens = tokens {
if e.getStartToken().getType() == CommonToken.EOF {
input = "<EOF>"
} else {
if e.getStartToken().getType() == CommonToken.EOF {
input = "<EOF>"
}
else {
do {
input = try tokens.getText(e.getStartToken(), e.getOffendingToken())
}
} else {
input = "<unknown input>"
catch {
input = "<unknown>"
}
}
let msg: String = "no viable alternative at input " + escapeWSAndQuote(input)
let msg = "no viable alternative at input " + escapeWSAndQuote(input)
recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e)
}
@ -280,7 +274,7 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
internal func reportInputMismatch(_ recognizer: Parser,
_ e: InputMismatchException) {
let msg: String = "mismatched input " + getTokenErrorDisplay(e.getOffendingToken()) +
let msg = "mismatched input " + getTokenErrorDisplay(e.getOffendingToken()) +
" expecting " + e.getExpectedTokens()!.toString(recognizer.getVocabulary())
recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e)
}
@ -296,8 +290,8 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
internal func reportFailedPredicate(_ recognizer: Parser,
_ e: FailedPredicateException) {
let ruleName: String = recognizer.getRuleNames()[recognizer._ctx!.getRuleIndex()]
let msg: String = "rule " + ruleName + " " + e.message! // e.getMessage()
let ruleName = recognizer.getRuleNames()[recognizer._ctx!.getRuleIndex()]
let msg = "rule \(ruleName) \(e.message!)"
recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e)
}
@ -319,18 +313,17 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
/// - parameter recognizer: the parser instance
///
internal func reportUnwantedToken(_ recognizer: Parser) throws {
internal func reportUnwantedToken(_ recognizer: Parser) {
if inErrorRecoveryMode(recognizer) {
return
}
beginErrorCondition(recognizer)
let t: Token = try recognizer.getCurrentToken()
let tokenName: String = getTokenErrorDisplay(t)
let expecting: IntervalSet = try getExpectedTokens(recognizer)
let msg: String = "extraneous input " + tokenName + " expecting " +
expecting.toString(recognizer.getVocabulary())
let t = try? recognizer.getCurrentToken()
let tokenName = getTokenErrorDisplay(t)
let expecting = (try? getExpectedTokens(recognizer)) ?? IntervalSet.EMPTY_SET
let msg = "extraneous input \(tokenName) expecting \(expecting.toString(recognizer.getVocabulary()))"
recognizer.notifyErrorListeners(t, msg, nil)
}
@ -351,17 +344,16 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
/// - parameter recognizer: the parser instance
///
internal func reportMissingToken(_ recognizer: Parser) throws {
internal func reportMissingToken(_ recognizer: Parser) {
if inErrorRecoveryMode(recognizer) {
return
}
beginErrorCondition(recognizer)
let t: Token = try recognizer.getCurrentToken()
let expecting: IntervalSet = try getExpectedTokens(recognizer)
let msg: String = "missing " + expecting.toString(recognizer.getVocabulary()) +
" at " + getTokenErrorDisplay(t)
let t = try? recognizer.getCurrentToken()
let expecting = (try? getExpectedTokens(recognizer)) ?? IntervalSet.EMPTY_SET
let msg = "missing \(expecting.toString(recognizer.getVocabulary())) at \(getTokenErrorDisplay(t))"
recognizer.notifyErrorListeners(t, msg, nil)
}
@ -419,23 +411,20 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
public func recoverInline(_ recognizer: Parser) throws -> Token {
// SINGLE TOKEN DELETION
let matchedSymbol: Token? = try singleTokenDeletion(recognizer)
if matchedSymbol != nil {
let matchedSymbol = try singleTokenDeletion(recognizer)
if let matchedSymbol = matchedSymbol {
// we have deleted the extra token.
// now, move past ttype token as if all were ok
try recognizer.consume()
return matchedSymbol!
return matchedSymbol
}
// SINGLE TOKEN INSERTION
if try singleTokenInsertion(recognizer) {
return try getMissingSymbol(recognizer)
}
throw try ANTLRException.recognition(e: InputMismatchException(recognizer))
// throw try ANTLRException.InputMismatch(e: InputMismatchException(recognizer) )
//RuntimeException("InputMismatchException")
// even that didn't work; must throw the exception
//throwException() /* throw InputMismatchException(recognizer); */
throw ANTLRException.recognition(e: InputMismatchException(recognizer))
}
///
@ -456,17 +445,17 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// strategy for the current mismatched input, otherwise `false`
///
internal func singleTokenInsertion(_ recognizer: Parser) throws -> Bool {
let currentSymbolType: Int = try getTokenStream(recognizer).LA(1)
let currentSymbolType = try getTokenStream(recognizer).LA(1)
// if current token is consistent with what could come after current
// ATN state, then we know we're missing a token; error recovery
// is free to conjure up and insert the missing token
let currentState: ATNState = recognizer.getInterpreter().atn.states[recognizer.getState()]!
let next: ATNState = currentState.transition(0).target
let atn: ATN = recognizer.getInterpreter().atn
let expectingAtLL2: IntervalSet = try atn.nextTokens(next, recognizer._ctx)
let currentState = recognizer.getInterpreter().atn.states[recognizer.getState()]!
let next = currentState.transition(0).target
let atn = recognizer.getInterpreter().atn
let expectingAtLL2 = atn.nextTokens(next, recognizer._ctx)
// print("LT(2) set="+expectingAtLL2.toString(recognizer.getTokenNames()));
if expectingAtLL2.contains(currentSymbolType) {
try reportMissingToken(recognizer)
reportMissingToken(recognizer)
return true
}
return false
@ -492,10 +481,10 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// `null`
///
internal func singleTokenDeletion(_ recognizer: Parser) throws -> Token? {
let nextTokenType: Int = try getTokenStream(recognizer).LA(2)
let expecting: IntervalSet = try getExpectedTokens(recognizer)
let nextTokenType = try getTokenStream(recognizer).LA(2)
let expecting = try getExpectedTokens(recognizer)
if expecting.contains(nextTokenType) {
try reportUnwantedToken(recognizer)
reportUnwantedToken(recognizer)
///
/// errPrint("recoverFromMismatchedToken deleting "+
/// ((TokenStream)getTokenStream(recognizer)).LT(1)+
@ -504,7 +493,7 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
try recognizer.consume() // simply delete extra token
// we want to return the token we're actually matching
let matchedSymbol: Token = try recognizer.getCurrentToken()
let matchedSymbol = try recognizer.getCurrentToken()
reportMatch(recognizer) // we know current token is correct
return matchedSymbol
}
@ -536,25 +525,27 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
}
internal func getMissingSymbol(_ recognizer: Parser) throws -> Token {
let currentSymbol: Token = try recognizer.getCurrentToken()
let expecting: IntervalSet = try getExpectedTokens(recognizer)
let expectedTokenType: Int = expecting.getMinElement() // get any element
let currentSymbol = try recognizer.getCurrentToken()
let expecting = try getExpectedTokens(recognizer)
let expectedTokenType = expecting.getMinElement() // get any element
var tokenText: String
if expectedTokenType == CommonToken.EOF {
tokenText = "<missing EOF>"
} else {
tokenText = "<missing " + recognizer.getVocabulary().getDisplayName(expectedTokenType) + ">"
}
var current: Token = currentSymbol
let lookback: Token? = try getTokenStream(recognizer).LT(-1)
var current = currentSymbol
let lookback = try getTokenStream(recognizer).LT(-1)
if current.getType() == CommonToken.EOF && lookback != nil {
current = lookback!
}
let token = recognizer.getTokenFactory().create((current.getTokenSource(), current.getTokenSource()!.getInputStream()), expectedTokenType, tokenText,
CommonToken.DEFAULT_CHANNEL,
-1, -1,
current.getLine(), current.getCharPositionInLine())
let token = recognizer.getTokenFactory().create(
current.getTokenSourceAndStream(),
expectedTokenType, tokenText,
CommonToken.DEFAULT_CHANNEL,
-1, -1,
current.getLine(), current.getCharPositionInLine())
return token
}
@ -574,22 +565,22 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// so that it creates a new Java type.
///
internal func getTokenErrorDisplay(_ t: Token?) -> String {
if t == nil {
guard let t = t else {
return "<no token>"
}
var s: String? = getSymbolText(t!)
var s = getSymbolText(t)
if s == nil {
if getSymbolType(t!) == CommonToken.EOF {
if getSymbolType(t) == CommonToken.EOF {
s = "<EOF>"
} else {
s = "<\(getSymbolType(t!))>"
s = "<\(getSymbolType(t))>"
}
}
return escapeWSAndQuote(s!)
}
internal func getSymbolText(_ symbol: Token) -> String {
return symbol.getText()!
internal func getSymbolText(_ symbol: Token) -> String? {
return symbol.getText()
}
internal func getSymbolType(_ symbol: Token) -> Int {
@ -698,19 +689,19 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// Like Grosch I implement context-sensitive FOLLOW sets that are combined
/// at run-time upon error to avoid overhead during parsing.
///
internal func getErrorRecoverySet(_ recognizer: Parser) throws -> IntervalSet {
let atn: ATN = recognizer.getInterpreter().atn
internal func getErrorRecoverySet(_ recognizer: Parser) -> IntervalSet {
let atn = recognizer.getInterpreter().atn
var ctx: RuleContext? = recognizer._ctx
let recoverSet: IntervalSet = try IntervalSet()
while let ctxWrap = ctx , ctxWrap.invokingState >= 0 {
let recoverSet = IntervalSet()
while let ctxWrap = ctx, ctxWrap.invokingState >= 0 {
// compute what follows who invoked us
let invokingState: ATNState = atn.states[ctxWrap.invokingState]!
let rt: RuleTransition = invokingState.transition(0) as! RuleTransition
let follow: IntervalSet = try atn.nextTokens(rt.followState)
try recoverSet.addAll(follow)
let invokingState = atn.states[ctxWrap.invokingState]!
let rt = invokingState.transition(0) as! RuleTransition
let follow = atn.nextTokens(rt.followState)
try! recoverSet.addAll(follow)
ctx = ctxWrap.parent
}
try recoverSet.remove(CommonToken.EPSILON)
try! recoverSet.remove(CommonToken.EPSILON)
// print("recover set "+recoverSet.toString(recognizer.getTokenNames()));
return recoverSet
}
@ -720,10 +711,9 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
internal func consumeUntil(_ recognizer: Parser, _ set: IntervalSet) throws {
// errPrint("consumeUntil("+set.toString(recognizer.getTokenNames())+")");
var ttype: Int = try getTokenStream(recognizer).LA(1)
var ttype = try getTokenStream(recognizer).LA(1)
while ttype != CommonToken.EOF && !set.contains(ttype) {
//print("consume during recover LA(1)="+getTokenNames()[input.LA(1)]);
// getTokenStream(recognizer).consume();
try recognizer.consume()
ttype = try getTokenStream(recognizer).LA(1)
}

View File

@ -59,16 +59,16 @@ public class DiagnosticErrorListener: BaseErrorListener {
_ stopIndex: Int,
_ exact: Bool,
_ ambigAlts: BitSet,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
if exactOnly && !exact {
return
}
let decision = getDecisionDescription(recognizer, dfa)
let conflictingAlts = try getConflictingAlts(ambigAlts, configs)
let text = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let conflictingAlts = getConflictingAlts(ambigAlts, configs)
let text = getTextInInterval(recognizer, startIndex, stopIndex)
let message = "reportAmbiguity d=\(decision): ambigAlts=\(conflictingAlts), input='\(text)'"
try recognizer.notifyErrorListeners(message)
recognizer.notifyErrorListeners(message)
}
override
@ -77,11 +77,11 @@ public class DiagnosticErrorListener: BaseErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ conflictingAlts: BitSet?,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
let decision = getDecisionDescription(recognizer, dfa)
let text = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let text = getTextInInterval(recognizer, startIndex, stopIndex)
let message = "reportAttemptingFullContext d=\(decision), input='\(text)'"
try recognizer.notifyErrorListeners(message)
recognizer.notifyErrorListeners(message)
}
override
@ -90,11 +90,11 @@ public class DiagnosticErrorListener: BaseErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ prediction: Int,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
let decision = getDecisionDescription(recognizer, dfa)
let text = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let text = getTextInInterval(recognizer, startIndex, stopIndex)
let message = "reportContextSensitivity d=\(decision), input='\(text)'"
try recognizer.notifyErrorListeners(message)
recognizer.notifyErrorListeners(message)
}
internal func getDecisionDescription(_ recognizer: Parser, _ dfa: DFA) -> String {
@ -125,12 +125,17 @@ public class DiagnosticErrorListener: BaseErrorListener {
/// - returns: Returns `reportedAlts` if it is not `null`, otherwise
/// returns the set of alternatives represented in `configs`.
///
internal func getConflictingAlts(_ reportedAlts: BitSet?, _ configs: ATNConfigSet) throws -> BitSet {
if reportedAlts != nil {
return reportedAlts!
}
let result = try configs.getAltBitSet()
return result
internal func getConflictingAlts(_ reportedAlts: BitSet?, _ configs: ATNConfigSet) -> BitSet {
return reportedAlts ?? configs.getAltBitSet()
}
}
fileprivate func getTextInInterval(_ recognizer: Parser, _ startIndex: Int, _ stopIndex: Int) -> String {
do {
return try recognizer.getTokenStream()?.getText(Interval.of(startIndex, stopIndex)) ?? "<unknown>"
}
catch {
return "<unknown>"
}
}

View File

@ -11,30 +11,18 @@
/// Disambiguating predicate evaluation occurs when we test a predicate during
/// prediction.
///
public class FailedPredicateException: RecognitionException<ParserATNSimulator> {
public class FailedPredicateException: RecognitionException {
private final var ruleIndex: Int
private final var predicateIndex: Int
private final var predicate: String?
public convenience init(_ recognizer: Parser) throws {
try self.init(recognizer, nil)
}
public init(_ recognizer: Parser, _ predicate: String? = nil, _ message: String? = nil) {
let s = recognizer.getInterpreter().atn.states[recognizer.getState()]!
public convenience init(_ recognizer: Parser, _ predicate: String?)throws {
try self.init(recognizer, predicate, nil)
}
public init(_ recognizer: Parser,
_ predicate: String?,
_ message: String?) throws
{
let s: ATNState = recognizer.getInterpreter().atn.states[recognizer.getState()]!
let trans: AbstractPredicateTransition = s.transition(0) as! AbstractPredicateTransition
if trans is PredicateTransition {
self.ruleIndex = (trans as! PredicateTransition).ruleIndex
self.predicateIndex = (trans as! PredicateTransition).predIndex
let trans = s.transition(0) as! AbstractPredicateTransition
if let predex = trans as? PredicateTransition {
self.ruleIndex = predex.ruleIndex
self.predicateIndex = predex.predIndex
}
else {
self.ruleIndex = 0
@ -43,9 +31,10 @@ public class FailedPredicateException: RecognitionException<ParserATNSimulator>
self.predicate = predicate
super.init(FailedPredicateException.formatMessage(predicate!, message), recognizer , recognizer.getInputStream()!, recognizer._ctx)
try self.setOffendingToken(recognizer.getCurrentToken())
super.init(recognizer, recognizer.getInputStream()!, recognizer._ctx, FailedPredicateException.formatMessage(predicate, message))
if let token = try? recognizer.getCurrentToken() {
setOffendingToken(token)
}
}
public func getRuleIndex() -> Int {
@ -56,17 +45,17 @@ public class FailedPredicateException: RecognitionException<ParserATNSimulator>
return predicateIndex
}
public func getPredicate() -> String? {
return predicate
}
private static func formatMessage(_ predicate: String, _ message: String?) -> String {
private static func formatMessage(_ predicate: String?, _ message: String?) -> String {
if message != nil {
return message!
}
return "failed predicate: {predicate}?" //String.format(Locale.getDefault(), "failed predicate: {%s}?", predicate);
let predstr = predicate ?? "<unknown>"
return "failed predicate: {\(predstr)}?"
}
}

View File

@ -10,9 +10,11 @@
/// when the current input does not match the expected token.
///
public class InputMismatchException: RecognitionException<ParserATNSimulator> {
public init(_ recognizer: Parser) throws {
public class InputMismatchException: RecognitionException {
public init(_ recognizer: Parser) {
super.init(recognizer, recognizer.getInputStream()!, recognizer._ctx)
self.setOffendingToken(try recognizer.getCurrentToken())
if let token = try? recognizer.getCurrentToken() {
setOffendingToken(token)
}
}
}

View File

@ -14,27 +14,24 @@
import Foundation
//public class Lexer : Recognizer<Int, LexerATNSimulator>
open class Lexer: Recognizer<LexerATNSimulator>, TokenSource {
public static let EOF = -1
public static let DEFAULT_MODE = 0
public static let MORE = -2
public static let SKIP = -3
open class Lexer: Recognizer<LexerATNSimulator>
, TokenSource {
public static let EOF: Int = -1
public static let DEFAULT_MODE: Int = 0
public static let MORE: Int = -2
public static let SKIP: Int = -3
public static let DEFAULT_TOKEN_CHANNEL: Int = CommonToken.DEFAULT_CHANNEL
public static let HIDDEN: Int = CommonToken.HIDDEN_CHANNEL
public static let MIN_CHAR_VALUE: Int = Character.MIN_VALUE;
public static let MAX_CHAR_VALUE: Int = Character.MAX_VALUE;
public static let DEFAULT_TOKEN_CHANNEL = CommonToken.DEFAULT_CHANNEL
public static let HIDDEN = CommonToken.HIDDEN_CHANNEL
public static let MIN_CHAR_VALUE = Character.MIN_VALUE;
public static let MAX_CHAR_VALUE = Character.MAX_VALUE;
public var _input: CharStream?
internal var _tokenFactorySourcePair: (TokenSource?, CharStream?)
internal var _tokenFactorySourcePair: TokenSourceAndStream
///
/// How to create token objects
///
internal var _factory: TokenFactory = CommonTokenFactory.DEFAULT
internal var _factory = CommonTokenFactory.DEFAULT
///
/// The goal of all lexer rules/methods is to create a token object.
@ -52,36 +49,36 @@ open class Lexer: Recognizer<LexerATNSimulator>
/// Needed, for example, to get the text for current token. Set at
/// the start of nextToken.
///
public var _tokenStartCharIndex: Int = -1
public var _tokenStartCharIndex = -1
///
/// The line on which the first character of the token resides
///
public var _tokenStartLine: Int = 0
public var _tokenStartLine = 0
///
/// The character position of first character within the line
///
public var _tokenStartCharPositionInLine: Int = 0
public var _tokenStartCharPositionInLine = 0
///
/// Once we see EOF on char stream, next token will be EOF.
/// If you have DONE : EOF ; then you see DONE EOF.
///
public var _hitEOF: Bool = false
public var _hitEOF = false
///
/// The channel number for the current token
///
public var _channel: Int = 0
public var _channel = 0
///
/// The token type for the current token
///
public var _type: Int = 0
public var _type = 0
public final var _modeStack: Stack<Int> = Stack<Int>()
public var _mode: Int = Lexer.DEFAULT_MODE
public final var _modeStack = Stack<Int>()
public var _mode = Lexer.DEFAULT_MODE
///
/// You can set the text for the current token to override what is in
@ -90,13 +87,17 @@ open class Lexer: Recognizer<LexerATNSimulator>
public var _text: String?
public override init() {
self._tokenFactorySourcePair = TokenSourceAndStream()
super.init()
self._tokenFactorySourcePair.tokenSource = self
}
public init(_ input: CharStream) {
super.init()
self._input = input
self._tokenFactorySourcePair = (self, input)
self._tokenFactorySourcePair = TokenSourceAndStream()
super.init()
self._tokenFactorySourcePair.tokenSource = self
self._tokenFactorySourcePair.stream = input
}
open func reset() throws {
@ -131,7 +132,7 @@ open class Lexer: Recognizer<LexerATNSimulator>
// Mark start location in char stream so unbuffered streams are
// guaranteed at least have text of current token
var tokenStartMarker: Int = _input.mark()
var tokenStartMarker = _input.mark()
defer {
// make sure we release marker after match or
// unbuffered char stream will keep buffering
@ -237,10 +238,10 @@ open class Lexer: Recognizer<LexerATNSimulator>
open override func setInputStream(_ input: IntStream) throws {
self._input = nil
self._tokenFactorySourcePair = (self, _input!)
self._tokenFactorySourcePair = makeTokenSourceAndStream()
try reset()
self._input = input as? CharStream
self._tokenFactorySourcePair = (self, _input!)
self._tokenFactorySourcePair = makeTokenSourceAndStream()
}
@ -273,24 +274,25 @@ open class Lexer: Recognizer<LexerATNSimulator>
///
@discardableResult
open func emit() -> Token {
let t: Token = _factory.create(_tokenFactorySourcePair, _type, _text, _channel, _tokenStartCharIndex, getCharIndex() - 1,
_tokenStartLine, _tokenStartCharPositionInLine)
let t = _factory.create(_tokenFactorySourcePair, _type, _text, _channel, _tokenStartCharIndex, getCharIndex() - 1, _tokenStartLine, _tokenStartCharPositionInLine)
emit(t)
return t
}
@discardableResult
open func emitEOF() -> Token {
let cpos: Int = getCharPositionInLine()
let line: Int = getLine()
let eof: Token = _factory.create(
_tokenFactorySourcePair,
CommonToken.EOF,
nil,
CommonToken.DEFAULT_CHANNEL,
_input!.index(),
_input!.index() - 1,
line,
cpos)
let cpos = getCharPositionInLine()
let line = getLine()
let idx = _input!.index()
let eof = _factory.create(
_tokenFactorySourcePair,
CommonToken.EOF,
nil,
CommonToken.DEFAULT_CHANNEL,
idx,
idx - 1,
line,
cpos)
emit(eof)
return eof
}
@ -374,23 +376,13 @@ open class Lexer: Recognizer<LexerATNSimulator>
return nil
}
///
/// Used to print out token names like ID during debugging and
/// error reporting. The generated parsers implement a method
/// that overrides this to point to their String[] tokenNames.
///
override
open func getTokenNames() -> [String?]? {
return nil
}
///
/// Return a list of all Token objects in input char stream.
/// Forces load of all tokens. Does not include EOF token.
///
open func getAllTokens() throws -> Array<Token> {
var tokens: Array<Token> = Array<Token>()
var t: Token = try nextToken()
open func getAllTokens() throws -> [Token] {
var tokens = [Token]()
var t = try nextToken()
while t.getType() != CommonToken.EOF {
tokens.append(t)
t = try nextToken()
@ -405,25 +397,31 @@ open class Lexer: Recognizer<LexerATNSimulator>
}
}
open func notifyListeners<T:ATNSimulator>(_ e: LexerNoViableAltException, recognizer: Recognizer<T>) {
open func notifyListeners<T>(_ e: LexerNoViableAltException, recognizer: Recognizer<T>) {
let text: String = _input!.getText(Interval.of(_tokenStartCharIndex, _input!.index()))
let msg: String = "token recognition error at: '\(getErrorDisplay(text))'"
let text: String
do {
text = try _input!.getText(Interval.of(_tokenStartCharIndex, _input!.index()))
}
catch {
text = "<unknown>"
}
let msg = "token recognition error at: '\(getErrorDisplay(text))'"
let listener: ANTLRErrorListener = getErrorListenerDispatch()
let listener = getErrorListenerDispatch()
listener.syntaxError(recognizer, nil, _tokenStartLine, _tokenStartCharPositionInLine, msg, e)
}
open func getErrorDisplay(_ s: String) -> String {
let buf: StringBuilder = StringBuilder()
for c: Character in s.characters {
let buf = StringBuilder()
for c in s.characters {
buf.append(getErrorDisplay(c))
}
return buf.toString()
}
open func getErrorDisplay(_ c: Character) -> String {
var s: String = String(c) // String.valueOf(c as Character);
var s = String(c)
if c.integerValue == CommonToken.EOF {
s = "<EOF>"
}
@ -455,4 +453,8 @@ open class Lexer: Recognizer<LexerATNSimulator>
// TODO: Do we lose character or line position information?
try _input!.consume()
}
internal func makeTokenSourceAndStream() -> TokenSourceAndStream {
return TokenSourceAndStream(self, _input)
}
}

View File

@ -9,20 +9,15 @@ public class LexerInterpreter: Lexer {
internal final var grammarFileName: String
internal final var atn: ATN
///
/// /@Deprecated
///
internal final var tokenNames: [String?]?
internal final var ruleNames: [String]
internal final var channelNames: [String]
internal final var modeNames: [String]
private final var vocabulary: Vocabulary?
internal final var _decisionToDFA: [DFA]
internal final var _sharedContextCache: PredictionContextCache =
PredictionContextCache()
internal final var _sharedContextCache = PredictionContextCache()
// public override init() {
// super.init()}
@ -40,13 +35,6 @@ public class LexerInterpreter: Lexer {
self.grammarFileName = grammarFileName
self.atn = atn
self.tokenNames = [String?]()
//new String[atn.maxTokenType];
let length = tokenNames!.count
for i in 0..<length {
tokenNames![i] = vocabulary.getDisplayName(i)
}
self.ruleNames = ruleNames
self.channelNames = channelNames
self.modeNames = modeNames
@ -57,9 +45,7 @@ public class LexerInterpreter: Lexer {
for i in 0..<_decisionToDFALength {
_decisionToDFA[i] = DFA(atn.getDecisionState(i)!, i)
}
super.init()
self._input = input
self._tokenFactorySourcePair = (self, input)
super.init(input)
self._interp = LexerATNSimulator(self, atn, _decisionToDFA, _sharedContextCache)
if atn.grammarType != ATNType.lexer {
@ -78,14 +64,6 @@ public class LexerInterpreter: Lexer {
return grammarFileName
}
override
///
/// /@Deprecated
///
public func getTokenNames() -> [String?]? {
return tokenNames
}
override
public func getRuleNames() -> [String] {
return ruleNames

View File

@ -5,7 +5,7 @@
///
public class LexerNoViableAltException: RecognitionException<LexerATNSimulator>, CustomStringConvertible {
public class LexerNoViableAltException: RecognitionException, CustomStringConvertible {
///
/// Matching attempted at what input index?
///
@ -31,23 +31,15 @@ public class LexerNoViableAltException: RecognitionException<LexerATNSimulator>,
return startIndex
}
public func getDeadEndConfigs() -> ATNConfigSet {
return deadEndConfigs
}
//override
// public func getInputStream() -> CharStream {
// return super.getInputStream() as! CharStream;
// }
public var description: String {
var symbol: String = ""
if startIndex >= 0 && startIndex < getInputStream().size() {
let charStream: CharStream = getInputStream() as! CharStream
let interval: Interval = Interval.of(startIndex, startIndex)
symbol = charStream.getText(interval)
var symbol = ""
if let charStream = getInputStream() as? CharStream, startIndex >= 0 && startIndex < charStream.size() {
let interval = Interval.of(startIndex, startIndex)
symbol = try! charStream.getText(interval)
symbol = Utils.escapeWhitespace(symbol, false)
}

View File

@ -17,7 +17,7 @@ public class ListTokenSource: TokenSource {
///
/// The wrapped collection of _org.antlr.v4.runtime.Token_ objects to return.
///
internal final var tokens: Array<Token>
internal final var tokens: [Token]
///
/// The name of the input source. If this value is `null`, a call to
@ -32,7 +32,7 @@ public class ListTokenSource: TokenSource {
/// _#nextToken_. The end of the input is indicated by this value
/// being greater than or equal to the number of items in _#tokens_.
///
internal var i: Int = 0
internal var i = 0
///
/// This field caches the EOF token for the token source.
@ -43,7 +43,7 @@ public class ListTokenSource: TokenSource {
/// This is the backing field for _#getTokenFactory_ and
/// _setTokenFactory_.
///
private var _factory: TokenFactory = CommonTokenFactory.DEFAULT
private var _factory = CommonTokenFactory.DEFAULT
///
/// Constructs a new _org.antlr.v4.runtime.ListTokenSource_ instance from the specified
@ -52,7 +52,7 @@ public class ListTokenSource: TokenSource {
/// - parameter tokens: The collection of _org.antlr.v4.runtime.Token_ objects to provide as a
/// _org.antlr.v4.runtime.TokenSource_.
///
public convenience init(_ tokens: Array<Token>) {
public convenience init(_ tokens: [Token]) {
self.init(tokens, nil)
}
@ -67,8 +67,7 @@ public class ListTokenSource: TokenSource {
/// the next _org.antlr.v4.runtime.Token_ (or the previous token if the end of the input has
/// been reached).
///
public init(_ tokens: Array<Token>, _ sourceName: String?) {
public init(_ tokens: [Token], _ sourceName: String?) {
self.tokens = tokens
self.sourceName = sourceName
}
@ -76,28 +75,24 @@ public class ListTokenSource: TokenSource {
public func getCharPositionInLine() -> Int {
if i < tokens.count {
return tokens[i].getCharPositionInLine()
} else {
if let eofToken = eofToken {
return eofToken.getCharPositionInLine()
} else {
if tokens.count > 0 {
// have to calculate the result from the line/column of the previous
// token, along with the text of the token.
let lastToken: Token = tokens[tokens.count - 1]
}
else if let eofToken = eofToken {
return eofToken.getCharPositionInLine()
}
else if tokens.count > 0 {
// have to calculate the result from the line/column of the previous
// token, along with the text of the token.
let lastToken = tokens[tokens.count - 1]
if let tokenText = lastToken.getText() {
let lastNewLine: Int = tokenText.lastIndexOf("\n")
if lastNewLine >= 0 {
return tokenText.length - lastNewLine - 1
}
}
var position = lastToken.getCharPositionInLine()
position += lastToken.getStopIndex()
position -= lastToken.getStartIndex()
position += 1
return position
if let tokenText = lastToken.getText() {
let lastNewLine = tokenText.lastIndexOf("\n")
if lastNewLine >= 0 {
return tokenText.length - lastNewLine - 1
}
}
return (lastToken.getCharPositionInLine() +
lastToken.getStopIndex() -
lastToken.getStartIndex() + 1)
}
// only reach this if tokens is empty, meaning EOF occurs at the first
@ -108,22 +103,23 @@ public class ListTokenSource: TokenSource {
public func nextToken() -> Token {
if i >= tokens.count {
if eofToken == nil {
var start: Int = -1
var start = -1
if tokens.count > 0 {
let previousStop: Int = tokens[tokens.count - 1].getStopIndex()
let previousStop = tokens[tokens.count - 1].getStopIndex()
if previousStop != -1 {
start = previousStop + 1
}
}
let stop: Int = max(-1, start - 1)
eofToken = _factory.create((self, getInputStream()!), CommonToken.EOF, "EOF", CommonToken.DEFAULT_CHANNEL, start, stop, getLine(), getCharPositionInLine())
let stop = max(-1, start - 1)
let source = TokenSourceAndStream(self, getInputStream())
eofToken = _factory.create(source, CommonToken.EOF, "EOF", CommonToken.DEFAULT_CHANNEL, start, stop, getLine(), getCharPositionInLine())
}
return eofToken!
}
let t: Token = tokens[i]
let t = tokens[i]
if i == tokens.count - 1 && t.getType() == CommonToken.EOF {
eofToken = t
}
@ -142,8 +138,8 @@ public class ListTokenSource: TokenSource {
if tokens.count > 0 {
// have to calculate the result from the line/column of the previous
// token, along with the text of the token.
let lastToken: Token = tokens[tokens.count - 1]
var line: Int = lastToken.getLine()
let lastToken = tokens[tokens.count - 1]
var line = lastToken.getLine()
if let tokenText = lastToken.getText() {
let length = tokenText.length
@ -168,14 +164,12 @@ public class ListTokenSource: TokenSource {
public func getInputStream() -> CharStream? {
if i < tokens.count {
return tokens[i].getInputStream()
} else {
if let eofToken = eofToken{
return eofToken.getInputStream()
} else {
if tokens.count > 0 {
return tokens[tokens.count - 1].getInputStream()
}
}
}
else if let eofToken = eofToken {
return eofToken.getInputStream()
}
else if tokens.count > 0 {
return tokens[tokens.count - 1].getInputStream()
}
// no input stream information is available
@ -183,8 +177,8 @@ public class ListTokenSource: TokenSource {
}
public func getSourceName() -> String {
if sourceName != nil {
return sourceName!
if let sourceName = sourceName {
return sourceName
}
if let inputStream = getInputStream() {

View File

@ -10,7 +10,7 @@
/// in the various paths when the error. Reported by reportNoViableAlternative()
///
public class NoViableAltException: RecognitionException<ParserATNSimulator> {
public class NoViableAltException: RecognitionException {
/// Which configurations did we try at input.index() that couldn't match input.LT(1)?
private final var deadEndConfigs: ATNConfigSet?
@ -22,29 +22,31 @@ public class NoViableAltException: RecognitionException<ParserATNSimulator> {
///
private final var startToken: Token
public convenience init(_ recognizer: Parser?) throws {
public convenience init(_ recognizer: Parser) {
// LL(1) error
let token = try! recognizer.getCurrentToken()
self.init(recognizer,
recognizer!.getInputStream()!,
try recognizer!.getCurrentToken(),
try recognizer!.getCurrentToken(),
recognizer.getInputStream()!,
token,
token,
nil,
recognizer!._ctx)
recognizer._ctx)
}
public init(_ recognizer: Parser?,
_ input: IntStream,
_ startToken: Token,
_ offendingToken: Token,
_ offendingToken: Token?,
_ deadEndConfigs: ATNConfigSet?,
_ ctx: ParserRuleContext?) {
self.deadEndConfigs = deadEndConfigs
self.startToken = startToken
// as? Recognizer<AnyObject, ATNSimulator>
super.init(recognizer, input, ctx)
self.setOffendingToken(offendingToken)
if let offendingToken = offendingToken {
setOffendingToken(offendingToken)
}
}

View File

@ -12,7 +12,7 @@ import Foundation
/// This is all the parsing support code essentially; most of it is error recovery stuff.
///
open class Parser: Recognizer<ParserATNSimulator> {
public static let EOF: Int = -1
public static let EOF = -1
public static var ConsoleError = true
public class TraceListener: ParseTreeListener {
@ -27,16 +27,13 @@ open class Parser: Recognizer<ParserATNSimulator> {
print("enter \(ruleName), LT(1)=\(lt1)")
}
public func visitTerminal(_ node: TerminalNode) {
print("consume \(String(describing: node.getSymbol())) rule \(host.getRuleNames()[host._ctx!.getRuleIndex()])")
}
public func visitErrorNode(_ node: ErrorNode) {
}
public func exitEveryRule(_ ctx: ParserRuleContext) throws {
let ruleName = host.getRuleNames()[ctx.getRuleIndex()]
let lt1 = try host._input.LT(1)!.getText()!
@ -45,23 +42,17 @@ open class Parser: Recognizer<ParserATNSimulator> {
}
public class TrimToSizeListener: ParseTreeListener {
public static let INSTANCE: TrimToSizeListener = TrimToSizeListener()
public static let INSTANCE = TrimToSizeListener()
public func enterEveryRule(_ ctx: ParserRuleContext) {
}
public func visitTerminal(_ node: TerminalNode) {
}
public func visitErrorNode(_ node: ErrorNode) {
}
public func exitEveryRule(_ ctx: ParserRuleContext) {
// TODO: Print exit info.
}
@ -193,7 +184,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
@discardableResult
public func match(_ ttype: Int) throws -> Token {
var t: Token = try getCurrentToken()
var t = try getCurrentToken()
if t.getType() == ttype {
_errHandler.reportMatch(self)
try consume()
@ -228,7 +219,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
@discardableResult
public func matchWildcard() throws -> Token {
var t: Token = try getCurrentToken()
var t = try getCurrentToken()
if t.getType() > 0 {
_errHandler.reportMatch(self)
try consume()
@ -297,18 +288,11 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// using the default _org.antlr.v4.runtime.Parser.TrimToSizeListener_ during the parse process.
///
public func getTrimParseTree() -> Bool {
return !getParseListeners().filter({ $0 === TrimToSizeListener.INSTANCE }).isEmpty
}
public func getParseListeners() -> Array<ParseTreeListener> {
let listeners: Array<ParseTreeListener>? = _parseListeners
if listeners == nil {
return Array<ParseTreeListener>()
}
return listeners!
public func getParseListeners() -> [ParseTreeListener] {
return _parseListeners ?? [ParseTreeListener]()
}
///
@ -336,14 +320,12 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
/// - Parameter listener: the listener to add
///
/// - Throws: _ANTLRError.nullPointer_ if listener is `null`
///
public func addParseListener(_ listener: ParseTreeListener) {
if _parseListeners == nil {
_parseListeners = Array<ParseTreeListener>()
_parseListeners = [ParseTreeListener]()
}
self._parseListeners!.append(listener)
_parseListeners!.append(listener)
}
///
@ -401,11 +383,11 @@ open class Parser: Recognizer<ParserATNSimulator> {
public func triggerExitRuleEvent() throws {
// reverse order walk of listeners
if let _parseListeners = _parseListeners, let _ctx = _ctx {
var i: Int = _parseListeners.count - 1
var i = _parseListeners.count - 1
while i >= 0 {
let listener: ParseTreeListener = _parseListeners[i]
let listener = _parseListeners[i]
_ctx.exitRule(listener)
try listener.exitEveryRule(_ctx)
try listener.exitEveryRule(_ctx)
i -= 1
}
}
@ -423,14 +405,12 @@ open class Parser: Recognizer<ParserATNSimulator> {
override
open func getTokenFactory() -> TokenFactory {
//<AnyObject>
return _input.getTokenSource().getTokenFactory()
}
/// Tell our token source and error strategy about a new way to create tokens.
override
open func setTokenFactory(_ factory: TokenFactory) {
//<AnyObject>
_input.getTokenSource().setTokenFactory(factory)
}
@ -441,15 +421,13 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// - Throws: _ANTLRError.unsupportedOperation_ if the current parser does not
/// implement the _#getSerializedATN()_ method.
///
public func getATNWithBypassAlts() -> ATN {
let serializedAtn: String = getSerializedATN()
let serializedAtn = getSerializedATN()
var result: ATN? = bypassAltsAtnCache[serializedAtn]
bypassAltsAtnCacheMutex.synchronized {
[unowned self] in
var result = bypassAltsAtnCache[serializedAtn]
bypassAltsAtnCacheMutex.synchronized { [unowned self] in
if result == nil {
let deserializationOptions: ATNDeserializationOptions = ATNDeserializationOptions()
let deserializationOptions = ATNDeserializationOptions()
try! deserializationOptions.setGenerateRuleBypassTransitions(true)
result = try! ATNDeserializer(deserializationOptions).deserialize(Array(serializedAtn.characters))
self.bypassAltsAtnCache[serializedAtn] = result!
@ -471,14 +449,12 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
public func compileParseTreePattern(_ pattern: String, _ patternRuleIndex: Int) throws -> ParseTreePattern {
if let tokenStream = getTokenStream() {
let tokenSource: TokenSource = tokenStream.getTokenSource()
if tokenSource is Lexer {
let lexer: Lexer = tokenSource as! Lexer
let tokenSource = tokenStream.getTokenSource()
if let lexer = tokenSource as? Lexer {
return try compileParseTreePattern(pattern, patternRuleIndex, lexer)
}
}
throw ANTLRError.unsupportedOperation(msg: "Parser can't discover a lexer to use")
}
///
@ -487,7 +463,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
public func compileParseTreePattern(_ pattern: String, _ patternRuleIndex: Int,
_ lexer: Lexer) throws -> ParseTreePattern {
let m: ParseTreePatternMatcher = ParseTreePatternMatcher(lexer, self)
let m = ParseTreePatternMatcher(lexer, self)
return try m.compile(pattern, patternRuleIndex)
}
@ -530,19 +506,21 @@ open class Parser: Recognizer<ParserATNSimulator> {
return try _input.LT(1)!
}
public final func notifyErrorListeners(_ msg: String) throws {
try notifyErrorListeners(getCurrentToken(), msg, nil)
public final func notifyErrorListeners(_ msg: String) {
let token = try? getCurrentToken()
notifyErrorListeners(token, msg, nil)
}
public func notifyErrorListeners(_ offendingToken: Token, _ msg: String,
_ e: AnyObject?) {
public func notifyErrorListeners(_ offendingToken: Token?, _ msg: String, _ e: AnyObject?) {
_syntaxErrors += 1
var line: Int = -1
var charPositionInLine: Int = -1
line = offendingToken.getLine()
charPositionInLine = offendingToken.getCharPositionInLine()
var line = -1
var charPositionInLine = -1
if let offendingToken = offendingToken {
line = offendingToken.getLine()
charPositionInLine = offendingToken.getCharPositionInLine()
}
let listener: ANTLRErrorListener = getErrorListenerDispatch()
let listener = getErrorListenerDispatch()
listener.syntaxError(self, offendingToken, line, charPositionInLine, msg, e)
}
@ -569,27 +547,27 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
@discardableResult
public func consume() throws -> Token {
let o: Token = try getCurrentToken()
let o = try getCurrentToken()
if o.getType() != Parser.EOF {
try getInputStream()!.consume()
}
guard let _ctx = _ctx else {
return o
}
let hasListener: Bool = _parseListeners != nil && !_parseListeners!.isEmpty
let hasListener = _parseListeners != nil && !_parseListeners!.isEmpty
if _buildParseTrees || hasListener {
if _errHandler.inErrorRecoveryMode(self) {
let node: ErrorNode = _ctx.addErrorNode(createErrorNode(parent: _ctx, t: o))
let node = _ctx.addErrorNode(createErrorNode(parent: _ctx, t: o))
if let _parseListeners = _parseListeners {
for listener: ParseTreeListener in _parseListeners {
for listener in _parseListeners {
listener.visitErrorNode(node)
}
}
} else {
let node: TerminalNode = _ctx.addChild(createTerminalNode(parent: _ctx, t: o))
let node = _ctx.addChild(createTerminalNode(parent: _ctx, t: o))
if let _parseListeners = _parseListeners {
for listener: ParseTreeListener in _parseListeners {
for listener in _parseListeners {
listener.visitTerminal(node)
}
}
@ -705,7 +683,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// Make the current context the child of the incoming localctx.
///
public func pushNewRecursionContext(_ localctx: ParserRuleContext, _ state: Int, _ ruleIndex: Int) throws {
let previous: ParserRuleContext = _ctx!
let previous = _ctx!
previous.parent = localctx
previous.invokingState = state
previous.stop = try _input.LT(-1)
@ -724,12 +702,12 @@ open class Parser: Recognizer<ParserATNSimulator> {
public func unrollRecursionContexts(_ _parentctx: ParserRuleContext?) throws {
_precedenceStack.pop()
_ctx!.stop = try _input.LT(-1)
let retctx: ParserRuleContext = _ctx! // save current ctx (return value)
let retctx = _ctx! // save current ctx (return value)
// unroll so _ctx is as it was before call to recursive method
if _parseListeners != nil {
while let ctxWrap = _ctx , ctxWrap !== _parentctx {
try triggerExitRuleEvent()
while let ctxWrap = _ctx, ctxWrap !== _parentctx {
try triggerExitRuleEvent()
_ctx = ctxWrap.parent as? ParserRuleContext
}
} else {
@ -746,7 +724,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
}
public func getInvokingContext(_ ruleIndex: Int) -> ParserRuleContext? {
var p: ParserRuleContext? = _ctx
var p = _ctx
while let pWrap = p {
if pWrap.getRuleIndex() == ruleIndex {
return pWrap
@ -859,7 +837,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
// parser.getInterpreter()!.setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
//
// // get ambig trees
// var alt : Int = ambiguityInfo.ambigAlts.nextSetBit(0);
// var alt : Int = ambiguityInfo.ambigAlts.firstSetBit();
// while alt>=0 {
// // re-parse entire input for all ambiguous alternatives
// // (don't have to do first as it's been parsed, but do again for simplicity
@ -897,12 +875,11 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// - Returns: `true` if `symbol` can follow the current state in
/// the ATN, otherwise `false`.
///
public func isExpectedToken(_ symbol: Int) throws -> Bool {
// return getInterpreter().atn.nextTokens(_ctx);
let atn: ATN = getInterpreter().atn
public func isExpectedToken(_ symbol: Int) -> Bool {
let atn = getInterpreter().atn
var ctx: ParserRuleContext? = _ctx
let s: ATNState = atn.states[getState()]!
var following: IntervalSet = try atn.nextTokens(s)
let s = atn.states[getState()]!
var following = atn.nextTokens(s)
if following.contains(symbol) {
return true
}
@ -911,10 +888,10 @@ open class Parser: Recognizer<ParserATNSimulator> {
return false
}
while let ctxWrap = ctx , ctxWrap.invokingState >= 0 && following.contains(CommonToken.EPSILON) {
let invokingState: ATNState = atn.states[ctxWrap.invokingState]!
let rt: RuleTransition = invokingState.transition(0) as! RuleTransition
following = try atn.nextTokens(rt.followState)
while let ctxWrap = ctx, ctxWrap.invokingState >= 0 && following.contains(CommonToken.EPSILON) {
let invokingState = atn.states[ctxWrap.invokingState]!
let rt = invokingState.transition(0) as! RuleTransition
following = atn.nextTokens(rt.followState)
if following.contains(symbol) {
return true
}
@ -941,19 +918,15 @@ open class Parser: Recognizer<ParserATNSimulator> {
}
public func getExpectedTokensWithinCurrentRule() throws -> IntervalSet {
let atn: ATN = getInterpreter().atn
let s: ATNState = atn.states[getState()]!
return try atn.nextTokens(s)
public func getExpectedTokensWithinCurrentRule() -> IntervalSet {
let atn = getInterpreter().atn
let s = atn.states[getState()]!
return atn.nextTokens(s)
}
/// Get a rule's index (i.e., `RULE_ruleName` field) or -1 if not found.
public func getRuleIndex(_ ruleName: String) -> Int {
let ruleIndex: Int? = getRuleIndexMap()[ruleName]
if ruleIndex != nil {
return ruleIndex!
}
return -1
return getRuleIndexMap()[ruleName] ?? -1
}
public func getRuleContext() -> ParserRuleContext? {
@ -967,17 +940,17 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
/// This is very useful for error messages.
///
public func getRuleInvocationStack() -> Array<String> {
public func getRuleInvocationStack() -> [String] {
return getRuleInvocationStack(_ctx)
}
public func getRuleInvocationStack(_ p: RuleContext?) -> Array<String> {
public func getRuleInvocationStack(_ p: RuleContext?) -> [String] {
var p = p
var ruleNames: [String] = getRuleNames()
var stack: Array<String> = Array<String>()
var ruleNames = getRuleNames()
var stack = [String]()
while let pWrap = p {
// compute what follows who invoked us
let ruleIndex: Int = pWrap.getRuleIndex()
let ruleIndex = pWrap.getRuleIndex()
if ruleIndex < 0 {
stack.append("n/a")
} else {
@ -989,16 +962,14 @@ open class Parser: Recognizer<ParserATNSimulator> {
}
/// For debugging and other purposes.
public func getDFAStrings() -> Array<String> {
var s: Array<String> = Array<String>()
public func getDFAStrings() -> [String] {
var s = [String]()
guard let _interp = _interp else {
return s
}
decisionToDFAMutex.synchronized {
[unowned self] in
decisionToDFAMutex.synchronized { [unowned self] in
for d in 0..<_interp.decisionToDFA.count {
let dfa: DFA = _interp.decisionToDFA[d]
let dfa = _interp.decisionToDFA[d]
s.append(dfa.toString(self.getVocabulary()))
}
@ -1008,15 +979,13 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// For debugging and other purposes.
public func dumpDFA() {
guard let _interp = _interp else {
guard let _interp = _interp else {
return
}
decisionToDFAMutex.synchronized {
[unowned self] in
var seenOne: Bool = false
decisionToDFAMutex.synchronized { [unowned self] in
var seenOne = false
for d in 0..<_interp.decisionToDFA.count {
let dfa: DFA = _interp.decisionToDFA[d]
for dfa in _interp.decisionToDFA {
if !dfa.states.isEmpty {
if seenOne {
print("")
@ -1036,9 +1005,9 @@ open class Parser: Recognizer<ParserATNSimulator> {
override
open func getParseInfo() -> ParseInfo? {
let interp: ParserATNSimulator? = getInterpreter()
if interp is ProfilingATNSimulator {
return ParseInfo(interp as! ProfilingATNSimulator)
let interp = getInterpreter()
if let interp = interp as? ProfilingATNSimulator {
return ParseInfo(interp)
}
return nil
}
@ -1047,16 +1016,15 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// - Since: 4.3
///
public func setProfile(_ profile: Bool) {
let interp: ParserATNSimulator = getInterpreter()
let saveMode: PredictionMode = interp.getPredictionMode()
let interp = getInterpreter()
let saveMode = interp.getPredictionMode()
if profile {
if !(interp is ProfilingATNSimulator) {
setInterpreter(ProfilingATNSimulator(self))
}
} else {
if interp is ProfilingATNSimulator {
let sim: ParserATNSimulator =
ParserATNSimulator(self, getATN(), interp.decisionToDFA, interp.getSharedContextCache()!)
let sim = ParserATNSimulator(self, getATN(), interp.decisionToDFA, interp.getSharedContextCache()!)
setInterpreter(sim)
}
}

View File

@ -32,10 +32,6 @@ public class ParserInterpreter: Parser {
internal final var sharedContextCache: PredictionContextCache =
PredictionContextCache()
///
/// /@Deprecated
///
internal final var tokenNames: [String]
internal final var ruleNames: [String]
private final var vocabulary: Vocabulary
@ -64,7 +60,6 @@ public class ParserInterpreter: Parser {
self.grammarFileName = old.grammarFileName
self.statesNeedingLeftRecursionContext = old.statesNeedingLeftRecursionContext
self.decisionToDFA = old.decisionToDFA
self.tokenNames = old.tokenNames
self.ruleNames = old.ruleNames
self.vocabulary = old.vocabulary
try super.init(old.getTokenStream()!)
@ -73,26 +68,11 @@ public class ParserInterpreter: Parser {
sharedContextCache))
}
///
/// Use _#ParserInterpreter(String, org.antlr.v4.runtime.Vocabulary, java.util.Collection, org.antlr.v4.runtime.atn.ATN, org.antlr.v4.runtime.TokenStream)_ instead.
///
//@Deprecated
public convenience init(_ grammarFileName: String, _ tokenNames: Array<String?>?,
_ ruleNames: Array<String>, _ atn: ATN, _ input: TokenStream) throws {
try self.init(grammarFileName, Vocabulary.fromTokenNames(tokenNames), ruleNames, atn, input)
}
public init(_ grammarFileName: String, _ vocabulary: Vocabulary,
_ ruleNames: Array<String>, _ atn: ATN, _ input: TokenStream) throws {
self.grammarFileName = grammarFileName
self.atn = atn
self.tokenNames = [String]()// new String[atn.maxTokenType];
let length = tokenNames.count
for i in 0..<length {
tokenNames[i] = vocabulary.getDisplayName(i)
}
self.ruleNames = ruleNames
self.vocabulary = vocabulary
self.decisionToDFA = [DFA]() //new DFA[atn.getNumberOfDecisions()];
@ -123,14 +103,6 @@ public class ParserInterpreter: Parser {
return atn
}
// override
///
/// /@Deprecated
///
public func getTokenNames() -> [String] {
return tokenNames
}
override
public func getVocabulary() -> Vocabulary {
return vocabulary
@ -148,17 +120,17 @@ public class ParserInterpreter: Parser {
/// Begin parsing at startRuleIndex
public func parse(_ startRuleIndex: Int) throws -> ParserRuleContext {
let startRuleStartState: RuleStartState = atn.ruleToStartState[startRuleIndex]
let startRuleStartState = atn.ruleToStartState[startRuleIndex]
let rootContext: InterpreterRuleContext = InterpreterRuleContext(nil, ATNState.INVALID_STATE_NUMBER, startRuleIndex)
let rootContext = InterpreterRuleContext(nil, ATNState.INVALID_STATE_NUMBER, startRuleIndex)
if startRuleStartState.isPrecedenceRule {
try enterRecursionRule(rootContext, startRuleStartState.stateNumber, startRuleIndex, 0)
try enterRecursionRule(rootContext, startRuleStartState.stateNumber, startRuleIndex, 0)
} else {
try enterRule(rootContext, startRuleStartState.stateNumber, startRuleIndex)
}
while true {
let p: ATNState = getATNState()!
let p = getATNState()!
switch p.getStateType() {
case ATNState.RULE_STOP:
// pop; return from rule
@ -208,7 +180,7 @@ public class ParserInterpreter: Parser {
var altNum: Int
if p.getNumberOfTransitions() > 1 {
try getErrorHandler().sync(self)
let decision: Int = (p as! DecisionState).decision
let decision = (p as! DecisionState).decision
if decision == overrideDecision && _input.index() == overrideDecisionInputIndex {
altNum = overrideDecisionAlt
} else {
@ -218,7 +190,7 @@ public class ParserInterpreter: Parser {
altNum = 1
}
let transition: Transition = p.transition(altNum - 1)
let transition = p.transition(altNum - 1)
switch transition.getSerializationType() {
case Transition.EPSILON:
if try statesNeedingLeftRecursionContext.get(p.stateNumber) &&
@ -252,9 +224,9 @@ public class ParserInterpreter: Parser {
break
case Transition.RULE:
let ruleStartState: RuleStartState = transition.target as! RuleStartState
let ruleIndex: Int = ruleStartState.ruleIndex!
let ctx: InterpreterRuleContext = InterpreterRuleContext(_ctx, p.stateNumber, ruleIndex)
let ruleStartState = transition.target as! RuleStartState
let ruleIndex = ruleStartState.ruleIndex!
let ctx = InterpreterRuleContext(_ctx, p.stateNumber, ruleIndex)
if ruleStartState.isPrecedenceRule {
try enterRecursionRule(ctx, ruleStartState.stateNumber, ruleIndex, (transition as! RuleTransition).precedence)
} else {
@ -263,25 +235,20 @@ public class ParserInterpreter: Parser {
break
case Transition.PREDICATE:
let predicateTransition: PredicateTransition = transition as! PredicateTransition
let predicateTransition = transition as! PredicateTransition
if try !sempred(_ctx!, predicateTransition.ruleIndex, predicateTransition.predIndex) {
throw try ANTLRException.recognition(e: FailedPredicateException(self))
throw ANTLRException.recognition(e: FailedPredicateException(self))
}
break
case Transition.ACTION:
let actionTransition: ActionTransition = transition as! ActionTransition
let actionTransition = transition as! ActionTransition
try action(_ctx, actionTransition.ruleIndex, actionTransition.actionIndex)
break
case Transition.PRECEDENCE:
if !precpred(_ctx!, (transition as! PrecedencePredicateTransition).precedence) {
throw try ANTLRException.recognition(e: FailedPredicateException(self, "precpred(_ctx,\((transition as! PrecedencePredicateTransition).precedence))"))
throw ANTLRException.recognition(e: FailedPredicateException(self, "precpred(_ctx,\((transition as! PrecedencePredicateTransition).precedence))"))
}
break
@ -294,16 +261,16 @@ public class ParserInterpreter: Parser {
}
internal func visitRuleStopState(_ p: ATNState) throws {
let ruleStartState: RuleStartState = atn.ruleToStartState[p.ruleIndex!]
let ruleStartState = atn.ruleToStartState[p.ruleIndex!]
if ruleStartState.isPrecedenceRule {
let parentContext: (ParserRuleContext?, Int) = _parentContextStack.pop()
try unrollRecursionContexts(parentContext.0!)
setState(parentContext.1)
let (parentContext, parentState) = _parentContextStack.pop()
try unrollRecursionContexts(parentContext!)
setState(parentState)
} else {
try exitRule()
}
let ruleTransition: RuleTransition = atn.states[getState()]!.transition(0) as! RuleTransition
let ruleTransition = atn.states[getState()]!.transition(0) as! RuleTransition
setState(ruleTransition.followState.stateNumber)
}

View File

@ -13,21 +13,20 @@
///
public class ProxyErrorListener: ANTLRErrorListener {
private final var delegates: Array<ANTLRErrorListener>
public init(_ delegates: Array<ANTLRErrorListener>) {
private final var delegates: [ANTLRErrorListener]
public init(_ delegates: [ANTLRErrorListener]) {
self.delegates = delegates
}
public func syntaxError<T:ATNSimulator>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?)
{
for listener: ANTLRErrorListener in delegates {
public func syntaxError<T>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?)
{
for listener in delegates {
listener.syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e)
}
}
@ -39,9 +38,9 @@ public class ProxyErrorListener: ANTLRErrorListener {
_ stopIndex: Int,
_ exact: Bool,
_ ambigAlts: BitSet,
_ configs: ATNConfigSet) throws {
for listener: ANTLRErrorListener in delegates {
try listener.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
_ configs: ATNConfigSet) {
for listener in delegates {
listener.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
}
}
@ -51,9 +50,9 @@ public class ProxyErrorListener: ANTLRErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ conflictingAlts: BitSet?,
_ configs: ATNConfigSet) throws {
for listener: ANTLRErrorListener in delegates {
try listener.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs)
_ configs: ATNConfigSet) {
for listener in delegates {
listener.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs)
}
}
@ -63,9 +62,9 @@ public class ProxyErrorListener: ANTLRErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ prediction: Int,
_ configs: ATNConfigSet) throws {
for listener: ANTLRErrorListener in delegates {
try listener.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs)
_ configs: ATNConfigSet) {
for listener in delegates {
listener.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs)
}
}
}

View File

@ -11,16 +11,15 @@
/// and what kind of problem occurred.
///
public class RecognitionException<T:ATNSimulator> {
public class RecognitionException {
///
/// The _org.antlr.v4.runtime.Recognizer_ where this exception originated.
///
private final var recognizer: Recognizer<T>?
//Recognizer<AnyObject,ATNSimulator>? ;
private final var recognizer: RecognizerProtocol?
private final var ctx: RuleContext?
private final weak var ctx: RuleContext?
private final var input: IntStream
private final var input: IntStream?
///
/// The current _org.antlr.v4.runtime.Token_ when an error occurred. Since not all streams
@ -29,28 +28,18 @@ public class RecognitionException<T:ATNSimulator> {
///
private var offendingToken: Token!
private var offendingState: Int = -1
private var offendingState = -1
public var message: String?
public init(_ recognizer: Recognizer<T>?,
_ input: IntStream,
_ ctx: ParserRuleContext?) {
self.recognizer = recognizer
self.input = input
self.ctx = ctx
if let recognizer = recognizer {
self.offendingState = recognizer.getState()
}
}
public init(_ message: String,
_ recognizer: Recognizer<T>?,
public init(_ recognizer: RecognizerProtocol?,
_ input: IntStream,
_ ctx: ParserRuleContext?) {
self.message = message
_ ctx: ParserRuleContext? = nil,
_ message: String? = nil) {
self.recognizer = recognizer
self.input = input
self.ctx = ctx
self.message = message
if let recognizer = recognizer {
self.offendingState = recognizer.getState()
}
@ -87,7 +76,6 @@ public class RecognitionException<T:ATNSimulator> {
if let recognizer = recognizer {
return try? recognizer.getATN().getExpectedTokens(offendingState, ctx!)
}
return nil
}
@ -113,10 +101,13 @@ public class RecognitionException<T:ATNSimulator> {
/// where this exception was thrown, or `null` if the stream is not
/// available.
///
public func getInputStream() -> IntStream {
public func getInputStream() -> IntStream? {
return input
}
public func clearInputStream() {
input = nil
}
public func getOffendingToken() -> Token {
return offendingToken
@ -134,7 +125,11 @@ public class RecognitionException<T:ATNSimulator> {
/// - Returns: The recognizer where this exception occurred, or `null` if
/// the recognizer is not available.
///
public func getRecognizer() -> Recognizer<T>? {
public func getRecognizer() -> RecognizerProtocol? {
return recognizer
}
public func clearRecognizer() {
self.recognizer = nil
}
}

View File

@ -5,60 +5,53 @@
import Foundation
open class Recognizer<ATNInterpreter:ATNSimulator> {
//public static let EOF: Int = -1
public protocol RecognizerProtocol {
func getATN() -> ATN
func getGrammarFileName() -> String
func getParseInfo() -> ParseInfo?
func getRuleNames() -> [String]
func getSerializedATN() -> String
func getState() -> Int
func getTokenType(_ tokenName: String) -> Int
func getVocabulary() -> Vocabulary
}
open class Recognizer<ATNInterpreter: ATNSimulator>: RecognizerProtocol {
//TODO: WeakKeyDictionary NSMapTable Dictionary MapTable<Vocabulary,HashMap<String, Int>>
private let tokenTypeMapCache = HashMap<Vocabulary,Dictionary<String, Int>>()
private let tokenTypeMapCache = HashMap<Vocabulary, [String : Int]>()
private let ruleIndexMapCache = HashMap<ArrayWrapper<String>,Dictionary<String, Int>>()
private var _listeners: Array<ANTLRErrorListener> = [ConsoleErrorListener.INSTANCE]
private let ruleIndexMapCache = HashMap<ArrayWrapper<String>, [String : Int]>()
private var _listeners: [ANTLRErrorListener] = [ConsoleErrorListener.INSTANCE]
public var _interp: ATNInterpreter!
private var _stateNumber: Int = -1
private var _stateNumber = -1
///
/// mutex for tokenTypeMapCache updates
///
private var tokenTypeMapCacheMutex = Mutex()
private let tokenTypeMapCacheMutex = Mutex()
///
/// mutex for ruleIndexMapCacheMutex updates
///
private var ruleIndexMapCacheMutex = Mutex()
/// Used to print out token names like ID during debugging and
/// error reporting. The generated parsers implement a method
/// that overrides this to point to their String[] tokenNames.
///
/// Use _#getVocabulary()_ instead.
///
///
/// /@Deprecated
///
open func getTokenNames() -> [String?]? {
RuntimeException(#function + " must be overridden")
return []
}
private let ruleIndexMapCacheMutex = Mutex()
open func getRuleNames() -> [String] {
RuntimeException(#function + " must be overridden")
return []
fatalError(#function + " must be overridden")
}
///
///
/// Get the vocabulary used by the recognizer.
///
/// - Returns: A _org.antlr.v4.runtime.Vocabulary_ instance providing information about the
/// vocabulary used by the grammar.
///
open func getVocabulary() -> Vocabulary {
return Vocabulary.fromTokenNames(getTokenNames())
fatalError(#function + " must be overridden")
}
///
@ -66,35 +59,29 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
///
/// Used for XPath and tree pattern compilation.
///
public func getTokenTypeMap() -> Dictionary<String, Int> {
let vocabulary: Vocabulary = getVocabulary()
var result: Dictionary<String, Int>? = self.tokenTypeMapCache[vocabulary]
tokenTypeMapCacheMutex.synchronized {
[unowned self] in
public func getTokenTypeMap() -> [String : Int] {
let vocabulary = getVocabulary()
var result = tokenTypeMapCache[vocabulary]
tokenTypeMapCacheMutex.synchronized { [unowned self] in
if result == nil {
result = Dictionary<String, Int>()
result = [String : Int]()
let length = self.getATN().maxTokenType
for i in 0...length {
let literalName: String? = vocabulary.getLiteralName(i)
if literalName != nil {
result![literalName!] = i
if let literalName = vocabulary.getLiteralName(i) {
result![literalName] = i
}
let symbolicName: String? = vocabulary.getSymbolicName(i)
if symbolicName != nil {
result![symbolicName!] = i
if let symbolicName = vocabulary.getSymbolicName(i) {
result![symbolicName] = i
}
}
result!["EOF"] = CommonToken.EOF
//TODO Result Collections.unmodifiableMap
self.tokenTypeMapCache[vocabulary] = result!
}
}
return result!
}
///
@ -102,26 +89,20 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
///
/// Used for XPath and tree pattern compilation.
///
public func getRuleIndexMap() -> Dictionary<String, Int> {
let ruleNames: [String] = getRuleNames()
public func getRuleIndexMap() -> [String : Int] {
let ruleNames = getRuleNames()
let result: Dictionary<String, Int>? = self.ruleIndexMapCache[ArrayWrapper<String>(ruleNames)]
ruleIndexMapCacheMutex.synchronized {
[unowned self] in
let result = ruleIndexMapCache[ArrayWrapper<String>(ruleNames)]
ruleIndexMapCacheMutex.synchronized { [unowned self] in
if result == nil {
self.ruleIndexMapCache[ArrayWrapper<String>(ruleNames)] = Utils.toMap(ruleNames)
}
}
return result!
}
public func getTokenType(_ tokenName: String) -> Int {
let ttype: Int? = getTokenTypeMap()[tokenName]
if ttype != nil {
return ttype!
}
return CommonToken.INVALID_TYPE
return getTokenTypeMap()[tokenName] ?? CommonToken.INVALID_TYPE
}
///
@ -132,16 +113,14 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
/// created the interpreter from it.
///
open func getSerializedATN() -> String {
RuntimeException("there is no serialized ATN")
fatalError()
fatalError("there is no serialized ATN")
}
/// For debugging and other purposes, might want the grammar name.
/// Have ANTLR generate an implementation for this method.
///
open func getGrammarFileName() -> String {
RuntimeException(#function + " must be overridden")
return ""
fatalError(#function + " must be overridden")
}
///
@ -150,8 +129,7 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
/// - Returns: The _org.antlr.v4.runtime.atn.ATN_ used by the recognizer for prediction.
///
open func getATN() -> ATN {
RuntimeException(#function + " must be overridden")
fatalError()
fatalError(#function + " must be overridden")
}
///
@ -185,54 +163,14 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
///
/// What is the error header, normally line/character position information?
///
open func getErrorHeader(_ e: AnyObject) -> String {
let line: Int = (e as! RecognitionException).getOffendingToken().getLine()
let charPositionInLine: Int = (e as! RecognitionException).getOffendingToken().getCharPositionInLine()
return "line " + String(line) + ":" + String(charPositionInLine)
open func getErrorHeader(_ e: RecognitionException) -> String {
let offending = e.getOffendingToken()
let line = offending.getLine()
let charPositionInLine = offending.getCharPositionInLine()
return "line \(line):\(charPositionInLine)"
}
/// How should a token be displayed in an error message? The default
/// is to display just the text, but during development you might
/// want to have a lot of information spit out. Override in that case
/// to use t.toString() (which, for CommonToken, dumps everything about
/// the token). This is better than forcing you to override a method in
/// your token objects because you don't have to go modify your lexer
/// so that it creates a new Java type.
///
/// This method is not called by the ANTLR 4 Runtime. Specific
/// implementations of _org.antlr.v4.runtime.ANTLRErrorStrategy_ may provide a similar
/// feature when necessary. For example, see
/// _org.antlr.v4.runtime.DefaultErrorStrategy#getTokenErrorDisplay_.
///
///
/// /@Deprecated
///
open func getTokenErrorDisplay(_ t: Token?) -> String {
guard let t = t else {
return "<no token>"
}
var s: String
if let text = t.getText() {
s = text
} else {
if t.getType() == CommonToken.EOF {
s = "<EOF>"
} else {
s = "<\(t.getType())>"
}
}
s = s.replacingOccurrences(of: "\n", with: "\\n")
s = s.replacingOccurrences(of: "\r", with: "\\r")
s = s.replacingOccurrences(of: "\t", with: "\\t")
return "\(s)"
}
///
/// - Throws: ANTLRError.nullPointer if `listener` is `null`.
///
open func addErrorListener(_ listener: ANTLRErrorListener) {
_listeners.append(listener)
}
@ -240,16 +178,13 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
_listeners = _listeners.filter() {
$0 !== listener
}
// _listeners.removeObject(listener);
}
open func removeErrorListeners() {
_listeners.removeAll()
}
open func getErrorListeners() -> Array<ANTLRErrorListener> {
open func getErrorListeners() -> [ANTLRErrorListener] {
return _listeners
}
@ -263,7 +198,7 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
return true
}
open func precpred(_ localctx: RuleContext?, _ precedence: Int) throws -> Bool {
open func precpred(_ localctx: RuleContext?, _ precedence: Int) -> Bool {
return true
}
@ -288,22 +223,18 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
}
open func getInputStream() -> IntStream? {
RuntimeException(#function + "Must be overridden")
fatalError()
fatalError(#function + " must be overridden")
}
open func setInputStream(_ input: IntStream) throws {
RuntimeException(#function + "Must be overridden")
fatalError(#function + " must be overridden")
}
open func getTokenFactory() -> TokenFactory {
RuntimeException(#function + "Must be overridden")
fatalError()
fatalError(#function + " must be overridden")
}
open func setTokenFactory(_ input: TokenFactory) {
RuntimeException(#function + "Must be overridden")
fatalError(#function + " must be overridden")
}
}

View File

@ -56,17 +56,18 @@
///
open class RuleContext: RuleNode {
public static let EMPTY: ParserRuleContext = ParserRuleContext()
public static let EMPTY = ParserRuleContext()
/// What context invoked this rule?
public var parent: RuleContext?
public weak var parent: RuleContext?
/// What state invoked the rule associated with this context?
/// The "return address" is the followState of invokingState
/// If parent is null, this should be -1 this context object represents
/// the start rule.
///
public var invokingState: Int = -1
public var invokingState = -1
override
public init() {
super.init()
@ -79,7 +80,7 @@ open class RuleContext: RuleNode {
}
open func depth() -> Int {
var n: Int = 0
var n = 0
var p: RuleContext? = self
while let pWrap = p {
p = pWrap.parent
@ -131,7 +132,7 @@ open class RuleContext: RuleNode {
return ""
}
let builder: StringBuilder = StringBuilder()
let builder = StringBuilder()
for i in 0..<length {
builder.append((getChild(i) as! ParseTree).getText())
}
@ -159,54 +160,10 @@ open class RuleContext: RuleNode {
return visitor.visitChildren(self)
}
// /// Call this method to view a parse tree in a dialog box visually.
// public func inspect(parser : Parser) -> Future<JDialog> {
// var ruleNames : Array<String> = parser != nil ? Arrays.asList(parser.getRuleNames()) : null;
// return inspect(ruleNames);
// }
//
// public func inspect(ruleNames : Array<String>) -> Future<JDialog> {
// var viewer : TreeViewer = TreeViewer(ruleNames, self);
// return viewer.open();
// }
//
// /// Save this tree in a postscript file
// public func save(parser : Parser, _ fileName : String)
// throws; IOException, PrintException
// {
// var ruleNames : Array<String> = parser != nil ? Arrays.asList(parser.getRuleNames()) : null;
// save(ruleNames, fileName);
// }
//
// /// Save this tree in a postscript file using a particular font name and size
// public func save(parser : Parser, _ fileName : String,
// _ fontName : String, _ fontSize : Int)
// throws; IOException
// {
// var ruleNames : Array<String> = parser != nil ? Arrays.asList(parser.getRuleNames()) : null;
// save(ruleNames, fileName, fontName, fontSize);
// }
//
// /// Save this tree in a postscript file
// public func save(ruleNames : Array<String>, _ fileName : String)
// throws; IOException, PrintException
// {
// Trees.writePS(self, ruleNames, fileName);
// }
//
// /// Save this tree in a postscript file using a particular font name and size
// public func save(ruleNames : Array<String>, _ fileName : String,
// _ fontName : String, _ fontSize : Int)
// throws; IOException
// {
// Trees.writePS(self, ruleNames, fileName, fontName, fontSize);
// }
/// Print out a whole tree, not just a node, in LISP format
/// (root child1 .. childN). Print just a node if this is a leaf.
/// We have to know the recognizer so we can get rule names.
///
///
open override func toStringTree(_ recog: Parser) -> String {
return Trees.toStringTree(self, recog)
}
@ -214,54 +171,51 @@ open class RuleContext: RuleNode {
/// Print out a whole tree, not just a node, in LISP format
/// (root child1 .. childN). Print just a node if this is a leaf.
///
public func toStringTree(_ ruleNames: Array<String>?) -> String {
public func toStringTree(_ ruleNames: [String]?) -> String {
return Trees.toStringTree(self, ruleNames)
}
open override func toStringTree() -> String {
let info: Array<String>? = nil
return toStringTree(info)
return toStringTree(nil)
}
open override var description: String {
let p1: Array<String>? = nil
let p2: RuleContext? = nil
return toString(p1, p2)
return toString(nil, nil)
}
open override var debugDescription: String {
return description
}
public final func toString<T:ATNSimulator>(_ recog: Recognizer<T>) -> String {
public final func toString<T>(_ recog: Recognizer<T>) -> String {
return toString(recog, ParserRuleContext.EMPTY)
}
public final func toString(_ ruleNames: Array<String>) -> String {
public final func toString(_ ruleNames: [String]) -> String {
return toString(ruleNames, nil)
}
// recog null unless ParserRuleContext, in which case we use subclass toString(...)
open func toString<T:ATNSimulator>(_ recog: Recognizer<T>?, _ stop: RuleContext) -> String {
let ruleNames: [String]? = recog != nil ? recog!.getRuleNames() : nil
let ruleNamesList: Array<String>? = ruleNames ?? nil
return toString(ruleNamesList, stop)
open func toString<T>(_ recog: Recognizer<T>?, _ stop: RuleContext) -> String {
let ruleNames = recog?.getRuleNames()
return toString(ruleNames, stop)
}
open func toString(_ ruleNames: Array<String>?, _ stop: RuleContext?) -> String {
let buf: StringBuilder = StringBuilder()
open func toString(_ ruleNames: [String]?, _ stop: RuleContext?) -> String {
let buf = StringBuilder()
var p: RuleContext? = self
buf.append("[")
while let pWrap = p , pWrap !== stop {
if ruleNames == nil {
while let pWrap = p, pWrap !== stop {
if let ruleNames = ruleNames {
let ruleIndex = pWrap.getRuleIndex()
let ruleIndexInRange = (ruleIndex >= 0 && ruleIndex < ruleNames.count)
let ruleName = (ruleIndexInRange ? ruleNames[ruleIndex] : String(ruleIndex))
buf.append(ruleName)
}
else {
if !pWrap.isEmpty() {
buf.append(pWrap.invokingState)
}
} else {
let ruleIndex: Int = pWrap.getRuleIndex()
let ruleIndexInRange: Bool = ruleIndex >= 0 && ruleIndex < ruleNames!.count
let ruleName: String = ruleIndexInRange ? ruleNames![ruleIndex] : String(ruleIndex)
buf.append(ruleName)
}
if pWrap.parent != nil && (ruleNames != nil || !pWrap.parent!.isEmpty()) {

View File

@ -98,5 +98,7 @@ public protocol Token: class, CustomStringConvertible {
///
func getInputStream() -> CharStream?
func getTokenSourceAndStream() -> TokenSourceAndStream
var visited: Bool { get set }
}

View File

@ -8,7 +8,6 @@
/// the error handling strategy (to create missing tokens). Notifying the parser
/// of a new factory means that it notifies it's token source and error strategy.
///
public protocol TokenFactory {
//typealias Symbol
@ -16,10 +15,33 @@ public protocol TokenFactory {
/// error handling strategy. If text!=null, than the start and stop positions
/// are wiped to -1 in the text override is set in the CommonToken.
///
func create(_ source: (TokenSource?, CharStream?), _ type: Int, _ text: String?,
func create(_ source: TokenSourceAndStream, _ type: Int, _ text: String?,
_ channel: Int, _ start: Int, _ stop: Int,
_ line: Int, _ charPositionInLine: Int) -> Token
/// Generically useful
func create(_ type: Int, _ text: String) -> Token
}
/**
Holds the references to the TokenSource and CharStream used to create a Token.
These are together to reduce memory footprint by having one instance of
TokenSourceAndStream shared across many tokens. The references here are weak
to avoid retain cycles.
*/
public class TokenSourceAndStream {
///
/// An empty TokenSourceAndStream which is used as the default value of
/// _#source_ for tokens that do not have a source.
///
public static let EMPTY = TokenSourceAndStream()
public weak var tokenSource: TokenSource?
public weak var stream: CharStream?
public init(_ tokenSource: TokenSource? = nil, _ stream: CharStream? = nil) {
self.tokenSource = tokenSource
self.stream = stream
}
}

View File

@ -63,7 +63,6 @@ public protocol TokenStream: IntStream {
///
/// - Parameter interval: The interval of tokens within this stream to get text
/// for.
/// - Throws: ANTLRError.nullPointer if `interval` is `null`
/// - Returns: The text of all tokens within the specified interval in this
/// stream.
///

View File

@ -0,0 +1,385 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
import Foundation
/** Do not buffer up the entire char stream. It does keep a small buffer
* for efficiency and also buffers while a mark exists (set by the
* lookahead prediction in parser). "Unbuffered" here refers to fact
 * that it doesn't buffer all data, not that it's on-demand loading of chars.
*
* Before 4.7, this class used the default environment encoding to convert
* bytes to UTF-16, and held the UTF-16 bytes in the buffer as chars.
*
* As of 4.7, the class uses UTF-8 by default, and the buffer holds Unicode
* code points in the buffer as ints.
*/
open class UnbufferedCharStream: CharStream {
    /**
     * A moving window buffer of the data being scanned. While there's a marker,
     * we keep adding to buffer. Otherwise, {@link #consume consume()} resets so
     * we start filling at index 0 again.
     */
    internal var data: [Int]

    /**
     * The number of characters currently in {@link #data data}.
     *
     * <p>This is not the buffer capacity, that's {@code data.count}.</p>
     */
    internal var n = 0

    /**
     * 0..n-1 index into {@link #data data} of next character.
     *
     * <p>The {@code LA(1)} character is {@code data[p]}. If {@code p == n}, we are
     * out of buffered characters.</p>
     */
    internal var p = 0

    /**
     * Count up with {@link #mark mark()} and down with
     * {@link #release release()}. When we {@code release()} the last mark,
     * {@code numMarkers} reaches 0 and we reset the buffer. Copy
     * {@code data[p]..data[n-1]} to {@code data[0]..data[(n-1)-p]}.
     */
    internal var numMarkers = 0

    /**
     * This is the {@code LA(-1)} character for the current position.
     */
    internal var lastChar = -1

    /**
     * When {@code numMarkers > 0}, this is the {@code LA(-1)} character for the
     * first character in {@link #data data}. Otherwise, this is unspecified.
     */
    internal var lastCharBufferStart = 0

    /**
     * Absolute character index. It's the index of the character about to be
     * read via {@code LA(1)}. Goes from 0 to the number of characters in the
     * entire stream, although the stream size is unknown before the end is
     * reached.
     */
    internal var currentCharIndex = 0

    internal let input: InputStream
    private var unicodeIterator: UnicodeScalarStreamIterator

    /** The name or source of this char stream. */
    public var name: String = ""

    /// - Parameters:
    ///   - input: the byte stream to decode; callers are responsible for
    ///     opening it before the first read.
    ///   - bufferSize: initial size of the sliding window (grows as needed).
    public init(_ input: InputStream, _ bufferSize: Int = 256) {
        self.input = input
        self.data = [Int](repeating: 0, count: bufferSize)
        let si = UInt8StreamIterator(input)
        self.unicodeIterator = UnicodeScalarStreamIterator(si)
    }

    /// Advance past the current character.
    /// - Throws: ANTLRError.illegalState if already at EOF.
    public func consume() throws {
        if try LA(1) == CommonToken.EOF {
            throw ANTLRError.illegalState(msg: "cannot consume EOF")
        }

        // buf always has at least data[p==0] in this method due to ctor
        lastChar = data[p] // track last char for LA(-1)

        if p == n - 1 && numMarkers == 0 {
            n = 0
            p = -1 // p++ will leave this at 0
            lastCharBufferStart = lastChar
        }

        p += 1
        currentCharIndex += 1
        sync(1)
    }

    /**
     * Make sure we have 'need' elements from current position {@link #p p}.
     * Last valid {@code p} index is {@code data.count-1}. {@code p+need-1} is
     * the char index 'need' elements ahead. If we need 1 element,
     * {@code (p+1-1)==p} must be less than {@code data.count}.
     */
    internal func sync(_ want: Int) {
        let need = (p + want - 1) - n + 1 // how many more elements we need?
        if need > 0 {
            fill(need)
        }
    }

    /**
     * Add {@code toAdd} characters to the buffer. Returns the number of
     * characters actually added to the buffer. If the return value is less
     * than {@code toAdd}, then EOF was reached before {@code toAdd} characters
     * could be added.
     */
    @discardableResult internal func fill(_ toAdd: Int) -> Int {
        for i in 0 ..< toAdd {
            if n > 0 && data[n - 1] == CommonToken.EOF {
                return i
            }

            guard let c = nextChar() else {
                return i
            }
            add(c)
        }

        return n
    }

    /**
     * Override to provide different source of characters than
     * {@link #input input}.
     *
     * Returns nil both at end of input and when a decoding/stream error
     * occurred; callers treat either as "no more characters".
     */
    internal func nextChar() -> Int? {
        return unicodeIterator.next().map { Int($0.value) }
    }

    /// Append a code point to the buffer, doubling the buffer when full.
    internal func add(_ c: Int) {
        if n >= data.count {
            data += [Int](repeating: 0, count: data.count)
        }
        data[n] = c
        n += 1
    }

    /// Look ahead (i > 0) or back (i == -1) relative to the current position.
    /// - Throws: ANTLRError.indexOutOfBounds when looking before the start of
    ///   the buffered window.
    public func LA(_ i: Int) throws -> Int {
        // NOTE: a leftover debug print() was removed here; this method is
        // called for every lookahead and must not emit output.
        if i == -1 {
            return lastChar // special case
        }
        sync(i)
        let index = p + i - 1
        if index < 0 {
            throw ANTLRError.indexOutOfBounds(msg: "")
        }
        if index >= n {
            return CommonToken.EOF
        }
        return data[index]
    }

    /**
     * Return a marker that we can release later.
     *
     * <p>The specific marker value used for this class allows for some level of
     * protection against misuse where {@code seek()} is called on a mark or
     * {@code release()} is called in the wrong order.</p>
     */
    public func mark() -> Int {
        if numMarkers == 0 {
            lastCharBufferStart = lastChar
        }

        let mark = -numMarkers - 1
        numMarkers += 1
        return mark
    }

    /** Decrement number of markers, resetting buffer if we hit 0.
     * @param marker
     */
    public func release(_ marker: Int) throws {
        let expectedMark = -numMarkers
        if marker != expectedMark {
            preconditionFailure("release() called with an invalid marker.")
        }

        numMarkers -= 1
        if numMarkers == 0 && p > 0 {
            // release buffer when we can, but don't do unnecessary work

            // Copy data[p]..data[n-1] to data[0]..data[(n-1)-p], reset ptrs.
            // p is last valid char; move nothing if p==n as we have no valid char.
            //
            // Pad back to the previous element count. (Using `count` rather
            // than `capacity` keeps this deterministic: Swift gives no
            // guarantee that Array.capacity survives reallocation, so the old
            // `precondition(data.capacity == dataCapacity)` could trip
            // spuriously.)
            let oldCount = data.count
            data = Array(data[p ..< n])
            data += [Int](repeating: 0, count: oldCount - (n - p))
            n = n - p
            p = 0
            lastCharBufferStart = lastChar
        }
    }

    /// Absolute index of the character about to be read via LA(1).
    public func index() -> Int {
        return currentCharIndex
    }

    /** Seek to absolute character index, which might not be in the current
     * sliding window. Move {@code p} to {@code index-bufferStartIndex}.
     */
    public func seek(_ index_: Int) throws {
        var index = index_

        if index == currentCharIndex {
            return
        }

        if index > currentCharIndex {
            sync(index - currentCharIndex)
            index = min(index, getBufferStartIndex() + n - 1)
        }

        // index == to bufferStartIndex should set p to 0
        let i = index - getBufferStartIndex()
        if i < 0 {
            throw ANTLRError.illegalArgument(msg: "cannot seek to negative index \(index)")
        }
        else if i >= n {
            let si = getBufferStartIndex()
            let ei = si + n
            let msg = "seek to index outside buffer: \(index) not in \(si)..\(ei)"
            throw ANTLRError.unsupportedOperation(msg: msg)
        }

        p = i
        currentCharIndex = index
        if p == 0 {
            lastChar = lastCharBufferStart
        }
        else {
            lastChar = data[p - 1]
        }
    }

    /// An unbuffered stream cannot know its total size; calling this is a
    /// programming error.
    public func size() -> Int {
        preconditionFailure("Unbuffered stream cannot know its size")
    }

    public func getSourceName() -> String {
        return name
    }

    /// Return the text of the given absolute-index interval. Only text still
    /// inside the sliding window can be retrieved.
    /// - Throws: ANTLRError.illegalArgument for a malformed interval or one
    ///   past EOF; ANTLRError.unsupportedOperation when the interval has
    ///   scrolled out of the buffer.
    public func getText(_ interval: Interval) throws -> String {
        if interval.a < 0 || interval.b < interval.a - 1 {
            throw ANTLRError.illegalArgument(msg: "invalid interval")
        }

        let bufferStartIndex = getBufferStartIndex()
        if n > 0 &&
            data[n - 1] == CommonToken.EOF &&
            interval.a + interval.length() > bufferStartIndex + n {
            throw ANTLRError.illegalArgument(msg: "the interval extends past the end of the stream")
        }

        if interval.a < bufferStartIndex || interval.b >= bufferStartIndex + n {
            let msg = "interval \(interval) outside buffer: \(bufferStartIndex)...\(bufferStartIndex + n - 1)"
            throw ANTLRError.unsupportedOperation(msg: msg)
        }

        if interval.b < interval.a {
            // The EOF token.
            return ""
        }

        // convert from absolute to local index
        let i = interval.a - bufferStartIndex
        let j = interval.b - bufferStartIndex

        // Convert from Int codepoints to a String.
        let codepoints = data[i ... j].map { Character(Unicode.Scalar($0)!) }
        return String(codepoints)
    }

    /// Absolute index of data[0] within the entire stream.
    internal func getBufferStartIndex() -> Int {
        return currentCharIndex - p
    }
}
/// Pulls bytes out of a Foundation `InputStream` in chunks and hands them out
/// one at a time. The stream must already be open before the first `next()`.
fileprivate struct UInt8StreamIterator: IteratorProtocol {
    private static let bufferSize = 1024

    private let stream: InputStream
    private var buffer = [UInt8](repeating: 0, count: UInt8StreamIterator.bufferSize)
    private var buffGen: IndexingIterator<ArraySlice<UInt8>>
    /// Set permanently once a stream error is observed; `next()` then always
    /// returns nil.
    var hasErrorOccurred = false

    init(_ stream: InputStream) {
        self.stream = stream
        // Start with an empty slice so the first next() triggers a read.
        self.buffGen = buffer[0..<0].makeIterator()
    }

    mutating func next() -> UInt8? {
        if let result = buffGen.next() {
            return result
        }

        if hasErrorOccurred {
            return nil
        }

        switch stream.streamStatus {
        case .notOpen, .writing, .closed:
            // Misuse by the caller (stream not opened for reading).
            preconditionFailure()
        case .atEnd:
            return nil
        case .error:
            hasErrorOccurred = true
            return nil
        case .opening, .open, .reading:
            break
        }

        // Use buffer.count, NOT buffer.capacity: read(_:maxLength:) writes
        // directly into the array's storage, and capacity may exceed the
        // initialized element count, permitting a write past the initialized
        // region.
        let count = stream.read(&buffer, maxLength: buffer.count)
        if count < 0 {
            // Negative return is the documented error indicator.
            hasErrorOccurred = true
            return nil
        }
        if count == 0 {
            // Zero means the end of the buffer was reached, not an error.
            return nil
        }

        buffGen = buffer.prefix(count).makeIterator()
        return buffGen.next()
    }
}
/// Decodes the bytes produced by a `UInt8StreamIterator` as UTF-8 and yields
/// one `Unicode.Scalar` per call.
fileprivate struct UnicodeScalarStreamIterator: IteratorProtocol {
    private var byteIterator: UInt8StreamIterator
    private var utf8Decoder = Unicode.UTF8()
    /// Set permanently when either the byte source reports an error or the
    /// bytes are not valid UTF-8.
    var hasErrorOccurred = false

    init(_ streamIterator: UInt8StreamIterator) {
        self.byteIterator = streamIterator
    }

    mutating func next() -> Unicode.Scalar? {
        guard !byteIterator.hasErrorOccurred else {
            hasErrorOccurred = true
            return nil
        }

        let step = utf8Decoder.decode(&byteIterator)
        switch step {
        case .scalarValue(let scalar):
            return scalar
        case .emptyInput:
            return nil
        case .error:
            hasErrorOccurred = true
            return nil
        }
    }
}

View File

@ -4,7 +4,7 @@
*/
public class UnbufferedTokenStream<T>: TokenStream {
public class UnbufferedTokenStream: TokenStream {
internal var tokenSource: TokenSource
///
@ -274,10 +274,7 @@ public class UnbufferedTokenStream<T>: TokenStream {
public func size() -> Int {
RuntimeException("Unbuffered stream cannot know its size")
fatalError()
fatalError("Unbuffered stream cannot know its size")
}

View File

@ -73,9 +73,9 @@ public class ATN {
/// the rule surrounding `s`. In other words, the set will be
/// restricted to tokens reachable staying within `s`'s rule.
///
public func nextTokens(_ s: ATNState, _ ctx: RuleContext?)throws -> IntervalSet {
let anal: LL1Analyzer = LL1Analyzer(self)
let next: IntervalSet = try anal.LOOK(s, ctx)
public func nextTokens(_ s: ATNState, _ ctx: RuleContext?) -> IntervalSet {
let anal = LL1Analyzer(self)
let next = anal.LOOK(s, ctx)
return next
}
@ -84,14 +84,14 @@ public class ATN {
/// staying in same rule. _org.antlr.v4.runtime.Token#EPSILON_ is in set if we reach end of
/// rule.
///
public func nextTokens(_ s: ATNState) throws -> IntervalSet {
public func nextTokens(_ s: ATNState) -> IntervalSet {
if let nextTokenWithinRule = s.nextTokenWithinRule
{
return nextTokenWithinRule
}
let intervalSet = try nextTokens(s, nil)
let intervalSet = nextTokens(s, nil)
s.nextTokenWithinRule = intervalSet
try intervalSet.setReadonly(true)
try! intervalSet.setReadonly(true)
return intervalSet
}
@ -151,27 +151,27 @@ public class ATN {
}
var ctx: RuleContext? = context
let s: ATNState = states[stateNumber]!
var following: IntervalSet = try nextTokens(s)
let s = states[stateNumber]!
var following = nextTokens(s)
if !following.contains(CommonToken.EPSILON) {
return following
}
let expected: IntervalSet = try IntervalSet()
try expected.addAll(following)
try expected.remove(CommonToken.EPSILON)
let expected = IntervalSet()
try! expected.addAll(following)
try! expected.remove(CommonToken.EPSILON)
while let ctxWrap = ctx , ctxWrap.invokingState >= 0 && following.contains(CommonToken.EPSILON) {
let invokingState: ATNState = states[ctxWrap.invokingState]!
let rt: RuleTransition = invokingState.transition(0) as! RuleTransition
following = try nextTokens(rt.followState)
try expected.addAll(following)
try expected.remove(CommonToken.EPSILON)
while let ctxWrap = ctx, ctxWrap.invokingState >= 0 && following.contains(CommonToken.EPSILON) {
let invokingState = states[ctxWrap.invokingState]!
let rt = invokingState.transition(0) as! RuleTransition
following = nextTokens(rt.followState)
try! expected.addAll(following)
try! expected.remove(CommonToken.EPSILON)
ctx = ctxWrap.parent
}
if following.contains(CommonToken.EPSILON) {
try expected.add(CommonToken.EOF)
try! expected.add(CommonToken.EOF)
}
return expected

View File

@ -150,13 +150,12 @@ public class ATNConfig: Hashable, CustomStringConvertible {
///
public var hashValue: Int {
var hashCode: Int = MurmurHash.initialize(7)
var hashCode = MurmurHash.initialize(7)
hashCode = MurmurHash.update(hashCode, state.stateNumber)
hashCode = MurmurHash.update(hashCode, alt)
hashCode = MurmurHash.update(hashCode, context)
hashCode = MurmurHash.update(hashCode, semanticContext)
hashCode = MurmurHash.finish(hashCode, 4)
return hashCode
return MurmurHash.finish(hashCode, 4)
}
@ -167,7 +166,7 @@ public class ATNConfig: Hashable, CustomStringConvertible {
//return "MyClass \(string)"
return toString(nil, true)
}
public func toString<T:ATNSimulator>(_ recog: Recognizer<T>?, _ showAlt: Bool) -> String {
public func toString<T>(_ recog: Recognizer<T>?, _ showAlt: Bool) -> String {
let buf: StringBuilder = StringBuilder()
buf.append("(")
buf.append(state)

View File

@ -27,7 +27,7 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
/// fields; in particular, conflictingAlts is set after
/// we've made this readonly.
///
internal final var readonly: Bool = false
internal final var readonly = false
///
/// All configs but hashed by (s, i, _, pi) not including context. Wiped out
@ -38,11 +38,11 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
///
/// Track the elements as they are added to the set; supports get(i)
///
public final var configs: Array<ATNConfig> = Array<ATNConfig>()
public final var configs = [ATNConfig]()
// TODO: these fields make me pretty uncomfortable but nice to pack up info together, saves recomputation
// TODO: can we track conflicts as they are added to save scanning configs later?
public final var uniqueAlt: Int = 0
public final var uniqueAlt = 0
//TODO no default
///
/// Currently this is only used when we detect SLL conflict; this does
@ -54,9 +54,9 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
// Used in parser and lexer. In lexer, it indicates we hit a pred
// while computing a closure operation. Don't make a DFA state from this.
public final var hasSemanticContext: Bool = false
public final var hasSemanticContext = false
//TODO no default
public final var dipsIntoOuterContext: Bool = false
public final var dipsIntoOuterContext = false
//TODO no default
///
@ -66,7 +66,7 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
///
public final var fullCtx: Bool
private var cachedHashCode: Int = -1
private var cachedHashCode = -1
public init(_ fullCtx: Bool) {
configLookup = LookupDictionary()
@ -76,9 +76,9 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
self.init(true)
}
public convenience init(_ old: ATNConfigSet) throws {
public convenience init(_ old: ATNConfigSet) {
self.init(old.fullCtx)
try addAll(old)
try! addAll(old)
self.uniqueAlt = old.uniqueAlt
self.conflictingAlts = old.conflictingAlts
self.hasSemanticContext = old.hasSemanticContext
@ -108,7 +108,6 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?) throws -> Bool {
if readonly {
throw ANTLRError.illegalState(msg: "This set is readonly")
}
if config.semanticContext != SemanticContext.NONE {
@ -125,10 +124,9 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
return true
}
// a previous (s,i,pi,_), merge with it and save result
let rootIsWildcard: Bool = !fullCtx
let rootIsWildcard = !fullCtx
let merged: PredictionContext =
PredictionContext.merge(existing.context!, config.context!, rootIsWildcard, &mergeCache)
let merged = PredictionContext.merge(existing.context!, config.context!, rootIsWildcard, &mergeCache)
// no need to check for existing.context, config.context in cache
// since only way to create new graphs is "call rule" and here. We
@ -154,16 +152,14 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
///
/// Return a List holding list of configs
///
public final func elements() -> Array<ATNConfig> {
public final func elements() -> [ATNConfig] {
return configs
}
public final func getStates() -> Set<ATNState> {
let length = configs.count
var states: Set<ATNState> = Set<ATNState>(minimumCapacity: length)
for i in 0..<length {
states.insert(configs[i].state)
var states = Set<ATNState>(minimumCapacity: configs.count)
for config in configs {
states.insert(config.state)
}
return states
}
@ -176,21 +172,19 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
///
/// - since: 4.3
///
public final func getAlts() throws -> BitSet {
let alts: BitSet = BitSet()
let length = configs.count
for i in 0..<length {
try alts.set(configs[i].alt)
public final func getAlts() -> BitSet {
let alts = BitSet()
for config in configs {
try! alts.set(config.alt)
}
return alts
}
public final func getPredicates() -> Array<SemanticContext> {
var preds: Array<SemanticContext> = Array<SemanticContext>()
let length = configs.count
for i in 0..<length {
if configs[i].semanticContext != SemanticContext.NONE {
preds.append(configs[i].semanticContext)
public final func getPredicates() -> [SemanticContext] {
var preds = [SemanticContext]()
for config in configs {
if config.semanticContext != SemanticContext.NONE {
preds.append(config.semanticContext)
}
}
return preds
@ -203,22 +197,20 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
public final func optimizeConfigs(_ interpreter: ATNSimulator) throws {
if readonly {
throw ANTLRError.illegalState(msg: "This set is readonly")
}
if configLookup.isEmpty {
return
}
let length = configs.count
for i in 0..<length {
configs[i].context = interpreter.getCachedContext(configs[i].context!)
for config in configs {
config.context = interpreter.getCachedContext(config.context!)
}
}
@discardableResult
public final func addAll(_ coll: ATNConfigSet) throws -> Bool {
for c: ATNConfig in coll.configs {
try add(c)
for c in coll.configs {
try add(c)
}
return false
}
@ -238,12 +230,9 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
private var configsHashValue: Int {
var hashCode = 1
for item in configs {
hashCode = Int.multiplyWithOverflow(3, hashCode).0
hashCode = Int.addWithOverflow(hashCode, item.hashValue).0
hashCode = hashCode &* 3 &+ item.hashValue
}
return hashCode
}
public final var count: Int {
@ -269,7 +258,6 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
public final func clear() throws {
if readonly {
throw ANTLRError.illegalState(msg: "This set is readonly")
}
configs.removeAll()
cachedHashCode = -1
@ -287,7 +275,7 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
}
public var description: String {
let buf: StringBuilder = StringBuilder()
let buf = StringBuilder()
buf.append(elements().map({ $0.description }))
if hasSemanticContext {
buf.append(",hasSemanticContext=")
@ -316,18 +304,15 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
/// return configLookup.toArray(a);
///
private final func configHash(_ stateNumber: Int,_ context: PredictionContext?) -> Int{
var hashCode: Int = MurmurHash.initialize(7)
var hashCode = MurmurHash.initialize(7)
hashCode = MurmurHash.update(hashCode, stateNumber)
hashCode = MurmurHash.update(hashCode, context)
hashCode = MurmurHash.finish(hashCode, 2)
return hashCode
return MurmurHash.finish(hashCode, 2)
}
public final func getConflictingAltSubsets() throws -> Array<BitSet> {
public final func getConflictingAltSubsets() -> [BitSet] {
let length = configs.count
let configToAlts: HashMap<Int, BitSet> = HashMap<Int, BitSet>(count: length)
let configToAlts = HashMap<Int, BitSet>(count: length)
for i in 0..<length {
let hash = configHash(configs[i].state.stateNumber, configs[i].context)
@ -339,15 +324,15 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
configToAlts[hash] = alts
}
try alts.set(configs[i].alt)
try! alts.set(configs[i].alt)
}
return configToAlts.values
}
public final func getStateToAltMap() throws -> HashMap<ATNState, BitSet> {
public final func getStateToAltMap() -> HashMap<ATNState, BitSet> {
let length = configs.count
let m: HashMap<ATNState, BitSet> = HashMap<ATNState, BitSet>(count: length) //minimumCapacity: length)
let m = HashMap<ATNState, BitSet>(count: length)
for i in 0..<length {
var alts: BitSet
@ -358,42 +343,37 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
m[configs[i].state] = alts
}
try alts.set(configs[i].alt)
try! alts.set(configs[i].alt)
}
return m
}
//for DFAState
public final func getAltSet() -> Set<Int>? {
var alts: Set<Int> = Set<Int>()
let length = configs.count
for i in 0..<length {
alts.insert(configs[i].alt)
}
if alts.isEmpty {
if configs.isEmpty {
return nil
}
var alts = Set<Int>()
for config in configs {
alts.insert(config.alt)
}
return alts
}
//for DiagnosticErrorListener
public final func getAltBitSet() throws -> BitSet {
let result: BitSet = BitSet()
let length = configs.count
for i in 0..<length {
try result.set(configs[i].alt)
public final func getAltBitSet() -> BitSet {
let result = BitSet()
for config in configs {
try! result.set(config.alt)
}
return result
}
//LexerATNSimulator
public final var firstConfigWithRuleStopState: ATNConfig? {
let length = configs.count
for i in 0..<length {
if configs[i].state is RuleStopState {
return configs[i]
public final var firstConfigWithRuleStopState: ATNConfig? {
for config in configs {
if config.state is RuleStopState {
return config
}
}
@ -402,135 +382,124 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
//ParserATNSimulator
public final func getUniqueAlt() -> Int {
var alt: Int = ATN.INVALID_ALT_NUMBER
let length = configs.count
for i in 0..<length {
public final func getUniqueAlt() -> Int {
var alt = ATN.INVALID_ALT_NUMBER
for config in configs {
if alt == ATN.INVALID_ALT_NUMBER {
alt = configs[i].alt // found first alt
} else {
if configs[i].alt != alt {
return ATN.INVALID_ALT_NUMBER
}
alt = config.alt // found first alt
} else if config.alt != alt {
return ATN.INVALID_ALT_NUMBER
}
}
return alt
}
public final func removeAllConfigsNotInRuleStopState(_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?,_ lookToEndOfRule: Bool,_ atn: ATN) throws -> ATNConfigSet {
public final func removeAllConfigsNotInRuleStopState(_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?,_ lookToEndOfRule: Bool,_ atn: ATN) -> ATNConfigSet {
if PredictionMode.allConfigsInRuleStopStates(self) {
return self
}
let result: ATNConfigSet = ATNConfigSet(fullCtx)
let length = configs.count
for i in 0..<length {
if configs[i].state is RuleStopState {
try result.add(configs[i],&mergeCache)
let result = ATNConfigSet(fullCtx)
for config in configs {
if config.state is RuleStopState {
try! result.add(config, &mergeCache)
continue
}
if lookToEndOfRule && configs[i].state.onlyHasEpsilonTransitions() {
let nextTokens: IntervalSet = try atn.nextTokens(configs[i].state)
if lookToEndOfRule && config.state.onlyHasEpsilonTransitions() {
let nextTokens = atn.nextTokens(config.state)
if nextTokens.contains(CommonToken.EPSILON) {
let endOfRuleState: ATNState = atn.ruleToStopState[configs[i].state.ruleIndex!]
try result.add(ATNConfig(configs[i], endOfRuleState), &mergeCache)
let endOfRuleState = atn.ruleToStopState[config.state.ruleIndex!]
try! result.add(ATNConfig(config, endOfRuleState), &mergeCache)
}
}
}
return result
}
public final func applyPrecedenceFilter(_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?,_ parser: Parser,_ _outerContext: ParserRuleContext!) throws -> ATNConfigSet {
let configSet: ATNConfigSet = ATNConfigSet(fullCtx)
let length = configs.count
let statesFromAlt1: HashMap<Int, PredictionContext> = HashMap<Int, PredictionContext>(count: length)
for i in 0..<length {
let configSet = ATNConfigSet(fullCtx)
let statesFromAlt1 = HashMap<Int, PredictionContext>(count: configs.count)
for config in configs {
// handle alt 1 first
if configs[i].alt != 1 {
if config.alt != 1 {
continue
}
let updatedContext: SemanticContext? = try configs[i].semanticContext.evalPrecedence(parser, _outerContext)
let updatedContext = try config.semanticContext.evalPrecedence(parser, _outerContext)
if updatedContext == nil {
// the configuration was eliminated
continue
}
statesFromAlt1[configs[i].state.stateNumber] = configs[i].context
if updatedContext != configs[i].semanticContext {
try configSet.add(ATNConfig(configs[i], updatedContext!), &mergeCache)
statesFromAlt1[config.state.stateNumber] = config.context
if updatedContext != config.semanticContext {
try! configSet.add(ATNConfig(config, updatedContext!), &mergeCache)
} else {
try configSet.add(configs[i],&mergeCache)
try! configSet.add(config, &mergeCache)
}
}
for i in 0..<length {
if configs[i].alt == 1 {
for config in configs {
if config.alt == 1 {
// already handled
continue
}
if !configs[i].isPrecedenceFilterSuppressed() {
if !config.isPrecedenceFilterSuppressed() {
///
/// In the future, this elimination step could be updated to also
/// filter the prediction context for alternatives predicting alt>1
/// (basically a graph subtraction algorithm).
///
let context: PredictionContext? = statesFromAlt1[configs[i].state.stateNumber]
if context != nil && context == configs[i].context {
let context = statesFromAlt1[config.state.stateNumber]
if context != nil && context == config.context {
// eliminated
continue
}
}
try configSet.add(configs[i], &mergeCache)
try! configSet.add(config, &mergeCache)
}
return configSet
}
internal func getPredsForAmbigAlts(_ ambigAlts: BitSet,
_ nalts: Int) throws -> [SemanticContext?]? {
var altToPred: [SemanticContext?]? = [SemanticContext?](repeating: nil, count: nalts + 1) //new SemanticContext[nalts + 1];
let length = configs.count
for i in 0..<length {
if try ambigAlts.get(configs[i].alt) {
altToPred![configs[i].alt] = SemanticContext.or(altToPred![configs[i].alt], configs[i].semanticContext)
}
internal func getPredsForAmbigAlts(_ ambigAlts: BitSet, _ nalts: Int) -> [SemanticContext?]? {
var altToPred = [SemanticContext?](repeating: nil, count: nalts + 1)
for config in configs {
if try! ambigAlts.get(config.alt) {
altToPred[config.alt] = SemanticContext.or(altToPred[config.alt], config.semanticContext)
}
var nPredAlts: Int = 0
for i in 1...nalts {
if altToPred![i] == nil {
altToPred![i] = SemanticContext.NONE
} else {
if altToPred![i] != SemanticContext.NONE {
nPredAlts += 1
}
}
}
var nPredAlts = 0
for i in 1...nalts {
if altToPred[i] == nil {
altToPred[i] = SemanticContext.NONE
}
// // Optimize away p||p and p&&p TODO: optimize() was a no-op
// for (int i = 0; i < altToPred.length; i++) {
// altToPred[i] = altToPred[i].optimize();
// }
// nonambig alts are null in altToPred
if nPredAlts == 0 {
altToPred = nil
else if altToPred[i] != SemanticContext.NONE {
nPredAlts += 1
}
}
return altToPred
// // Optimize away p||p and p&&p TODO: optimize() was a no-op
// for (int i = 0; i < altToPred.length; i++) {
// altToPred[i] = altToPred[i].optimize();
// }
// nonambig alts are null in altToPred
return (nPredAlts == 0 ? nil : altToPred)
}
public final func getAltThatFinishedDecisionEntryRule() throws -> Int {
let alts: IntervalSet = try IntervalSet()
let length = configs.count
for i in 0..<length {
if configs[i].getOuterContextDepth() > 0 ||
(configs[i].state is RuleStopState &&
configs[i].context!.hasEmptyPath()) {
try alts.add(configs[i].alt)
public final func getAltThatFinishedDecisionEntryRule() -> Int {
let alts = IntervalSet()
for config in configs {
if config.getOuterContextDepth() > 0 ||
(config.state is RuleStopState &&
config.context!.hasEmptyPath()) {
try! alts.add(config.alt)
}
}
if alts.size() == 0 {
@ -551,39 +520,36 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
///
public final func splitAccordingToSemanticValidity(
_ outerContext: ParserRuleContext,
_ evalSemanticContext:( SemanticContext,ParserRuleContext,Int,Bool) throws -> Bool) throws -> (ATNConfigSet, ATNConfigSet) {
let succeeded: ATNConfigSet = ATNConfigSet(fullCtx)
let failed: ATNConfigSet = ATNConfigSet(fullCtx)
let length = configs.count
for i in 0..<length {
if configs[i].semanticContext != SemanticContext.NONE {
let predicateEvaluationResult: Bool = try evalSemanticContext(configs[i].semanticContext, outerContext, configs[i].alt,fullCtx)
if predicateEvaluationResult {
try succeeded.add(configs[i])
} else {
try failed.add(configs[i])
}
_ evalSemanticContext: (SemanticContext, ParserRuleContext, Int, Bool) throws -> Bool) rethrows -> (ATNConfigSet, ATNConfigSet) {
let succeeded = ATNConfigSet(fullCtx)
let failed = ATNConfigSet(fullCtx)
for config in configs {
if config.semanticContext != SemanticContext.NONE {
let predicateEvaluationResult = try evalSemanticContext(config.semanticContext, outerContext, config.alt,fullCtx)
if predicateEvaluationResult {
try! succeeded.add(config)
} else {
try succeeded.add(configs[i])
try! failed.add(config)
}
} else {
try! succeeded.add(config)
}
return (succeeded, failed)
}
return (succeeded, failed)
}
//public enum PredictionMode
public final func dupConfigsWithoutSemanticPredicates() throws -> ATNConfigSet {
let dup: ATNConfigSet = ATNConfigSet()
let length = configs.count
for i in 0..<length {
let c = ATNConfig(configs[i], SemanticContext.NONE)
try dup.add(c)
public final func dupConfigsWithoutSemanticPredicates() -> ATNConfigSet {
let dup = ATNConfigSet()
for config in configs {
let c = ATNConfig(config, SemanticContext.NONE)
try! dup.add(c)
}
return dup
}
public final var hasConfigInRuleStopState: Bool {
let length = configs.count
for i in 0..<length {
if configs[i].state is RuleStopState {
for config in configs {
if config.state is RuleStopState {
return true
}
}
@ -592,9 +558,8 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
}
public final var allConfigsInRuleStopStates: Bool {
let length = configs.count
for i in 0..<length {
if !(configs[i].state is RuleStopState) {
for config in configs {
if !(config.state is RuleStopState) {
return false
}
}

View File

@ -117,26 +117,24 @@ public class ATNDeserializer {
}
var p: Int = 0
let version: Int = data[p].unicodeValue //toInt(data[p++]);
let version = data[p].unicodeValue
p += 1
if version != ATNDeserializer.SERIALIZED_VERSION {
let reason: String = "Could not deserialize ATN with version \(version) (expected \(ATNDeserializer.SERIALIZED_VERSION))."
let reason = "Could not deserialize ATN with version \(version) (expected \(ATNDeserializer.SERIALIZED_VERSION))."
throw ANTLRError.unsupportedOperation(msg: reason)
}
let uuid: UUID = toUUID(data, p)
p += 8
if !ATNDeserializer.SUPPORTED_UUIDS.contains(uuid) {
let reason: String = "Could not deserialize ATN with UUID \(uuid) (expected \(ATNDeserializer.SERIALIZED_UUID) or a legacy UUID)."
let reason = "Could not deserialize ATN with UUID \(uuid) (expected \(ATNDeserializer.SERIALIZED_UUID) or a legacy UUID)."
throw ANTLRError.unsupportedOperation(msg: reason)
}
let supportsPrecedencePredicates: Bool = isFeatureSupported(ATNDeserializer.ADDED_PRECEDENCE_TRANSITIONS, uuid)
let supportsLexerActions: Bool = isFeatureSupported(ATNDeserializer.ADDED_LEXER_ACTIONS, uuid)
let supportsPrecedencePredicates = isFeatureSupported(ATNDeserializer.ADDED_PRECEDENCE_TRANSITIONS, uuid)
let supportsLexerActions = isFeatureSupported(ATNDeserializer.ADDED_LEXER_ACTIONS, uuid)
let grammarType: ATNType = ATNType(rawValue: toInt(data[p]))!
let grammarType = ATNType(rawValue: toInt(data[p]))!
p += 1
let maxTokenType: Int = toInt(data[p])
p += 1
@ -145,12 +143,12 @@ public class ATNDeserializer {
//
// STATES
//
var loopBackStateNumbers: Array<(LoopEndState, Int)> = Array<(LoopEndState, Int)>()
var endStateNumbers: Array<(BlockStartState, Int)> = Array<(BlockStartState, Int)>()
let nstates: Int = toInt(data[p])
var loopBackStateNumbers = [(LoopEndState, Int)]()
var endStateNumbers = [(BlockStartState, Int)]()
let nstates = toInt(data[p])
p += 1
for _ in 0..<nstates {
let stype: Int = toInt(data[p])
let stype = toInt(data[p])
p += 1
// ignore bad type of states
if stype == ATNState.INVALID_TYPE {
@ -158,25 +156,23 @@ public class ATNDeserializer {
continue
}
var ruleIndex: Int = toInt(data[p])
var ruleIndex = toInt(data[p])
p += 1
if ruleIndex == Int.max {
// Character.MAX_VALUE
ruleIndex = -1
}
let s: ATNState = try stateFactory(stype, ruleIndex)!
let s = try stateFactory(stype, ruleIndex)!
if stype == ATNState.LOOP_END {
// special case
let loopBackStateNumber: Int = toInt(data[p])
let loopBackStateNumber = toInt(data[p])
p += 1
loopBackStateNumbers.append((s as! LoopEndState, loopBackStateNumber))
} else {
if let s = s as? BlockStartState {
let endStateNumber: Int = toInt(data[p])
p += 1
endStateNumbers.append(s, endStateNumber)
}
} else if let s = s as? BlockStartState {
let endStateNumber = toInt(data[p])
p += 1
endStateNumbers.append((s, endStateNumber))
}
atn.addState(s)
}
@ -272,12 +268,12 @@ public class ATNDeserializer {
var sets: Array<IntervalSet> = Array<IntervalSet>()
// First, deserialize sets with 16-bit arguments <= U+FFFF.
try readSets(data, &p, &sets, readUnicodeInt)
readSets(data, &p, &sets, readUnicodeInt)
// Next, if the ATN was serialized with the Unicode SMP feature,
// deserialize sets with 32-bit arguments <= U+10FFFF.
if isFeatureSupported(ATNDeserializer.ADDED_UNICODE_SMP, uuid) {
try readSets(data, &p, &sets, readUnicodeInt32)
readSets(data, &p, &sets, readUnicodeInt32)
}
//
@ -548,23 +544,23 @@ public class ATNDeserializer {
return result
}
private func readSets(_ data: [Character], _ p: inout Int, _ sets: inout Array<IntervalSet>, _ readUnicode: ([Character], inout Int) -> Int) throws {
let nsets: Int = toInt(data[p])
private func readSets(_ data: [Character], _ p: inout Int, _ sets: inout Array<IntervalSet>, _ readUnicode: ([Character], inout Int) -> Int) {
let nsets = toInt(data[p])
p += 1
for _ in 0..<nsets {
let nintervals: Int = toInt(data[p])
let nintervals = toInt(data[p])
p += 1
let set: IntervalSet = try IntervalSet()
let set = IntervalSet()
sets.append(set)
let containsEof: Bool = toInt(data[p]) != 0
let containsEof = (toInt(data[p]) != 0)
p += 1
if containsEof {
try set.add(-1)
try! set.add(-1)
}
for _ in 0..<nintervals {
try set.add(readUnicode(data, &p), readUnicode(data, &p))
try! set.add(readUnicode(data, &p), readUnicode(data, &p))
}
}
}
@ -724,27 +720,27 @@ public class ATNDeserializer {
//
// SETS
//
var sets: Array<IntervalSet> = Array<IntervalSet>()
let nsets: Int = dict["nsets"] as! Int
var sets = [IntervalSet]()
let nsets = dict["nsets"] as! Int
let intervalSet = dict["IntervalSet"] as! [Dictionary<String, Any>]
for i in 0..<nsets {
let setBuilder = intervalSet[i]
let nintervals: Int = setBuilder["size"] as! Int
let nintervals = setBuilder["size"] as! Int
let set: IntervalSet = try IntervalSet()
let set = IntervalSet()
sets.append(set)
let containsEof: Bool = (setBuilder["containsEof"] as! Int) != 0
let containsEof = (setBuilder["containsEof"] as! Int) != 0
if containsEof {
try set.add(-1)
try! set.add(-1)
}
let intervalsBuilder = setBuilder["Intervals"] as! [Dictionary<String, Any>]
let intervalsBuilder = setBuilder["Intervals"] as! [[String : Any]]
for j in 0..<nintervals {
let vals = intervalsBuilder[j]
try set.add((vals["a"] as! Int), (vals["b"] as! Int))
try! set.add((vals["a"] as! Int), (vals["b"] as! Int))
}
}
@ -759,15 +755,15 @@ public class ATNDeserializer {
for transitionsBuilder in allTransitions {
for transition in transitionsBuilder {
let src: Int = transition["src"] as! Int
let trg: Int = transition["trg"] as! Int
let ttype: Int = transition["edgeType"] as! Int
let arg1: Int = transition["arg1"] as! Int
let arg2: Int = transition["arg2"] as! Int
let arg3: Int = transition["arg3"] as! Int
let trans: Transition = try edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
let src = transition["src"] as! Int
let trg = transition["trg"] as! Int
let ttype = transition["edgeType"] as! Int
let arg1 = transition["arg1"] as! Int
let arg2 = transition["arg2"] as! Int
let arg3 = transition["arg3"] as! Int
let trans = try edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
let srcState: ATNState = atn.states[src]!
let srcState = atn.states[src]!
srcState.addTransition(trans)
}
@ -775,13 +771,13 @@ public class ATNDeserializer {
// edges for rule stop states can be derived, so they aren't serialized
for state: ATNState? in atn.states {
if let state = state {
for state in atn.states {
if let state = state {
let length = state.getNumberOfTransitions()
for i in 0..<length {
let t: Transition = state.transition(i)
let t = state.transition(i)
if let ruleTransition = t as? RuleTransition {
var outermostPrecedenceReturn: Int = -1
var outermostPrecedenceReturn = -1
if let targetRuleIndex = ruleTransition.target.ruleIndex {
if atn.ruleToStartState[targetRuleIndex].isPrecedenceRule {
if ruleTransition.precedence == 0 {
@ -789,7 +785,7 @@ public class ATNDeserializer {
}
}
let returnTransition: EpsilonTransition = EpsilonTransition(ruleTransition.followState, outermostPrecedenceReturn)
let returnTransition = EpsilonTransition(ruleTransition.followState, outermostPrecedenceReturn)
atn.ruleToStopState[targetRuleIndex].addTransition(returnTransition)
}
}
@ -797,39 +793,36 @@ public class ATNDeserializer {
}
}
for state: ATNState? in atn.states {
for state in atn.states {
if let state = state as? BlockStartState {
// we need to know the end state to set its start state
if let stateEndState = state.endState {
// block end states can only be associated to a single block start state
if stateEndState.startState != nil {
throw ANTLRError.illegalState(msg: "state.endState.startState != nil")
}
stateEndState.startState = state
}
else {
throw ANTLRError.illegalState(msg: "state.endState == nil")
}
}
if let loopbackState = state as? PlusLoopbackState {
let length = loopbackState.getNumberOfTransitions()
for i in 0..<length {
let target: ATNState = loopbackState.transition(i).target
if target is PlusBlockStartState {
(target as! PlusBlockStartState).loopBackState = loopbackState
let target = loopbackState.transition(i).target
if let startState = target as? PlusBlockStartState {
startState.loopBackState = loopbackState
}
}
} else {
if let loopbackState = state as? StarLoopbackState {
let length = loopbackState.getNumberOfTransitions()
for i in 0..<length {
let target: ATNState = loopbackState.transition(i).target
if target is StarLoopEntryState {
(target as! StarLoopEntryState).loopBackState = loopbackState
let target = loopbackState.transition(i).target
if let entryState = target as? StarLoopEntryState {
entryState.loopBackState = loopbackState
}
}
}
@ -840,13 +833,12 @@ public class ATNDeserializer {
//
// DECISIONS
//
let ndecisions: [Int] = dict["decisionToState"] as! [Int]
let ndecisions = dict["decisionToState"] as! [Int]
let length = ndecisions.count
for i in 0..<length {
let s: Int = ndecisions[i]
let decState: DecisionState = atn.states[s] as! DecisionState
let s = ndecisions[i]
let decState = atn.states[s] as! DecisionState
atn.appendDecisionToState(decState)
//atn.decisionToState.append(decState)
decState.decision = i
}
@ -854,40 +846,34 @@ public class ATNDeserializer {
// LEXER ACTIONS
//
if atn.grammarType == ATNType.lexer {
let lexerActionsBuilder = dict["lexerActions"] as! [Dictionary<String, Any>]
let lexerActionsBuilder = dict["lexerActions"] as! [[String : Any]]
if supportsLexerActions {
atn.lexerActions = [LexerAction](repeating: LexerAction(), count: lexerActionsBuilder.count) //[toInt(data[p++])];
atn.lexerActions = [LexerAction](repeating: LexerAction(), count: lexerActionsBuilder.count)
let length = atn.lexerActions.count
for i in 0..<length {
let actionTypeValue = lexerActionsBuilder[i]["actionType"] as! Int
let actionType: LexerActionType = LexerActionType(rawValue: actionTypeValue)! //LexerActionType.values()[toInt(data[p++])];
let data1: Int = lexerActionsBuilder[i]["a"] as! Int
let data2: Int = lexerActionsBuilder[i]["b"] as! Int
let lexerAction: LexerAction = lexerActionFactory(actionType, data1, data2)
let actionType = LexerActionType(rawValue: actionTypeValue)!
let data1 = lexerActionsBuilder[i]["a"] as! Int
let data2 = lexerActionsBuilder[i]["b"] as! Int
let lexerAction = lexerActionFactory(actionType, data1, data2)
atn.lexerActions[i] = lexerAction
}
} else {
// for compatibility with older serialized ATNs, convert the old
// serialized action index for action transitions to the new
// form, which is the index of a LexerCustomAction
var legacyLexerActions: Array<LexerAction> = Array<LexerAction>()
for state: ATNState? in atn.states {
var legacyLexerActions = [LexerAction]()
for state in atn.states {
if let state = state {
let length = state.getNumberOfTransitions()
for i in 0..<length {
let transition: Transition = state.transition(i)
if !(transition is ActionTransition) {
guard let transition = state.transition(i) as? ActionTransition else {
continue
}
let ruleIndex: Int = (transition as! ActionTransition).ruleIndex
let actionIndex: Int = (transition as! ActionTransition).actionIndex
let lexerAction: LexerCustomAction = LexerCustomAction(ruleIndex, actionIndex)
let ruleIndex = transition.ruleIndex
let actionIndex = transition.actionIndex
let lexerAction = LexerCustomAction(ruleIndex, actionIndex)
state.setTransition(i, ActionTransition(transition.target, ruleIndex, legacyLexerActions.count, false))
legacyLexerActions.append(lexerAction)
}
@ -912,11 +898,11 @@ public class ATNDeserializer {
}
for i in 0..<length {
let bypassStart: BasicBlockStartState = BasicBlockStartState()
let bypassStart = BasicBlockStartState()
bypassStart.ruleIndex = i
atn.addState(bypassStart)
let bypassStop: BlockEndState = BlockEndState()
let bypassStop = BlockEndState()
bypassStop.ruleIndex = i
atn.addState(bypassStop)
@ -930,7 +916,7 @@ public class ATNDeserializer {
if atn.ruleToStartState[i].isPrecedenceRule {
// wrap from the beginning of the rule to the StarLoopEntryState
endState = nil
for state: ATNState? in atn.states {
for state in atn.states {
if let state = state {
if state.ruleIndex != i {
continue
@ -940,7 +926,7 @@ public class ATNDeserializer {
continue
}
let maybeLoopEndState: ATNState = state.transition(state.getNumberOfTransitions() - 1).target
let maybeLoopEndState = state.transition(state.getNumberOfTransitions() - 1).target
if !(maybeLoopEndState is LoopEndState) {
continue
}
@ -963,9 +949,9 @@ public class ATNDeserializer {
}
// all non-excluded transitions that currently target end state need to target blockEnd instead
for state: ATNState? in atn.states {
for state in atn.states {
if let state = state {
for transition: Transition in state.transitions {
for transition in state.transitions {
if transition === excludeTransition! {
continue
}
@ -979,7 +965,7 @@ public class ATNDeserializer {
// all transitions leaving the rule start state need to leave blockStart instead
while atn.ruleToStartState[i].getNumberOfTransitions() > 0 {
let transition: Transition = atn.ruleToStartState[i].removeTransition(atn.ruleToStartState[i].getNumberOfTransitions() - 1)
let transition = atn.ruleToStartState[i].removeTransition(atn.ruleToStartState[i].getNumberOfTransitions() - 1)
bypassStart.addTransition(transition)
}
@ -987,7 +973,7 @@ public class ATNDeserializer {
atn.ruleToStartState[i].addTransition(EpsilonTransition(bypassStart))
bypassStop.addTransition(EpsilonTransition(endState!))
let matchState: ATNState = BasicState()
let matchState = BasicState()
atn.addState(matchState)
matchState.addTransition(AtomTransition(bypassStop, atn.ruleToTokenType[i]))
bypassStart.addTransition(EpsilonTransition(matchState))
@ -1011,7 +997,7 @@ public class ATNDeserializer {
/// - parameter atn: The ATN.
///
internal func markPrecedenceDecisions(_ atn: ATN) {
for state: ATNState? in atn.states {
for state in atn.states {
if let state = state as? StarLoopEntryState {
///
@ -1021,7 +1007,7 @@ public class ATNDeserializer {
///
if let stateRuleIndex = state.ruleIndex {
if atn.ruleToStartState[stateRuleIndex].isPrecedenceRule {
let maybeLoopEndState: ATNState = state.transition(state.getNumberOfTransitions() - 1).target
let maybeLoopEndState = state.transition(state.getNumberOfTransitions() - 1).target
if maybeLoopEndState is LoopEndState {
if maybeLoopEndState.epsilonOnlyTransitions && maybeLoopEndState.transition(0).target is RuleStopState {
state.precedenceRuleDecision = true
@ -1035,7 +1021,7 @@ public class ATNDeserializer {
internal func verifyATN(_ atn: ATN) throws {
// verify assumptions
for state: ATNState? in atn.states {
for state in atn.states {
guard let state = state else {
continue
}
@ -1084,8 +1070,7 @@ public class ATNDeserializer {
try checkCondition((state as! BlockEndState).startState != nil)
}
if state is DecisionState {
let decisionState: DecisionState = state as! DecisionState
if let decisionState = state as? DecisionState {
try checkCondition(decisionState.getNumberOfTransitions() <= 1 || decisionState.decision >= 0)
} else {
try checkCondition(state.getNumberOfTransitions() <= 1 || state is RuleStopState)
@ -1109,7 +1094,7 @@ public class ATNDeserializer {
_ type: Int, _ src: Int, _ trg: Int,
_ arg1: Int, _ arg2: Int, _ arg3: Int,
_ sets: Array<IntervalSet>) throws -> Transition {
let target: ATNState = atn.states[trg]!
let target = atn.states[trg]!
switch type {
case Transition.EPSILON: return EpsilonTransition(target)
case Transition.RANGE:
@ -1119,10 +1104,10 @@ public class ATNDeserializer {
return RangeTransition(target, arg1, arg2)
}
case Transition.RULE:
let rt: RuleTransition = RuleTransition(atn.states[arg1] as! RuleStartState, arg2, arg3, target)
let rt = RuleTransition(atn.states[arg1] as! RuleStartState, arg2, arg3, target)
return rt
case Transition.PREDICATE:
let pt: PredicateTransition = PredicateTransition(target, arg1, arg2, arg3 != 0)
let pt = PredicateTransition(target, arg1, arg2, arg3 != 0)
return pt
case Transition.PRECEDENCE:
return PrecedencePredicateTransition(target, arg1)
@ -1133,17 +1118,14 @@ public class ATNDeserializer {
return AtomTransition(target, arg1)
}
case Transition.ACTION:
let a: ActionTransition = ActionTransition(target, arg1, arg2, arg3 != 0)
return a
return ActionTransition(target, arg1, arg2, arg3 != 0)
case Transition.SET: return SetTransition(target, sets[arg1])
case Transition.NOT_SET: return NotSetTransition(target, sets[arg1])
case Transition.WILDCARD: return WildcardTransition(target)
default:
throw ANTLRError.illegalState(msg: "The specified transition type is not valid.")
}
}
internal func stateFactory(_ type: Int, _ ruleIndex: Int) throws -> ATNState? {
@ -1197,12 +1179,6 @@ public class ATNDeserializer {
case .type:
return LexerTypeAction(data1)
//default:
}
// let message : String = "The specified lexer action type \(type) is not valid."
// RuntimeException(message)
}
}

View File

@ -8,24 +8,7 @@
import Foundation
open class ATNSimulator {
///
/// - Use _org.antlr.v4.runtime.atn.ATNDeserializer#SERIALIZED_VERSION_ instead.
///
public static let SERIALIZED_VERSION: Int = {
return ATNDeserializer.SERIALIZED_VERSION
}()
///
/// This is the current serialized UUID.
/// - Use _org.antlr.v4.runtime.atn.ATNDeserializer#checkCondition(boolean)_ instead.
///
public static let SERIALIZED_UUID: UUID = {
return (ATNDeserializer.SERIALIZED_UUID as UUID)
}()
///
///
/// Must distinguish between missing edge and edge we know leads nowhere
///
public static let ERROR: DFAState = {
@ -34,7 +17,7 @@ open class ATNSimulator {
return error
}()
public var atn: ATN
public let atn: ATN
///
/// The context cache maps all PredictionContext objects that are equals()
@ -67,7 +50,7 @@ open class ATNSimulator {
}
open func reset() {
RuntimeException(" must overriden ")
fatalError(#function + " must be overridden")
}
///
@ -96,55 +79,11 @@ open class ATNSimulator {
//TODO: synced (sharedContextCache!)
//synced (sharedContextCache!) {
let visited: HashMap<PredictionContext, PredictionContext> =
HashMap<PredictionContext, PredictionContext>()
let visited = HashMap<PredictionContext, PredictionContext>()
return PredictionContext.getCachedContext(context,
sharedContextCache!,
visited)
//}
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#deserialize_ instead.
///
public static func deserialize(_ data: [Character]) throws -> ATN {
return try ATNDeserializer().deserialize(data)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#checkCondition(boolean)_ instead.
///
public static func checkCondition(_ condition: Bool) throws {
try ATNDeserializer().checkCondition(condition)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#checkCondition(boolean, String)_ instead.
///
public static func checkCondition(_ condition: Bool, _ message: String) throws {
try ATNDeserializer().checkCondition(condition, message)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#toInt_ instead.
///
public func toInt(_ c: Character) -> Int {
return toInt(c)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#toInt32_ instead.
///
public func toInt32(_ data: [Character], _ offset: Int) -> Int {
return toInt32(data, offset)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#toLong_ instead.
///
public func toLong(_ data: [Character], _ offset: Int) -> Int64 {
return toLong(data, offset)
}
public static func edgeFactory(_ atn: ATN,
@ -153,12 +92,4 @@ open class ATNSimulator {
_ sets: Array<IntervalSet>) throws -> Transition {
return try ATNDeserializer().edgeFactory(atn, type, src, trg, arg1, arg2, arg3, sets)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#stateFactory_ instead.
///
public static func stateFactory(_ type: Int, _ ruleIndex: Int) throws -> ATNState {
return try ATNDeserializer().stateFactory(type, ruleIndex)!
}
}

View File

@ -183,8 +183,7 @@ public class ATNState: Hashable, CustomStringConvertible {
}
public func getStateType() -> Int {
RuntimeException(#function + " must be overridden")
return 0
fatalError(#function + " must be overridden")
}
public final func onlyHasEpsilonTransitions() -> Bool {

View File

@ -27,8 +27,8 @@ public final class AtomTransition: Transition, CustomStringConvertible {
}
override
public func labelIntervalSet() throws -> IntervalSet? {
return try IntervalSet.of(label)
public func labelIntervalSet() -> IntervalSet? {
return IntervalSet(label)
}
override

View File

@ -28,18 +28,18 @@ public class LL1Analyzer {
/// - parameter s: the ATN state
/// - returns: the expected symbols for each outgoing transition of `s`.
///
public func getDecisionLookahead(_ s: ATNState?) throws -> [IntervalSet?]? {
public func getDecisionLookahead(_ s: ATNState?) -> [IntervalSet?]? {
guard let s = s else {
return nil
}
let length = s.getNumberOfTransitions()
var look: [IntervalSet?] = [IntervalSet?](repeating: nil, count: length)
var look = [IntervalSet?](repeating: nil, count: length)
for alt in 0..<length {
look[alt] = try IntervalSet()
var lookBusy: Set<ATNConfig> = Set<ATNConfig>()
let seeThruPreds: Bool = false // fail to get lookahead upon pred
try _LOOK(s.transition(alt).target, nil, PredictionContext.EMPTY,
look[alt] = IntervalSet()
var lookBusy = Set<ATNConfig>()
let seeThruPreds = false // fail to get lookahead upon pred
_LOOK(s.transition(alt).target, nil, PredictionContext.EMPTY,
look[alt]!, &lookBusy, BitSet(), seeThruPreds, false)
// Wipe out lookahead for this alternative if we found nothing
// or we had a predicate when we !seeThruPreds
@ -66,8 +66,8 @@ public class LL1Analyzer {
/// - returns: The set of tokens that can follow `s` in the ATN in the
/// specified `ctx`.
///
public func LOOK(_ s: ATNState, _ ctx: RuleContext?) throws -> IntervalSet {
return try LOOK(s, nil, ctx)
public func LOOK(_ s: ATNState, _ ctx: RuleContext?) -> IntervalSet {
return LOOK(s, nil, ctx)
}
///
@ -89,13 +89,12 @@ public class LL1Analyzer {
/// specified `ctx`.
///
public func LOOK(_ s: ATNState, _ stopState: ATNState?, _ ctx: RuleContext?) throws -> IntervalSet {
let r: IntervalSet = try IntervalSet()
let seeThruPreds: Bool = true // ignore preds; get all lookahead
let lookContext: PredictionContext? = ctx != nil ? PredictionContext.fromRuleContext(s.atn!, ctx) : nil
public func LOOK(_ s: ATNState, _ stopState: ATNState?, _ ctx: RuleContext?) -> IntervalSet {
let r = IntervalSet()
let seeThruPreds = true // ignore preds; get all lookahead
let lookContext = ctx != nil ? PredictionContext.fromRuleContext(s.atn!, ctx) : nil
var config = Set<ATNConfig>()
try _LOOK(s, stopState, lookContext,
r, &config, BitSet(), seeThruPreds, true)
_LOOK(s, stopState, lookContext, r, &config, BitSet(), seeThruPreds, true)
return r
}
@ -135,13 +134,10 @@ public class LL1Analyzer {
_ look: IntervalSet,
_ lookBusy: inout Set<ATNConfig>,
_ calledRuleStack: BitSet,
_ seeThruPreds: Bool, _ addEOF: Bool) throws {
_ seeThruPreds: Bool,
_ addEOF: Bool) {
// print ("_LOOK(\(s.stateNumber), ctx=\(ctx)");
//TODO var c : ATNConfig = ATNConfig(s, 0, ctx);
if s.description == "273" {
var s = 0
}
var c: ATNConfig = ATNConfig(s, 0, ctx)
let c = ATNConfig(s, 0, ctx)
if lookBusy.contains(c) {
return
} else {
@ -150,12 +146,12 @@ public class LL1Analyzer {
if s == stopState {
guard let ctx = ctx else {
try look.add(CommonToken.EPSILON)
try! look.add(CommonToken.EPSILON)
return
}
if ctx.isEmpty() && addEOF {
try look.add(CommonToken.EOF)
try! look.add(CommonToken.EOF)
return
}
@ -163,75 +159,64 @@ public class LL1Analyzer {
if s is RuleStopState {
guard let ctx = ctx else {
try look.add(CommonToken.EPSILON)
try! look.add(CommonToken.EPSILON)
return
}
if ctx.isEmpty() && addEOF {
try look.add(CommonToken.EOF)
try! look.add(CommonToken.EOF)
return
}
if ctx != PredictionContext.EMPTY {
// run thru all possible stack tops in ctx
let length = ctx.size()
for i in 0..<length {
var returnState: ATNState = atn.states[(ctx.getReturnState(i))]!
var removed: Bool = try calledRuleStack.get(returnState.ruleIndex!)
try calledRuleStack.clear(returnState.ruleIndex!)
try self._LOOK(returnState, stopState, ctx.getParent(i), look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
defer {
if removed {
try! calledRuleStack.set(returnState.ruleIndex!)
}
let returnState = atn.states[(ctx.getReturnState(i))]!
let removed = try! calledRuleStack.get(returnState.ruleIndex!)
try! calledRuleStack.clear(returnState.ruleIndex!)
_LOOK(returnState, stopState, ctx.getParent(i), look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
if removed {
try! calledRuleStack.set(returnState.ruleIndex!)
}
}
return
}
}
var n: Int = s.getNumberOfTransitions()
let n = s.getNumberOfTransitions()
for i in 0..<n {
var t: Transition = s.transition(i)
if type(of: t) == RuleTransition.self {
if try calledRuleStack.get((t as! RuleTransition).target.ruleIndex!) {
let t = s.transition(i)
if let rt = t as? RuleTransition {
if try! calledRuleStack.get(rt.target.ruleIndex!) {
continue
}
var newContext: PredictionContext =
SingletonPredictionContext.create(ctx, (t as! RuleTransition).followState.stateNumber)
try calledRuleStack.set((t as! RuleTransition).target.ruleIndex!)
try _LOOK(t.target, stopState, newContext, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
defer {
try! calledRuleStack.clear((t as! RuleTransition).target.ruleIndex!)
}
} else {
if t is AbstractPredicateTransition {
if seeThruPreds {
try _LOOK(t.target, stopState, ctx, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
try look.add(HIT_PRED)
}
let newContext = SingletonPredictionContext.create(ctx, rt.followState.stateNumber)
try! calledRuleStack.set(rt.target.ruleIndex!)
_LOOK(t.target, stopState, newContext, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
try! calledRuleStack.clear(rt.target.ruleIndex!)
}
else if t is AbstractPredicateTransition {
if seeThruPreds {
_LOOK(t.target, stopState, ctx, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
if t.isEpsilon() {
try _LOOK(t.target, stopState, ctx, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
if type(of: t) == WildcardTransition.self {
try look.addAll(IntervalSet.of(CommonToken.MIN_USER_TOKEN_TYPE, atn.maxTokenType))
} else {
var set: IntervalSet? = try t.labelIntervalSet()
if set != nil {
if t is NotSetTransition {
set = try set!.complement(IntervalSet.of(CommonToken.MIN_USER_TOKEN_TYPE, atn.maxTokenType)) as? IntervalSet
}
try look.addAll(set)
}
}
try! look.add(HIT_PRED)
}
}
else if t.isEpsilon() {
_LOOK(t.target, stopState, ctx, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
}
else if t is WildcardTransition {
try! look.addAll(IntervalSet.of(CommonToken.MIN_USER_TOKEN_TYPE, atn.maxTokenType))
}
else {
var set = t.labelIntervalSet()
if set != nil {
if t is NotSetTransition {
set = set!.complement(IntervalSet.of(CommonToken.MIN_USER_TOKEN_TYPE, atn.maxTokenType)) as? IntervalSet
}
try! look.addAll(set)
}
}
}

View File

@ -77,15 +77,14 @@ public class LexerATNConfig: ATNConfig {
}*/
public var hashValue: Int {
var hashCode: Int = MurmurHash.initialize(7)
var hashCode = MurmurHash.initialize(7)
hashCode = MurmurHash.update(hashCode, state.stateNumber)
hashCode = MurmurHash.update(hashCode, alt)
hashCode = MurmurHash.update(hashCode, context)
hashCode = MurmurHash.update(hashCode, semanticContext)
hashCode = MurmurHash.update(hashCode, passedThroughNonGreedyDecision ? 1 : 0)
hashCode = MurmurHash.update(hashCode, lexerActionExecutor)
hashCode = MurmurHash.finish(hashCode, 6)
return hashCode
return MurmurHash.finish(hashCode, 6)
}

View File

@ -11,11 +11,11 @@
///
open class LexerATNSimulator: ATNSimulator {
public static let debug: Bool = false
public let dfa_debug: Bool = false
public static let debug = false
public let dfa_debug = false
public static let MIN_DFA_EDGE: Int = 0
public static let MAX_DFA_EDGE: Int = 127
public static let MIN_DFA_EDGE = 0
public static let MAX_DFA_EDGE = 127
// forces unicode to stay in ATN
///
@ -50,7 +50,7 @@ open class LexerATNSimulator: ATNSimulator {
}
internal let recog: Lexer?
internal weak var recog: Lexer?
///
/// The current token's starting index into the character stream.
@ -58,21 +58,21 @@ open class LexerATNSimulator: ATNSimulator {
/// DFA did not have a previous accept state. In this case, we use the
/// ATN-generated exception object.
///
internal var startIndex: Int = -1
internal var startIndex = -1
///
/// line number 1..n within the input
///
public var line: Int = 1
public var line = 1
///
/// The index of the character relative to the beginning of the line 0..n-1
///
public var charPositionInLine: Int = 0
public var charPositionInLine = 0
public final var decisionToDFA: [DFA]
internal var mode: Int = Lexer.DEFAULT_MODE
internal var mode = Lexer.DEFAULT_MODE
///
/// mutex for DFAState change
@ -88,9 +88,9 @@ open class LexerATNSimulator: ATNSimulator {
/// Used during DFA/ATN exec to record the most recent accept configuration info
///
internal final var prevAccept: SimState = SimState()
internal final var prevAccept = SimState()
public static var match_calls: Int = 0
public static var match_calls = 0
public convenience init(_ atn: ATN, _ decisionToDFA: [DFA],
_ sharedContextCache: PredictionContextCache) {
@ -116,11 +116,11 @@ open class LexerATNSimulator: ATNSimulator {
open func match(_ input: CharStream, _ mode: Int) throws -> Int {
LexerATNSimulator.match_calls += 1
self.mode = mode
var mark: Int = input.mark()
var mark = input.mark()
do {
self.startIndex = input.index()
self.prevAccept.reset()
var dfa: DFA = decisionToDFA[mode]
var dfa = decisionToDFA[mode]
defer {
try! input.release(mark)
}
@ -146,31 +146,30 @@ open class LexerATNSimulator: ATNSimulator {
override
open func clearDFA() {
for d in 0..<decisionToDFA.count {
decisionToDFA[d] = DFA(atn.getDecisionState(d)!, d)
}
}
internal func matchATN(_ input: CharStream) throws -> Int {
let startState: ATNState = atn.modeToStartState[mode]
let startState = atn.modeToStartState[mode]
if LexerATNSimulator.debug {
print("matchATN mode \(mode) start: \(startState)\n")
}
let old_mode: Int = mode
let old_mode = mode
let s0_closure: ATNConfigSet = try computeStartState(input, startState)
let suppressEdge: Bool = s0_closure.hasSemanticContext
let s0_closure = try computeStartState(input, startState)
let suppressEdge = s0_closure.hasSemanticContext
s0_closure.hasSemanticContext = false
let next: DFAState = addDFAState(s0_closure)
let next = addDFAState(s0_closure)
if !suppressEdge {
decisionToDFA[mode].s0 = next
}
let predict: Int = try execATN(input, next)
let predict = try execATN(input, next)
if LexerATNSimulator.debug {
print("DFA after matchATN: \(decisionToDFA[old_mode].toLexerString())")
@ -190,14 +189,13 @@ open class LexerATNSimulator: ATNSimulator {
captureSimState(prevAccept, input, ds0)
}
var t: Int = try input.LA(1)
var t = try input.LA(1)
var s: DFAState = ds0 // s is current/from DFA state
var s = ds0 // s is current/from DFA state
while true {
// while more work
if LexerATNSimulator.debug {
print("execATN loop starting closure: \(s.configs)\n")
}
@ -268,7 +266,7 @@ open class LexerATNSimulator: ATNSimulator {
return nil
}
let target: DFAState? = s.edges[t - LexerATNSimulator.MIN_DFA_EDGE]
let target = s.edges[t - LexerATNSimulator.MIN_DFA_EDGE]
if LexerATNSimulator.debug && target != nil {
print("reuse state \(s.stateNumber) edge to \(target!.stateNumber)")
}
@ -290,7 +288,7 @@ open class LexerATNSimulator: ATNSimulator {
///
internal func computeTargetState(_ input: CharStream, _ s: DFAState, _ t: Int) throws -> DFAState {
let reach: ATNConfigSet = OrderedATNConfigSet()
let reach = OrderedATNConfigSet()
// if we don't find an existing DFA state
// Fill reach starting from closure, following t transitions
@ -316,7 +314,7 @@ open class LexerATNSimulator: ATNSimulator {
internal func failOrAccept(_ prevAccept: SimState, _ input: CharStream,
_ reach: ATNConfigSet, _ t: Int) throws -> Int {
if let dfaState = prevAccept.dfaState {
let lexerActionExecutor: LexerActionExecutor? = dfaState.lexerActionExecutor
let lexerActionExecutor = dfaState.lexerActionExecutor
try accept(input, lexerActionExecutor, startIndex,
prevAccept.index, prevAccept.line, prevAccept.charPos)
return dfaState.prediction
@ -325,8 +323,7 @@ open class LexerATNSimulator: ATNSimulator {
if t == BufferedTokenStream.EOF && input.index() == startIndex {
return CommonToken.EOF
}
throw ANTLRException.recognition(e: LexerNoViableAltException(recog, input, startIndex, reach))
throw ANTLRException.recognition(e: LexerNoViableAltException(recog, input, startIndex, reach))
}
}
@ -338,12 +335,12 @@ open class LexerATNSimulator: ATNSimulator {
internal func getReachableConfigSet(_ input: CharStream, _ closureConfig: ATNConfigSet, _ reach: ATNConfigSet, _ t: Int) throws {
// this is used to skip processing for configs which have a lower priority
// than a config that already reached an accept state for the same rule
var skipAlt: Int = ATN.INVALID_ALT_NUMBER
for c: ATNConfig in closureConfig.configs {
var skipAlt = ATN.INVALID_ALT_NUMBER
for c in closureConfig.configs {
guard let c = c as? LexerATNConfig else {
continue
}
let currentAltReachedAcceptState: Bool = c.alt == skipAlt
let currentAltReachedAcceptState = (c.alt == skipAlt)
if currentAltReachedAcceptState && c.hasPassedThroughNonGreedyDecision() {
continue
}
@ -353,17 +350,17 @@ open class LexerATNSimulator: ATNSimulator {
}
let n: Int = c.state.getNumberOfTransitions()
let n = c.state.getNumberOfTransitions()
for ti in 0..<n {
// for each transition
let trans: Transition = c.state.transition(ti)
let trans = c.state.transition(ti)
if let target = getReachableTarget(trans, t) {
var lexerActionExecutor: LexerActionExecutor? = c.getLexerActionExecutor()
var lexerActionExecutor = c.getLexerActionExecutor()
if lexerActionExecutor != nil {
lexerActionExecutor = lexerActionExecutor!.fixOffsetBeforeMatch(input.index() - startIndex)
}
let treatEofAsEpsilon: Bool = t == BufferedTokenStream.EOF
let treatEofAsEpsilon = (t == BufferedTokenStream.EOF)
if try closure(input,
LexerATNConfig(c, target, lexerActionExecutor),
reach,
@ -384,7 +381,6 @@ open class LexerATNSimulator: ATNSimulator {
_ startIndex: Int, _ index: Int, _ line: Int, _ charPos: Int) throws {
if LexerATNSimulator.debug {
print("ACTION \(String(describing: lexerActionExecutor))\n")
}
// seek to after last char in token
@ -393,7 +389,7 @@ open class LexerATNSimulator: ATNSimulator {
self.charPositionInLine = charPos
//TODO: CHECK
if let lexerActionExecutor = lexerActionExecutor, let recog = recog {
try lexerActionExecutor.execute(recog, input, startIndex)
try lexerActionExecutor.execute(recog, input, startIndex)
}
}
@ -409,12 +405,12 @@ open class LexerATNSimulator: ATNSimulator {
final func computeStartState(_ input: CharStream,
_ p: ATNState) throws -> ATNConfigSet {
let initialContext: PredictionContext = PredictionContext.EMPTY
let configs: ATNConfigSet = OrderedATNConfigSet()
let initialContext = PredictionContext.EMPTY
let configs = OrderedATNConfigSet()
let length = p.getNumberOfTransitions()
for i in 0..<length {
let target: ATNState = p.transition(i).target
let c: LexerATNConfig = LexerATNConfig(target, i + 1, initialContext)
let target = p.transition(i).target
let c = LexerATNConfig(target, i + 1, initialContext)
try closure(input, c, configs, false, false, false)
}
return configs
@ -441,10 +437,8 @@ open class LexerATNSimulator: ATNSimulator {
if LexerATNSimulator.debug {
if recog != nil {
print("closure at \(recog!.getRuleNames()[config.state.ruleIndex!]) rule stop \(config)\n")
} else {
print("closure at rule stop \(config)\n")
}
}
@ -462,9 +456,9 @@ open class LexerATNSimulator: ATNSimulator {
let length = configContext.size()
for i in 0..<length {
if configContext.getReturnState(i) != PredictionContext.EMPTY_RETURN_STATE {
let newContext: PredictionContext = configContext.getParent(i)! // "pop" return state
let returnState: ATNState? = atn.states[configContext.getReturnState(i)]
let c: LexerATNConfig = LexerATNConfig(config, returnState!, newContext)
let newContext = configContext.getParent(i)! // "pop" return state
let returnState = atn.states[configContext.getReturnState(i)]
let c = LexerATNConfig(config, returnState!, newContext)
currentAltReachedAcceptState = try closure(input, c, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
}
}
@ -480,13 +474,12 @@ open class LexerATNSimulator: ATNSimulator {
}
}
let p: ATNState = config.state
let p = config.state
let length = p.getNumberOfTransitions()
for i in 0..<length {
let t: Transition = p.transition(i)
let c: LexerATNConfig? = try getEpsilonTarget(input, config, t, configs, speculative, treatEofAsEpsilon)
if c != nil {
currentAltReachedAcceptState = try closure(input, c!, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
let t = p.transition(i)
if let c = try getEpsilonTarget(input, config, t, configs, speculative, treatEofAsEpsilon) {
currentAltReachedAcceptState = try closure(input, c, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
}
}
@ -504,9 +497,8 @@ open class LexerATNSimulator: ATNSimulator {
var c: LexerATNConfig? = nil
switch t.getSerializationType() {
case Transition.RULE:
let ruleTransition: RuleTransition = t as! RuleTransition
let newContext: PredictionContext =
SingletonPredictionContext.create(config.context, ruleTransition.followState.stateNumber)
let ruleTransition = t as! RuleTransition
let newContext = SingletonPredictionContext.create(config.context, ruleTransition.followState.stateNumber)
c = LexerATNConfig(config, t.target, newContext)
break
@ -534,7 +526,7 @@ open class LexerATNSimulator: ATNSimulator {
/// states reached by traversing predicates. Since this is when we
/// test them, we cannot cash the DFA state target of ID.
///
let pt: PredicateTransition = t as! PredicateTransition
let pt = t as! PredicateTransition
if LexerATNSimulator.debug {
print("EVAL rule \(pt.ruleIndex):\(pt.predIndex)")
}
@ -558,7 +550,7 @@ open class LexerATNSimulator: ATNSimulator {
// getEpsilonTarget to return two configurations, so
// additional modifications are needed before we can support
// the split operation.
let lexerActionExecutor: LexerActionExecutor = LexerActionExecutor.append(config.getLexerActionExecutor(), atn.lexerActions[(t as! ActionTransition).actionIndex])
let lexerActionExecutor = LexerActionExecutor.append(config.getLexerActionExecutor(), atn.lexerActions[(t as! ActionTransition).actionIndex])
c = LexerATNConfig(config, t.target, lexerActionExecutor)
break
} else {
@ -619,10 +611,10 @@ open class LexerATNSimulator: ATNSimulator {
return try recog.sempred(nil, ruleIndex, predIndex)
}
var savedCharPositionInLine: Int = charPositionInLine
var savedLine: Int = line
var index: Int = input.index()
var marker: Int = input.mark()
var savedCharPositionInLine = charPositionInLine
var savedLine = line
var index = input.index()
var marker = input.mark()
do {
try consume(input)
defer
@ -663,11 +655,9 @@ open class LexerATNSimulator: ATNSimulator {
/// If that gets us to a previously created (but dangling) DFA
/// state, we can continue in pure DFA mode from there.
///
let suppressEdge: Bool = q.hasSemanticContext
let suppressEdge = q.hasSemanticContext
q.hasSemanticContext = false
let to: DFAState = addDFAState(q)
let to = addDFAState(q)
if suppressEdge {
return to
@ -690,8 +680,7 @@ open class LexerATNSimulator: ATNSimulator {
dfaStateMutex.synchronized {
if p.edges == nil {
// make room for tokens 1..n and -1 masquerading as index 0
//TODO ARRAY COUNT
p.edges = [DFAState?](repeating: nil, count: LexerATNSimulator.MAX_DFA_EDGE - LexerATNSimulator.MIN_DFA_EDGE + 1) //new DFAState[MAX_DFA_EDGE-MIN_DFA_EDGE+1];
p.edges = [DFAState?](repeating: nil, count: LexerATNSimulator.MAX_DFA_EDGE - LexerATNSimulator.MIN_DFA_EDGE + 1)
}
p.edges[t - LexerATNSimulator.MIN_DFA_EDGE] = q // connect
}
@ -711,8 +700,8 @@ open class LexerATNSimulator: ATNSimulator {
///
assert(!configs.hasSemanticContext, "Expected: !configs.hasSemanticContext")
let proposed: DFAState = DFAState(configs)
let firstConfigWithRuleStopState: ATNConfig? = configs.firstConfigWithRuleStopState
let proposed = DFAState(configs)
let firstConfigWithRuleStopState = configs.firstConfigWithRuleStopState
if firstConfigWithRuleStopState != nil {
proposed.isAcceptState = true
@ -720,14 +709,14 @@ open class LexerATNSimulator: ATNSimulator {
proposed.prediction = atn.ruleToTokenType[firstConfigWithRuleStopState!.state.ruleIndex!]
}
let dfa: DFA = decisionToDFA[mode]
let dfa = decisionToDFA[mode]
return dfaStatesMutex.synchronized {
if let existing = dfa.states[proposed] {
return existing!
}
let newState: DFAState = proposed
let newState = proposed
newState.stateNumber = dfa.states.count
configs.setReadonly(true)
newState.configs = configs
@ -747,7 +736,7 @@ open class LexerATNSimulator: ATNSimulator {
public func getText(_ input: CharStream) -> String {
// index is first lookahead char, don't include.
return input.getText(Interval.of(startIndex, input.index() - 1))
return try! input.getText(Interval.of(startIndex, input.index() - 1))
}
public func getLine() -> Int {
@ -767,7 +756,7 @@ open class LexerATNSimulator: ATNSimulator {
}
public func consume(_ input: CharStream) throws {
let curChar: Int = try input.LA(1)
let curChar = try input.LA(1)
if String(Character(integerLiteral: curChar)) == "\n" {
line += 1
charPositionInLine = 0

View File

@ -22,8 +22,7 @@ public class LexerAction: Hashable {
/// - returns: The serialization type of the lexer action.
///
public func getActionType() -> LexerActionType {
RuntimeException(" must overriden ")
fatalError()
fatalError(#function + " must be overridden")
}
@ -42,8 +41,7 @@ public class LexerAction: Hashable {
/// otherwise, `false`.
///
public func isPositionDependent() -> Bool {
RuntimeException(" must overriden ")
fatalError()
fatalError(#function + " must be overridden")
}
///
@ -55,12 +53,11 @@ public class LexerAction: Hashable {
/// - parameter lexer: The lexer instance.
///
public func execute(_ lexer: Lexer) throws {
RuntimeException(" must overriden ")
fatalError(#function + " must be overridden")
}
public var hashValue: Int {
RuntimeException(" must overriden ")
fatalError()
fatalError(#function + " must be overridden")
}
}

View File

@ -36,7 +36,7 @@ public class LexerActionExecutor: Hashable {
public init(_ lexerActions: [LexerAction]) {
self.lexerActions = lexerActions
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
for lexerAction: LexerAction in lexerActions {
hash = MurmurHash.update(hash, lexerAction)
}

View File

@ -65,7 +65,7 @@ public final class LexerChannelAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
hash = MurmurHash.update(hash, channel)
return MurmurHash.finish(hash, 2)

View File

@ -94,7 +94,7 @@ public final class LexerCustomAction: LexerAction {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
hash = MurmurHash.update(hash, ruleIndex)
hash = MurmurHash.update(hash, actionIndex)

View File

@ -97,7 +97,7 @@ public final class LexerIndexedCustomAction: LexerAction {
public override var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, offset)
hash = MurmurHash.update(hash, action)
return MurmurHash.finish(hash, 2)

View File

@ -64,7 +64,7 @@ public final class LexerModeAction: LexerAction, CustomStringConvertible {
}
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
hash = MurmurHash.update(hash, mode)
return MurmurHash.finish(hash, 2)

View File

@ -58,7 +58,7 @@ public final class LexerMoreAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
return MurmurHash.finish(hash, 1)

View File

@ -59,7 +59,7 @@ public final class LexerPopModeAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
return MurmurHash.finish(hash, 1)

View File

@ -66,7 +66,7 @@ public final class LexerPushModeAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
hash = MurmurHash.update(hash, mode)
return MurmurHash.finish(hash, 2)

View File

@ -58,7 +58,7 @@ public final class LexerSkipAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
return MurmurHash.finish(hash, 1)
}

View File

@ -64,7 +64,7 @@ public class LexerTypeAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
hash = MurmurHash.update(hash, type)
return MurmurHash.finish(hash, 2)

File diff suppressed because it is too large Load Diff

View File

@ -12,18 +12,19 @@ public class PredictionContext: Hashable, CustomStringConvertible {
/// Represents `$` in local context prediction, which means wildcard.
/// `+x = *`.
///
public static let EMPTY: EmptyPredictionContext = EmptyPredictionContext()
public static let EMPTY = EmptyPredictionContext()
///
/// Represents `$` in an array in full context mode, when `$`
/// doesn't mean wildcard: `$ + x = [$,x]`. Here,
/// `$` = _#EMPTY_RETURN_STATE_.
///
public static let EMPTY_RETURN_STATE: Int = Int(Int32.max)
public static let EMPTY_RETURN_STATE = Int(Int32.max)
private static let INITIAL_HASH: Int = 1
private static let INITIAL_HASH = UInt32(1)
public static var globalNodeCount = 0
public static var globalNodeCount: Int = 0
public final let id: Int = {
let oldGlobalNodeCount = globalNodeCount
globalNodeCount += 1
@ -62,12 +63,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
/// Return _#EMPTY_ if `outerContext` is empty or null.
///
public static func fromRuleContext(_ atn: ATN, _ outerContext: RuleContext?) -> PredictionContext {
var _outerContext: RuleContext
if let outerContext = outerContext {
_outerContext = outerContext
}else {
_outerContext = RuleContext.EMPTY
}
let _outerContext = outerContext ?? RuleContext.EMPTY
// if we are in RuleContext of start rule, s, then PredictionContext
// is EMPTY. Nobody called us. (if we are empty, return empty)
@ -76,29 +72,25 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
// If we have a parent, convert it to a PredictionContext graph
var parent: PredictionContext = EMPTY
parent = PredictionContext.fromRuleContext(atn, _outerContext.parent)
let parent = PredictionContext.fromRuleContext(atn, _outerContext.parent)
let state: ATNState = atn.states[_outerContext.invokingState]!
let transition: RuleTransition = state.transition(0) as! RuleTransition
let state = atn.states[_outerContext.invokingState]!
let transition = state.transition(0) as! RuleTransition
return SingletonPredictionContext.create(parent, transition.followState.stateNumber)
}
public func size() -> Int {
RuntimeException(#function + " must be overridden")
return 0
fatalError(#function + " must be overridden")
}
public func getParent(_ index: Int) -> PredictionContext? {
RuntimeException(#function + " must be overridden")
return nil
fatalError(#function + " must be overridden")
}
public func getReturnState(_ index: Int) -> Int {
RuntimeException(#function + " must be overridden")
return 0
fatalError(#function + " must be overridden")
}
@ -118,21 +110,19 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
static func calculateEmptyHashCode() -> Int {
var hash: Int = MurmurHash.initialize(INITIAL_HASH)
hash = MurmurHash.finish(hash, 0)
return hash
let hash = MurmurHash.initialize(INITIAL_HASH)
return MurmurHash.finish(hash, 0)
}
static func calculateHashCode(_ parent: PredictionContext?, _ returnState: Int) -> Int {
var hash: Int = MurmurHash.initialize(INITIAL_HASH)
var hash = MurmurHash.initialize(INITIAL_HASH)
hash = MurmurHash.update(hash, parent)
hash = MurmurHash.update(hash, returnState)
hash = MurmurHash.finish(hash, 2)
return hash
return MurmurHash.finish(hash, 2)
}
static func calculateHashCode(_ parents: [PredictionContext?], _ returnStates: [Int]) -> Int {
var hash: Int = MurmurHash.initialize(INITIAL_HASH)
var hash = MurmurHash.initialize(INITIAL_HASH)
var length = parents.count
for i in 0..<length {
hash = MurmurHash.update(hash, parents[i])
@ -142,8 +132,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
hash = MurmurHash.update(hash, returnStates[i])
}
hash = MurmurHash.finish(hash, 2 * parents.count)
return hash
return MurmurHash.finish(hash, 2 * parents.count)
}
// dispatch
@ -163,29 +152,27 @@ public class PredictionContext: Hashable, CustomStringConvertible {
return a
}
if (a is SingletonPredictionContext && b is SingletonPredictionContext) {
return mergeSingletons(a as! SingletonPredictionContext,
b as! SingletonPredictionContext,
rootIsWildcard, &mergeCache)
if let spc_a = a as? SingletonPredictionContext, let spc_b = b as? SingletonPredictionContext {
return mergeSingletons(spc_a, spc_b, rootIsWildcard, &mergeCache)
}
// At least one of a or b is array
// If one is $ and rootIsWildcard, return $ as * wildcard
if (rootIsWildcard) {
if (a is EmptyPredictionContext) {
if rootIsWildcard {
if a is EmptyPredictionContext {
return a
}
if (b is EmptyPredictionContext) {
if b is EmptyPredictionContext {
return b
}
}
// convert singleton so both are arrays to normalize
if (a is SingletonPredictionContext) {
a = ArrayPredictionContext(a as! SingletonPredictionContext)
if let spc_a = a as? SingletonPredictionContext {
a = ArrayPredictionContext(spc_a)
}
if (b is SingletonPredictionContext) {
b = ArrayPredictionContext(b as! SingletonPredictionContext)
if let spc_b = b as? SingletonPredictionContext {
b = ArrayPredictionContext(spc_b)
}
return mergeArrays(a as! ArrayPredictionContext, b as! ArrayPredictionContext,
rootIsWildcard, &mergeCache)
@ -225,7 +212,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?) -> PredictionContext {
if let mergeCache = mergeCache {
var previous: PredictionContext? = mergeCache.get(a, b)
var previous = mergeCache.get(a, b)
if previous != nil {
return previous!
}
@ -243,45 +230,45 @@ public class PredictionContext: Hashable, CustomStringConvertible {
return rootMerge
}
if (a.returnState == b.returnState) {
if a.returnState == b.returnState {
// a == b
let parent: PredictionContext = merge(a.parent!, b.parent!, rootIsWildcard, &mergeCache);
let parent = merge(a.parent!, b.parent!, rootIsWildcard, &mergeCache)
// if parent is same as existing a or b parent or reduced to a parent, return it
if (parent === a.parent!) {
if parent === a.parent! {
return a
} // ax + bx = ax, if a=b
if (parent === b.parent!) {
if parent === b.parent! {
return b
} // ax + bx = bx, if a=b
// else: ax + ay = a'[x,y]
// merge parents x and y, giving array node with x,y then remainders
// of those graphs. dup a, a' points at merged array
// new joined parent so create new singleton pointing to it, a'
let a_: PredictionContext = SingletonPredictionContext.create(parent, a.returnState);
if (mergeCache != nil) {
let a_ = SingletonPredictionContext.create(parent, a.returnState);
if mergeCache != nil {
mergeCache!.put(a, b, a_)
}
return a_
} else {
// a != b payloads differ
// see if we can collapse parents due to $+x parents if local ctx
var singleParent: PredictionContext? = nil;
var singleParent: PredictionContext? = nil
//added by janyou
if a === b || (a.parent != nil && a.parent! == b.parent) {
// ax + bx = [a,b]x
singleParent = a.parent
}
if (singleParent != nil) {
if singleParent != nil {
// parents are same
// sort payloads and use same parent
var payloads: [Int] = [a.returnState, b.returnState];
if (a.returnState > b.returnState) {
var payloads = [a.returnState, b.returnState]
if a.returnState > b.returnState {
payloads[0] = b.returnState
payloads[1] = a.returnState
}
let parents: [PredictionContext?] = [singleParent, singleParent]
let a_: PredictionContext = ArrayPredictionContext(parents, payloads)
if (mergeCache != nil) {
let parents = [singleParent, singleParent]
let a_ = ArrayPredictionContext(parents, payloads)
if mergeCache != nil {
mergeCache!.put(a, b, a_)
}
return a_
@ -289,19 +276,19 @@ public class PredictionContext: Hashable, CustomStringConvertible {
// parents differ and can't merge them. Just pack together
// into array; can't merge.
// ax + by = [ax,by]
var payloads: [Int] = [a.returnState, b.returnState]
var parents: [PredictionContext?] = [a.parent, b.parent];
if (a.returnState > b.returnState) {
var payloads = [a.returnState, b.returnState]
var parents = [a.parent, b.parent]
if a.returnState > b.returnState {
// sort by payload
payloads[0] = b.returnState
payloads[1] = a.returnState
parents = [b.parent, a.parent]
}
if a is EmptyPredictionContext {
// print("parenet is null")
// print("parent is null")
}
let a_: PredictionContext = ArrayPredictionContext(parents, payloads);
if (mergeCache != nil) {
let a_ = ArrayPredictionContext(parents, payloads)
if mergeCache != nil {
mergeCache!.put(a, b, a_)
}
return a_
@ -349,31 +336,29 @@ public class PredictionContext: Hashable, CustomStringConvertible {
public static func mergeRoot(_ a: SingletonPredictionContext,
_ b: SingletonPredictionContext,
_ rootIsWildcard: Bool) -> PredictionContext? {
if (rootIsWildcard) {
if (a === PredictionContext.EMPTY) {
if rootIsWildcard {
if a === PredictionContext.EMPTY {
return PredictionContext.EMPTY
} // * + b = *
if (b === PredictionContext.EMPTY) {
if b === PredictionContext.EMPTY {
return PredictionContext.EMPTY
} // a + * = *
} else {
if (a === PredictionContext.EMPTY && b === PredictionContext.EMPTY) {
if a === PredictionContext.EMPTY && b === PredictionContext.EMPTY {
return PredictionContext.EMPTY
} // $ + $ = $
if (a === PredictionContext.EMPTY) {
if a === PredictionContext.EMPTY {
// $ + x = [$,x]
let payloads: [Int] = [b.returnState, EMPTY_RETURN_STATE]
let parents: [PredictionContext?] = [b.parent, nil]
let joined: PredictionContext =
ArrayPredictionContext(parents, payloads)
return joined;
let payloads = [b.returnState, EMPTY_RETURN_STATE]
let parents = [b.parent, nil]
let joined = ArrayPredictionContext(parents, payloads)
return joined
}
if (b === PredictionContext.EMPTY) {
if b === PredictionContext.EMPTY {
// x + $ = [$,x] ($ is always first if present)
let payloads: [Int] = [a.returnState, EMPTY_RETURN_STATE]
let parents: [PredictionContext?] = [a.parent, nil]
let joined: PredictionContext =
ArrayPredictionContext(parents, payloads)
let payloads = [a.returnState, EMPTY_RETURN_STATE]
let parents = [a.parent, nil]
let joined = ArrayPredictionContext(parents, payloads)
return joined
}
}
@ -405,30 +390,29 @@ public class PredictionContext: Hashable, CustomStringConvertible {
_ rootIsWildcard: Bool,
_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?) -> PredictionContext {
if (mergeCache != nil) {
var previous: PredictionContext? = mergeCache!.get(a, b)
if (previous != nil) {
if mergeCache != nil {
var previous = mergeCache!.get(a, b)
if previous != nil {
return previous!
}
previous = mergeCache!.get(b, a)
if (previous != nil) {
if previous != nil {
return previous!
}
}
// merge sorted payloads a + b => M
var i: Int = 0 // walks a
var j: Int = 0 // walks b
var k: Int = 0// walks target M array
var i = 0 // walks a
var j = 0 // walks b
var k = 0 // walks target M array
let aReturnStatesLength = a.returnStates.count
let bReturnStatesLength = b.returnStates.count
let mergedReturnStatesLength = aReturnStatesLength + bReturnStatesLength
var mergedReturnStates: [Int] = [Int](repeating: 0, count: mergedReturnStatesLength)
var mergedReturnStates = [Int](repeating: 0, count: mergedReturnStatesLength)
var mergedParents: [PredictionContext?] = [PredictionContext?](repeating: nil, count: mergedReturnStatesLength)
//new PredictionContext[a.returnStates.length + b.returnStates.length];
var mergedParents = [PredictionContext?](repeating: nil, count: mergedReturnStatesLength)
// walk and merge to yield mergedParents, mergedReturnStates
let aReturnStates = a.returnStates
let bReturnStates = b.returnStates
@ -436,35 +420,27 @@ public class PredictionContext: Hashable, CustomStringConvertible {
let bParents = b.parents
while i < aReturnStatesLength && j < bReturnStatesLength {
let a_parent: PredictionContext? = aParents[i]
let b_parent: PredictionContext? = bParents[j]
if (aReturnStates[i] == bReturnStates[j]) {
let a_parent = aParents[i]
let b_parent = bParents[j]
if aReturnStates[i] == bReturnStates[j] {
// same payload (stack tops are equal), must yield merged singleton
let payload: Int = aReturnStates[i]
let payload = aReturnStates[i]
// $+$ = $
var both$: Bool = (payload == EMPTY_RETURN_STATE)
both$ = both$ && a_parent == nil
both$ = both$ && b_parent == nil
// let both$: Bool = ((payload == EMPTY_RETURN_STATE) &&
// a_parent == nil && b_parent == nil)
var ax_ax: Bool = (a_parent != nil && b_parent != nil)
ax_ax = ax_ax && a_parent! == b_parent!
// let ax_ax: Bool = (a_parent != nil && b_parent != nil) && a_parent! == b_parent! // ax+ax -> ax
let both$ = ((payload == EMPTY_RETURN_STATE) && a_parent == nil && b_parent == nil)
let ax_ax = (a_parent != nil && b_parent != nil && a_parent! == b_parent!)
if (both$ || ax_ax) {
if both$ || ax_ax {
mergedParents[k] = a_parent // choose left
mergedReturnStates[k] = payload
} else {
// ax+ay -> a'[x,y]
let mergedParent: PredictionContext =
merge(a_parent!, b_parent!, rootIsWildcard, &mergeCache)
let mergedParent = merge(a_parent!, b_parent!, rootIsWildcard, &mergeCache)
mergedParents[k] = mergedParent
mergedReturnStates[k] = payload
}
i += 1 // hop over left one as usual
j += 1 // but also skip one in right side since we merge
} else if (aReturnStates[i] < bReturnStates[j]) {
} else if aReturnStates[i] < bReturnStates[j] {
// copy a[i] to M
mergedParents[k] = a_parent
mergedReturnStates[k] = aReturnStates[i]
@ -479,7 +455,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
// copy over any payloads remaining in either array
if (i < aReturnStatesLength) {
if i < aReturnStatesLength {
for p in i..<aReturnStatesLength {
mergedParents[k] = aParents[p]
@ -495,14 +471,12 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
// trim merged if we combined a few that had same stack tops
if (k < mergedParents.count) {
if k < mergedParents.count {
// write index < last position; trim
if (k == 1) {
if k == 1 {
// for just one merged element, return singleton top
let a_: PredictionContext =
SingletonPredictionContext.create(mergedParents[0],
mergedReturnStates[0])
if (mergeCache != nil) {
let a_ = SingletonPredictionContext.create(mergedParents[0], mergedReturnStates[0])
if mergeCache != nil {
mergeCache!.put(a, b, a_)
}
//print("merge array 1 \(a_)")
@ -512,8 +486,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
mergedReturnStates = Array(mergedReturnStates[0 ..< k])
}
let M: ArrayPredictionContext =
ArrayPredictionContext(mergedParents, mergedReturnStates)
let M = ArrayPredictionContext(mergedParents, mergedReturnStates)
// if we created same array as a or b, return that instead
// TODO: track whether this is possible above during merge sort for speed
@ -542,24 +515,24 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
public static func toDOTString(_ context: PredictionContext?) -> String {
if (context == nil) {
if context == nil {
return ""
}
let buf: StringBuilder = StringBuilder()
let buf = StringBuilder()
buf.append("digraph G {\n")
buf.append("rankdir=LR;\n")
var nodes: Array<PredictionContext> = getAllContextNodes(context!)
var nodes = getAllContextNodes(context!)
nodes.sort(by: { $0.id > $1.id })
nodes.sort { $0.id > $1.id }
for current: PredictionContext in nodes {
if (current is SingletonPredictionContext) {
let s: String = String(current.id)
for current in nodes {
if current is SingletonPredictionContext {
let s = String(current.id)
buf.append(" s").append(s)
var returnState: String = String(current.getReturnState(0))
if (current is EmptyPredictionContext) {
var returnState = String(current.getReturnState(0))
if current is EmptyPredictionContext {
returnState = "$"
}
buf.append(" [label=\"")
@ -567,17 +540,17 @@ public class PredictionContext: Hashable, CustomStringConvertible {
buf.append("\"];\n")
continue
}
let arr: ArrayPredictionContext = current as! ArrayPredictionContext
let arr = current as! ArrayPredictionContext
buf.append(" s").append(arr.id)
buf.append(" [shape=box, label=\"")
buf.append("[")
var first: Bool = true
var first = true
let returnStates = arr.returnStates
for inv: Int in returnStates {
if (!first) {
for inv in returnStates {
if !first {
buf.append(", ")
}
if (inv == EMPTY_RETURN_STATE) {
if inv == EMPTY_RETURN_STATE {
buf.append("$")
} else {
buf.append(inv)
@ -588,8 +561,8 @@ public class PredictionContext: Hashable, CustomStringConvertible {
buf.append("\"];\n")
}
for current: PredictionContext in nodes {
if (current === EMPTY) {
for current in nodes {
if current === EMPTY {
continue
}
let length = current.size()
@ -597,13 +570,13 @@ public class PredictionContext: Hashable, CustomStringConvertible {
guard let currentParent = current.getParent(i) else {
continue
}
let s: String = String(current.id)
let s = String(current.id)
buf.append(" s").append(s)
buf.append("->")
buf.append("s")
buf.append(currentParent.id)
if (current.size() > 1) {
buf.append(" [label=\"parent[\(i)]\"];\n");
if current.size() > 1 {
buf.append(" [label=\"parent[\(i)]\"];\n")
} else {
buf.append(";\n")
}
@ -619,23 +592,23 @@ public class PredictionContext: Hashable, CustomStringConvertible {
_ context: PredictionContext,
_ contextCache: PredictionContextCache,
_ visited: HashMap<PredictionContext, PredictionContext>) -> PredictionContext {
if (context.isEmpty()) {
if context.isEmpty() {
return context
}
var existing: PredictionContext? = visited[context]
if (existing != nil) {
var existing = visited[context]
if existing != nil {
return existing!
}
existing = contextCache.get(context)
if (existing != nil) {
if existing != nil {
visited[context] = existing!
return existing!
}
var changed: Bool = false
var parents: [PredictionContext?] = [PredictionContext?](repeating: nil, count: context.size())
var changed = false
var parents = [PredictionContext?](repeating: nil, count: context.size())
let length = parents.count
for i in 0..<length {
//added by janyou
@ -643,10 +616,10 @@ public class PredictionContext: Hashable, CustomStringConvertible {
return context
}
let parent: PredictionContext = getCachedContext(context.getParent(i)!, contextCache, visited)
let parent = getCachedContext(context.getParent(i)!, contextCache, visited)
//modified by janyou != !==
if (changed || parent !== context.getParent(i)) {
if (!changed) {
if changed || parent !== context.getParent(i) {
if !changed {
parents = [PredictionContext?](repeating: nil, count: context.size())
for j in 0..<context.size() {
@ -660,22 +633,22 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
}
if (!changed) {
if !changed {
contextCache.add(context)
visited[context] = context
return context
}
var updated: PredictionContext
if (parents.count == 0) {
let updated: PredictionContext
if parents.isEmpty {
updated = EMPTY
} else {
if (parents.count == 1) {
updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0))
} else {
let arrayPredictionContext: ArrayPredictionContext = context as! ArrayPredictionContext
updated = ArrayPredictionContext(parents, arrayPredictionContext.returnStates)
}
}
else if parents.count == 1 {
updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0))
}
else {
let arrayPredictionContext = context as! ArrayPredictionContext
updated = ArrayPredictionContext(parents, arrayPredictionContext.returnStates)
}
contextCache.add(updated)
@ -688,20 +661,17 @@ public class PredictionContext: Hashable, CustomStringConvertible {
// ter's recursive version of Sam's getAllNodes()
public static func getAllContextNodes(_ context: PredictionContext) -> Array<PredictionContext> {
var nodes: Array<PredictionContext> = Array<PredictionContext>()
let visited: HashMap<PredictionContext, PredictionContext> =
HashMap<PredictionContext, PredictionContext>()
public static func getAllContextNodes(_ context: PredictionContext) -> [PredictionContext] {
var nodes = [PredictionContext]()
let visited = HashMap<PredictionContext, PredictionContext>()
getAllContextNodes_(context, &nodes, visited)
return nodes
}
public static func getAllContextNodes_(_ context: PredictionContext?,
_ nodes: inout Array<PredictionContext>,
_ nodes: inout [PredictionContext],
_ visited: HashMap<PredictionContext, PredictionContext>) {
//if (context == nil || visited.keys.contains(context!)) {
guard let context = context , visited[context] == nil else {
guard let context = context, visited[context] == nil else {
return
}
visited[context] = context
@ -712,67 +682,66 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
}
public func toString<T:ATNSimulator>(_ recog: Recognizer<T>) -> String {
public func toString<T>(_ recog: Recognizer<T>) -> String {
return NSStringFromClass(PredictionContext.self)
// return toString(recog, ParserRuleContext.EMPTY);
}
public func toStrings<T:ATNSimulator>(_ recognizer: Recognizer<T>, _ currentState: Int) -> [String] {
public func toStrings<T>(_ recognizer: Recognizer<T>, _ currentState: Int) -> [String] {
return toStrings(recognizer, PredictionContext.EMPTY, currentState)
}
// FROM SAM
public func toStrings<T:ATNSimulator>(_ recognizer: Recognizer<T>?, _ stop: PredictionContext, _ currentState: Int) -> [String] {
var result: Array<String> = Array<String>()
var perm: Int = 0
public func toStrings<T>(_ recognizer: Recognizer<T>?, _ stop: PredictionContext, _ currentState: Int) -> [String] {
var result = [String]()
var perm = 0
outer: while true {
var offset: Int = 0
var last: Bool = true
var p: PredictionContext = self
var stateNumber: Int = currentState
let localBuffer: StringBuilder = StringBuilder()
var offset = 0
var last = true
var p = self
var stateNumber = currentState
let localBuffer = StringBuilder()
localBuffer.append("[")
while !p.isEmpty() && p !== stop {
var index: Int = 0
if (p.size() > 0) {
var bits: Int = 1
var index = 0
if p.size() > 0 {
var bits = 1
while (1 << bits) < p.size() {
bits += 1
}
let mask: Int = (1 << bits) - 1
let mask = (1 << bits) - 1
index = (perm >> offset) & mask
//last &= index >= p.size() - 1;
//last = Bool(Int(last) & (index >= p.size() - 1));
last = last && (index >= p.size() - 1)
if (index >= p.size()) {
if index >= p.size() {
continue outer
}
offset += bits
}
if let recognizer = recognizer {
if (localBuffer.length > 1) {
if localBuffer.length > 1 {
// first char is '[', if more than that this isn't the first rule
localBuffer.append(" ")
}
let atn: ATN = recognizer.getATN()
let s: ATNState = atn.states[stateNumber]!
let ruleName: String = recognizer.getRuleNames()[s.ruleIndex!]
let atn = recognizer.getATN()
let s = atn.states[stateNumber]!
let ruleName = recognizer.getRuleNames()[s.ruleIndex!]
localBuffer.append(ruleName)
} else {
if (p.getReturnState(index) != PredictionContext.EMPTY_RETURN_STATE) {
if (!p.isEmpty()) {
if (localBuffer.length > 1) {
// first char is '[', if more than that this isn't the first rule
localBuffer.append(" ")
}
localBuffer.append(p.getReturnState(index))
}
else if p.getReturnState(index) != PredictionContext.EMPTY_RETURN_STATE {
if !p.isEmpty() {
if localBuffer.length > 1 {
// first char is '[', if more than that this isn't the first rule
localBuffer.append(" ")
}
localBuffer.append(p.getReturnState(index))
}
}
stateNumber = p.getReturnState(index)
@ -781,7 +750,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
localBuffer.append("]")
result.append(localBuffer.toString())
if (last) {
if last {
break
}
@ -792,17 +761,18 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
public var description: String {
return String(describing: PredictionContext.self) + "@" + String(Unmanaged.passUnretained(self).toOpaque().hashValue)
}
}
public func ==(lhs: RuleContext, rhs: ParserRuleContext) -> Bool {
if !(lhs is ParserRuleContext) {
if let lhs = lhs as? ParserRuleContext {
return lhs === rhs
}
else {
return false
}
return (lhs as! ParserRuleContext) === rhs
}
public func ==(lhs: PredictionContext, rhs: PredictionContext) -> Bool {
@ -810,16 +780,16 @@ public func ==(lhs: PredictionContext, rhs: PredictionContext) -> Bool {
if lhs === rhs {
return true
}
if (lhs is EmptyPredictionContext) {
if lhs is EmptyPredictionContext {
return lhs === rhs
}
if (lhs is SingletonPredictionContext) && (rhs is SingletonPredictionContext) {
return (lhs as! SingletonPredictionContext) == (rhs as! SingletonPredictionContext)
if let lhs = lhs as? SingletonPredictionContext, let rhs = rhs as? SingletonPredictionContext {
return lhs == rhs
}
if (lhs is ArrayPredictionContext) && (rhs is ArrayPredictionContext) {
return (lhs as! ArrayPredictionContext) == (rhs as! ArrayPredictionContext)
if let lhs = lhs as? ArrayPredictionContext, let rhs = rhs as? ArrayPredictionContext {
return lhs == rhs
}
return false

View File

@ -164,7 +164,7 @@ public enum PredictionMode {
/// the configurations to strip out all of the predicates so that a standard
/// _org.antlr.v4.runtime.atn.ATNConfigSet_ will merge everything ignoring predicates.
///
public static func hasSLLConflictTerminatingPrediction(_ mode: PredictionMode,_ configs: ATNConfigSet) throws -> Bool {
public static func hasSLLConflictTerminatingPrediction(_ mode: PredictionMode,_ configs: ATNConfigSet) -> Bool {
var configs = configs
///
/// Configs in rule stop states indicate reaching the end of the decision
@ -183,17 +183,16 @@ public enum PredictionMode {
// since we'll often fail over anyway.
if configs.hasSemanticContext {
// dup configs, tossing out semantic predicates
configs = try configs.dupConfigsWithoutSemanticPredicates()
configs = configs.dupConfigsWithoutSemanticPredicates()
}
// now we have combined contexts for configs with dissimilar preds
}
// pure SLL or combined SLL+LL mode parsing
let altsets: Array<BitSet> = try getConflictingAltSubsets(configs)
let altsets = getConflictingAltSubsets(configs)
let heuristic: Bool =
try hasConflictingAltSet(altsets) && !hasStateAssociatedWithOneAlt(configs)
let heuristic = hasConflictingAltSet(altsets) && !hasStateAssociatedWithOneAlt(configs)
return heuristic
}
@ -364,8 +363,8 @@ public enum PredictionMode {
/// we need exact ambiguity detection when the sets look like
/// `A={{1,2`}} or `{{1,2`,{1,2}}}, etc...
///
public static func resolvesToJustOneViableAlt(_ altsets: Array<BitSet>) throws -> Int {
return try getSingleViableAlt(altsets)
public static func resolvesToJustOneViableAlt(_ altsets: [BitSet]) -> Int {
return getSingleViableAlt(altsets)
}
///
@ -376,7 +375,7 @@ public enum PredictionMode {
/// - returns: `true` if every _java.util.BitSet_ in `altsets` has
/// _java.util.BitSet#cardinality cardinality_ &gt; 1, otherwise `false`
///
public static func allSubsetsConflict(_ altsets: Array<BitSet>) -> Bool {
public static func allSubsetsConflict(_ altsets: [BitSet]) -> Bool {
return !hasNonConflictingAltSet(altsets)
}
@ -388,7 +387,7 @@ public enum PredictionMode {
/// - returns: `true` if `altsets` contains a _java.util.BitSet_ with
/// _java.util.BitSet#cardinality cardinality_ 1, otherwise `false`
///
public static func hasNonConflictingAltSet(_ altsets: Array<BitSet>) -> Bool {
public static func hasNonConflictingAltSet(_ altsets: [BitSet]) -> Bool {
for alts: BitSet in altsets {
if alts.cardinality() == 1 {
return true
@ -405,7 +404,7 @@ public enum PredictionMode {
/// - returns: `true` if `altsets` contains a _java.util.BitSet_ with
/// _java.util.BitSet#cardinality cardinality_ &gt; 1, otherwise `false`
///
public static func hasConflictingAltSet(_ altsets: Array<BitSet>) -> Bool {
public static func hasConflictingAltSet(_ altsets: [BitSet]) -> Bool {
for alts: BitSet in altsets {
if alts.cardinality() > 1 {
return true
@ -421,7 +420,7 @@ public enum PredictionMode {
/// - returns: `true` if every member of `altsets` is equal to the
/// others, otherwise `false`
///
public static func allSubsetsEqual(_ altsets: Array<BitSet>) -> Bool {
public static func allSubsetsEqual(_ altsets: [BitSet]) -> Bool {
let first: BitSet = altsets[0]
for it in altsets {
@ -440,10 +439,10 @@ public enum PredictionMode {
///
/// - parameter altsets: a collection of alternative subsets
///
public static func getUniqueAlt(_ altsets: Array<BitSet>) throws -> Int {
public static func getUniqueAlt(_ altsets: [BitSet]) -> Int {
let all: BitSet = getAlts(altsets)
if all.cardinality() == 1 {
return try all.nextSetBit(0)
return all.firstSetBit()
}
return ATN.INVALID_ALT_NUMBER
}
@ -467,9 +466,8 @@ public enum PredictionMode {
///
/// Get union of all alts from configs. - Since: 4.5.1
///
public static func getAlts(_ configs: ATNConfigSet) throws -> BitSet {
return try configs.getAltBitSet()
public static func getAlts(_ configs: ATNConfigSet) -> BitSet {
return configs.getAltBitSet()
}
@ -483,9 +481,8 @@ public enum PredictionMode {
///
///
public static func getConflictingAltSubsets(_ configs: ATNConfigSet) throws -> Array<BitSet> {
return try configs.getConflictingAltSubsets()
public static func getConflictingAltSubsets(_ configs: ATNConfigSet) -> [BitSet] {
return configs.getConflictingAltSubsets()
}
///
@ -496,16 +493,13 @@ public enum PredictionMode {
/// map[c._org.antlr.v4.runtime.atn.ATNConfig#state state_] U= c._org.antlr.v4.runtime.atn.ATNConfig#alt alt_
///
///
public static func getStateToAltMap(_ configs: ATNConfigSet) throws -> HashMap<ATNState, BitSet> {
return try configs.getStateToAltMap()
public static func getStateToAltMap(_ configs: ATNConfigSet) -> HashMap<ATNState, BitSet> {
return configs.getStateToAltMap()
}
public static func hasStateAssociatedWithOneAlt(_ configs: ATNConfigSet) throws -> Bool {
let x: HashMap<ATNState, BitSet> = try getStateToAltMap(configs)
let values = x.values
for alts: BitSet in values {
public static func hasStateAssociatedWithOneAlt(_ configs: ATNConfigSet) -> Bool {
let x = getStateToAltMap(configs)
for alts in x.values {
if alts.cardinality() == 1 {
return true
}
@ -513,17 +507,17 @@ public enum PredictionMode {
return false
}
public static func getSingleViableAlt(_ altsets: Array<BitSet>) throws -> Int {
let viableAlts: BitSet = BitSet()
for alts: BitSet in altsets {
let minAlt: Int = try alts.nextSetBit(0)
try viableAlts.set(minAlt)
public static func getSingleViableAlt(_ altsets: [BitSet]) -> Int {
let viableAlts = BitSet()
for alts in altsets {
let minAlt = alts.firstSetBit()
try! viableAlts.set(minAlt)
if viableAlts.cardinality() > 1 {
// more than 1 viable alt
return ATN.INVALID_ALT_NUMBER
}
}
return try viableAlts.nextSetBit(0)
return viableAlts.firstSetBit()
}
}

View File

@ -113,7 +113,7 @@ public class ProfilingATNSimulator: ParserATNSimulator {
override
internal func computeTargetState(_ dfa: DFA, _ previousD: DFAState, _ t: Int) throws -> DFAState {
let state: DFAState = try super.computeTargetState(dfa, previousD, t)
let state = try super.computeTargetState(dfa, previousD, t)
currentState = state
return state
}
@ -126,7 +126,7 @@ public class ProfilingATNSimulator: ParserATNSimulator {
_llStopIndex = _input.index()
}
let reachConfigs: ATNConfigSet? = try super.computeReachSet(closure, t, fullCtx)
let reachConfigs = try super.computeReachSet(closure, t, fullCtx)
if fullCtx {
decisions[currentDecision].LL_ATNTransitions += 1 // count computation even if error
if reachConfigs != nil {
@ -152,12 +152,12 @@ public class ProfilingATNSimulator: ParserATNSimulator {
override
internal func evalSemanticContext(_ pred: SemanticContext, _ parserCallStack: ParserRuleContext, _ alt: Int, _ fullCtx: Bool) throws -> Bool {
let result: Bool = try super.evalSemanticContext(pred, parserCallStack, alt, fullCtx)
let result = try super.evalSemanticContext(pred, parserCallStack, alt, fullCtx)
if !(pred is SemanticContext.PrecedencePredicate) {
let fullContext: Bool = _llStopIndex >= 0
let stopIndex: Int = fullContext ? _llStopIndex : _sllStopIndex
let fullContext = _llStopIndex >= 0
let stopIndex = fullContext ? _llStopIndex : _sllStopIndex
decisions[currentDecision].predicateEvals.append(
PredicateEvalInfo(currentDecision, _input, _startIndex, stopIndex, pred, result, alt, fullCtx)
PredicateEvalInfo(currentDecision, _input, _startIndex, stopIndex, pred, result, alt, fullCtx)
)
}
@ -165,34 +165,36 @@ public class ProfilingATNSimulator: ParserATNSimulator {
}
override
internal func reportAttemptingFullContext(_ dfa: DFA, _ conflictingAlts: BitSet?, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) throws {
internal func reportAttemptingFullContext(_ dfa: DFA, _ conflictingAlts: BitSet?, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) {
if let conflictingAlts = conflictingAlts {
conflictingAltResolvedBySLL = try conflictingAlts.nextSetBit(0)
conflictingAltResolvedBySLL = conflictingAlts.firstSetBit()
} else {
conflictingAltResolvedBySLL = try configs.getAlts().nextSetBit(0)
let configAlts = configs.getAlts()
conflictingAltResolvedBySLL = configAlts.firstSetBit()
}
decisions[currentDecision].LL_Fallback += 1
try super.reportAttemptingFullContext(dfa, conflictingAlts, configs, startIndex, stopIndex)
super.reportAttemptingFullContext(dfa, conflictingAlts, configs, startIndex, stopIndex)
}
override
internal func reportContextSensitivity(_ dfa: DFA, _ prediction: Int, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) throws {
internal func reportContextSensitivity(_ dfa: DFA, _ prediction: Int, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) {
if prediction != conflictingAltResolvedBySLL {
decisions[currentDecision].contextSensitivities.append(
ContextSensitivityInfo(currentDecision, configs, _input, startIndex, stopIndex)
)
}
try super.reportContextSensitivity(dfa, prediction, configs, startIndex, stopIndex)
super.reportContextSensitivity(dfa, prediction, configs, startIndex, stopIndex)
}
override
internal func reportAmbiguity(_ dfa: DFA, _ D: DFAState, _ startIndex: Int, _ stopIndex: Int, _ exact: Bool,
_ ambigAlts: BitSet?, _ configs: ATNConfigSet) throws {
_ ambigAlts: BitSet?, _ configs: ATNConfigSet) {
var prediction: Int
if let ambigAlts = ambigAlts {
prediction = try ambigAlts.nextSetBit(0)
prediction = ambigAlts.firstSetBit()
} else {
prediction = try configs.getAlts().nextSetBit(0)
let configAlts = configs.getAlts()
prediction = configAlts.firstSetBit()
}
if configs.fullCtx && prediction != conflictingAltResolvedBySLL {
// Even though this is an ambiguity we are reporting, we can
@ -208,7 +210,7 @@ public class ProfilingATNSimulator: ParserATNSimulator {
AmbiguityInfo(currentDecision, configs, ambigAlts!,
_input, startIndex, stopIndex, configs.fullCtx)
)
try super.reportAmbiguity(dfa, D, startIndex, stopIndex, exact, ambigAlts!, configs)
super.reportAmbiguity(dfa, D, startIndex, stopIndex, exact, ambigAlts!, configs)
}

View File

@ -22,9 +22,8 @@ public final class RangeTransition: Transition, CustomStringConvertible {
}
override
//old label()
public func labelIntervalSet() throws -> IntervalSet {
return try IntervalSet.of(from, to)
public func labelIntervalSet() -> IntervalSet? {
return IntervalSet.of(from, to)
}
override

View File

@ -37,9 +37,8 @@ public class SemanticContext: Hashable, CustomStringConvertible {
/// prediction, so we passed in the outer context here in case of context
/// dependent predicate evaluation.
///
public func eval<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
RuntimeException(#function + " must be overridden")
return false
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
fatalError(#function + " must be overridden")
}
///
@ -58,16 +57,16 @@ public class SemanticContext: Hashable, CustomStringConvertible {
/// * A non-`null` _org.antlr.v4.runtime.atn.SemanticContext_: the new simplified
/// semantic context after precedence predicates are evaluated.
///
public func evalPrecedence<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
return self
}
public var hashValue: Int {
RuntimeException(#function + " must be overridden")
return 0
fatalError(#function + " must be overridden")
}
public var description: String {
RuntimeException(#function + " must be overridden")
return ""
fatalError(#function + " must be overridden")
}
public class Predicate: SemanticContext {
@ -90,19 +89,18 @@ public class SemanticContext: Hashable, CustomStringConvertible {
}
override
public func eval<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
let localctx: RuleContext? = isCtxDependent ? parserCallStack : nil
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
let localctx = isCtxDependent ? parserCallStack : nil
return try parser.sempred(localctx, ruleIndex, predIndex)
}
override
public var hashValue: Int {
var hashCode: Int = MurmurHash.initialize()
var hashCode = MurmurHash.initialize()
hashCode = MurmurHash.update(hashCode, ruleIndex)
hashCode = MurmurHash.update(hashCode, predIndex)
hashCode = MurmurHash.update(hashCode, isCtxDependent ? 1 : 0)
hashCode = MurmurHash.finish(hashCode, 3)
return hashCode
return MurmurHash.finish(hashCode, 3)
}
@ -126,13 +124,13 @@ public class SemanticContext: Hashable, CustomStringConvertible {
}
override
public func eval<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
return try parser.precpred(parserCallStack, precedence)
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
return parser.precpred(parserCallStack, precedence)
}
override
public func evalPrecedence<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
if try parser.precpred(parserCallStack, precedence) {
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
if parser.precpred(parserCallStack, precedence) {
return SemanticContext.NONE
} else {
return nil
@ -172,8 +170,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
///
public func getOperands() -> Array<SemanticContext> {
RuntimeException(" must overriden ")
return Array<SemanticContext>()
fatalError(#function + " must be overridden")
}
}
@ -186,36 +183,33 @@ public class SemanticContext: Hashable, CustomStringConvertible {
public let opnds: [SemanticContext]
public init(_ a: SemanticContext, _ b: SemanticContext) {
var operands: Set<SemanticContext> = Set<SemanticContext>()
if a is AND {
operands.formUnion((a as! AND).opnds)
//operands.addAll(Arrays.asList((a as AND).opnds));
var operands = Set<SemanticContext>()
if let aAnd = a as? AND {
operands.formUnion(aAnd.opnds)
} else {
operands.insert(a)
}
if b is AND {
operands.formUnion((b as! AND).opnds)
//operands.addAll(Arrays.asList((b as AND).opnds));
if let bAnd = b as? AND {
operands.formUnion(bAnd.opnds)
} else {
operands.insert(b)
}
let precedencePredicates: Array<PrecedencePredicate> =
SemanticContext.filterPrecedencePredicates(&operands)
let precedencePredicates = SemanticContext.filterPrecedencePredicates(&operands)
if !precedencePredicates.isEmpty {
// interested in the transition with the lowest precedence
let reduced: PrecedencePredicate = precedencePredicates.sorted {
let reduced = precedencePredicates.sorted {
$0.precedence < $1.precedence
}.first! //Collections.min(precedencePredicates);
operands.insert(reduced)
}
operands.insert(reduced[0])
}
opnds = Array(operands) //.toArray(new, SemanticContext[operands.size()]);
opnds = Array(operands)
}
override
public func getOperands() -> Array<SemanticContext> {
public func getOperands() -> [SemanticContext] {
return opnds
}
@ -236,8 +230,8 @@ public class SemanticContext: Hashable, CustomStringConvertible {
/// unordered.
///
override
public func eval<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
for opnd: SemanticContext in opnds {
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
for opnd in opnds {
if try !opnd.eval(parser, parserCallStack) {
return false
}
@ -246,11 +240,11 @@ public class SemanticContext: Hashable, CustomStringConvertible {
}
override
public func evalPrecedence<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
var differs: Bool = false
var operands: Array<SemanticContext> = Array<SemanticContext>()
for context: SemanticContext in opnds {
let evaluated: SemanticContext? = try context.evalPrecedence(parser, parserCallStack)
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
var differs = false
var operands = [SemanticContext]()
for context in opnds {
let evaluated = try context.evalPrecedence(parser, parserCallStack)
//TODO differs |= (evaluated != context)
//differs |= (evaluated != context);
differs = differs || (evaluated != context)
@ -258,11 +252,10 @@ public class SemanticContext: Hashable, CustomStringConvertible {
if evaluated == nil {
// The AND context is false if any element is false
return nil
} else {
if evaluated != SemanticContext.NONE {
// Reduce the result by skipping true elements
operands.append(evaluated!)
}
}
else if evaluated != SemanticContext.NONE {
// Reduce the result by skipping true elements
operands.append(evaluated!)
}
}
@ -275,7 +268,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
return SemanticContext.NONE
}
var result: SemanticContext = operands[0]
var result = operands[0]
let length = operands.count
for i in 1..<length {
result = SemanticContext.and(result, operands[i])
@ -305,35 +298,33 @@ public class SemanticContext: Hashable, CustomStringConvertible {
public init(_ a: SemanticContext, _ b: SemanticContext) {
var operands: Set<SemanticContext> = Set<SemanticContext>()
if a is OR {
operands.formUnion((a as! OR).opnds)
// operands.addAll(Arrays.asList((a as OR).opnds));
if let aOr = a as? OR {
operands.formUnion(aOr.opnds)
} else {
operands.insert(a)
}
if b is OR {
operands.formUnion((b as! OR).opnds)
//operands.addAll(Arrays.asList((b as OR).opnds));
if let bOr = b as? OR {
operands.formUnion(bOr.opnds)
} else {
operands.insert(b)
}
let precedencePredicates: Array<PrecedencePredicate> = SemanticContext.filterPrecedencePredicates(&operands)
let precedencePredicates = SemanticContext.filterPrecedencePredicates(&operands)
if !precedencePredicates.isEmpty {
// interested in the transition with the highest precedence
let reduced: PrecedencePredicate = precedencePredicates.sorted {
let reduced = precedencePredicates.sorted {
$0.precedence > $1.precedence
}.first!
//var reduced : PrecedencePredicate = Collections.max(precedencePredicates);
operands.insert(reduced)
}
operands.insert(reduced[0])
}
self.opnds = Array(operands) //operands.toArray(new, SemanticContext[operands.size()]);
self.opnds = Array(operands)
}
override
public func getOperands() -> Array<SemanticContext> {
return opnds //Arrays.asList(opnds);
public func getOperands() -> [SemanticContext] {
return opnds
}
@ -351,8 +342,8 @@ public class SemanticContext: Hashable, CustomStringConvertible {
/// unordered.
///
override
public func eval<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
for opnd: SemanticContext in opnds {
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
for opnd in opnds {
if try opnd.eval(parser, parserCallStack) {
return true
}
@ -361,22 +352,19 @@ public class SemanticContext: Hashable, CustomStringConvertible {
}
override
public func evalPrecedence<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
var differs: Bool = false
var operands: Array<SemanticContext> = Array<SemanticContext>()
for context: SemanticContext in opnds {
let evaluated: SemanticContext? = try context.evalPrecedence(parser, parserCallStack)
//differs |= (evaluated != context);
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
var differs = false
var operands = [SemanticContext]()
for context in opnds {
let evaluated = try context.evalPrecedence(parser, parserCallStack)
differs = differs || (evaluated != context)
if evaluated == SemanticContext.NONE {
// The OR context is true if any element is true
return SemanticContext.NONE
} else {
if evaluated != nil {
// Reduce the result by skipping false elements
operands.append(evaluated!)
//operands.add(evaluated);
}
}
else if let evaluated = evaluated {
// Reduce the result by skipping false elements
operands.append(evaluated)
}
}
@ -389,7 +377,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
return nil
}
var result: SemanticContext = operands[0]
var result = operands[0]
let length = operands.count
for i in 1..<length {
result = SemanticContext.or(result, operands[i])
@ -447,21 +435,14 @@ public class SemanticContext: Hashable, CustomStringConvertible {
return result
}
private static func filterPrecedencePredicates(
_ collection: inout Set<SemanticContext>) ->
Array<PrecedencePredicate> {
let result = collection.filter {
$0 is PrecedencePredicate
private static func filterPrecedencePredicates(_ collection: inout Set<SemanticContext>) -> [PrecedencePredicate] {
let result = collection.flatMap {
$0 as? PrecedencePredicate
}
collection = Set<SemanticContext>(collection.filter {
!($0 is PrecedencePredicate)
})
//if (result == nil) {
//return Array<PrecedencePredicate>();
//}
return (result as! Array<PrecedencePredicate>)
return result
}
}

View File

@ -26,10 +26,7 @@ public class SetTransition: Transition, CustomStringConvertible {
}
override
///
/// /old label()
///
public func labelIntervalSet() -> IntervalSet {
public func labelIntervalSet() -> IntervalSet? {
return set
}

View File

@ -82,8 +82,7 @@ public class Transition {
}
public func getSerializationType() -> Int {
RuntimeException(#function + " must be overridden")
fatalError()
fatalError(#function + " must be overridden")
}
///
@ -100,12 +99,11 @@ public class Transition {
}
public func labelIntervalSet() throws -> IntervalSet? {
public func labelIntervalSet() -> IntervalSet? {
return nil
}
public func matches(_ symbol: Int, _ minVocabSymbol: Int, _ maxVocabSymbol: Int) -> Bool {
RuntimeException(#function + " must be overridden")
fatalError()
fatalError(#function + " must be overridden")
}
}

View File

@ -170,23 +170,12 @@ public class DFA: CustomStringConvertible {
return description
}
///
/// - Use _#toString(org.antlr.v4.runtime.Vocabulary)_ instead.
///
public func toString(_ tokenNames: [String?]?) -> String {
if s0 == nil {
return ""
}
let serializer: DFASerializer = DFASerializer(self, tokenNames)
return serializer.toString()
}
public func toString(_ vocabulary: Vocabulary) -> String {
if s0 == nil {
return ""
}
let serializer: DFASerializer = DFASerializer(self, vocabulary)
let serializer = DFASerializer(self, vocabulary)
return serializer.toString()
}
@ -194,7 +183,7 @@ public class DFA: CustomStringConvertible {
if s0 == nil {
return ""
}
let serializer: DFASerializer = LexerDFASerializer(self)
let serializer = LexerDFASerializer(self)
return serializer.toString()
}

View File

@ -10,19 +10,9 @@
///
public class DFASerializer: CustomStringConvertible {
private let dfa: DFA
private let vocabulary: Vocabulary
///
/// - Use _#DFASerializer(org.antlr.v4.runtime.dfa.DFA, org.antlr.v4.runtime.Vocabulary)_ instead.
///
//@Deprecated
public convenience init(_ dfa: DFA, _ tokenNames: [String?]?) {
self.init(dfa, Vocabulary.fromTokenNames(tokenNames))
}
public init(_ dfa: DFA, _ vocabulary: Vocabulary) {
self.dfa = dfa
self.vocabulary = vocabulary
@ -32,18 +22,17 @@ public class DFASerializer: CustomStringConvertible {
if dfa.s0 == nil {
return ""
}
let buf: StringBuilder = StringBuilder()
let states: Array<DFAState> = dfa.getStates()
for s: DFAState in states {
var n: Int = 0
if let sEdges = s.edges {
n = sEdges.count
let buf = StringBuilder()
let states = dfa.getStates()
for s in states {
guard let edges = s.edges else {
continue
}
let n = edges.count
for i in 0..<n {
let t: DFAState? = s.edges![i]
if let t = t , t.stateNumber != Int.max {
if let t = s.edges![i], t.stateNumber != Int.max {
buf.append(getStateString(s))
let label: String = getEdgeLabel(i)
let label = getEdgeLabel(i)
buf.append("-")
buf.append(label)
buf.append("->")
@ -53,7 +42,7 @@ public class DFASerializer: CustomStringConvertible {
}
}
let output: String = buf.toString()
let output = buf.toString()
if output.length == 0 {
return ""
}
@ -72,16 +61,16 @@ public class DFASerializer: CustomStringConvertible {
internal func getStateString(_ s: DFAState) -> String {
let n: Int = s.stateNumber
let n = s.stateNumber
let s1 = s.isAcceptState ? ":" : ""
let s2 = s.requiresFullContext ? "^" : ""
let baseStateStr: String = s1 + "s" + String(n) + s2
let baseStateStr = s1 + "s" + String(n) + s2
if s.isAcceptState {
if let predicates = s.predicates {
return baseStateStr + "=>\(predicates)"
} else {
return baseStateStr + "=>\(s.prediction!)"
return baseStateStr + "=>\(s.prediction)"
}
} else {
return baseStateStr

Some files were not shown because too many files have changed in this diff Show More