Merge branch 'master' into master

Terence Parr 2017-10-21 13:18:11 -07:00 committed by GitHub
commit 98745bbdf8
68 changed files with 915 additions and 532 deletions

View File

@ -1,5 +1,8 @@
root = true
[*]
tab_width = 4
[*.{java,stg}]
charset = utf-8
insert_final_newline = true

View File

@ -2,30 +2,26 @@ sudo: true
language: java
cache:
directories:
- $HOME/.m2
- $HOME/Library/Caches/Homebrew
stages:
- smoke-test
- main-test
- extended-test
matrix:
include:
- os: linux
compiler: clang
jdk: oraclejdk7
jdk: openjdk7
env:
- TARGET=cpp
- CXX=g++-5
- GROUP=ALL
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-5
- uuid-dev
- clang-3.7
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=cpp
- GROUP=LEXER
stage: main-test
addons:
apt:
sources:
@ -35,106 +31,150 @@ matrix:
- g++-5
- uuid-dev
- clang-3.7
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=cpp
- GROUP=PARSER
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-5
- uuid-dev
- clang-3.7
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=cpp
- GROUP=RECURSION
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-5
- uuid-dev
- clang-3.7
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=swift
- GROUP=LEXER
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=swift
- GROUP=PARSER
- os: osx
compiler: clang
osx_image: xcode8.1
env:
- TARGET=swift
- GROUP=RECURSION
- os: linux
compiler: clang
jdk: openjdk7
env:
- TARGET=cpp
- CXX=g++-5
- GROUP=PARSER
stage: main-test
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-5
- uuid-dev
- clang-3.7
- os: linux
compiler: clang
jdk: openjdk7
env:
- TARGET=cpp
- CXX=g++-5
- GROUP=RECURSION
stage: main-test
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-5
- uuid-dev
- clang-3.7
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=cpp
- GROUP=LEXER
stage: extended-test
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=cpp
- GROUP=PARSER
stage: extended-test
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=cpp
- GROUP=RECURSION
stage: extended-test
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=swift
- GROUP=LEXER
stage: main-test
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=swift
- GROUP=PARSER
stage: main-test
- os: osx
compiler: clang
osx_image: xcode9
env:
- TARGET=swift
- GROUP=RECURSION
stage: main-test
- os: linux
dist: trusty
compiler: clang
env:
- TARGET=swift
- GROUP=ALL
stage: extended-test
- os: osx
osx_image: xcode8.2
osx_image: xcode9
env:
- TARGET=dotnet
- GROUP=LEXER
stage: extended-test
- os: osx
osx_image: xcode8.2
osx_image: xcode9
env:
- TARGET=dotnet
- GROUP=PARSER
stage: extended-test
- os: osx
osx_image: xcode8.2
osx_image: xcode9
env:
- TARGET=dotnet
- GROUP=RECURSION
stage: extended-test
- os: linux
jdk: oraclejdk7
jdk: openjdk7
env: TARGET=java
stage: extended-test
- os: linux
jdk: openjdk8
env: TARGET=java
stage: extended-test
- os: linux
jdk: oraclejdk8
env: TARGET=java
stage: smoke-test
- os: linux
jdk: oraclejdk7
jdk: openjdk7
env: TARGET=csharp
stage: extended-test
- os: linux
jdk: oraclejdk8
dist: trusty
env:
- TARGET=dotnet
- GROUP=LEXER
stage: main-test
- os: linux
jdk: oraclejdk8
jdk: openjdk8
dist: trusty
env:
- TARGET=dotnet
- GROUP=PARSER
stage: main-test
- os: linux
jdk: oraclejdk8
dist: trusty
env:
- TARGET=dotnet
- GROUP=RECURSION
stage: main-test
- os: linux
jdk: oraclejdk7
jdk: openjdk7
env: TARGET=python2
stage: extended-test
- os: linux
jdk: oraclejdk7
jdk: openjdk7
env: TARGET=python3
addons:
apt:
@ -142,16 +182,20 @@ matrix:
- deadsnakes # source required so it finds the package definition below
packages:
- python3.5
stage: main-test
- os: linux
jdk: oraclejdk7
dist: trusty
jdk: openjdk8
env: TARGET=javascript
stage: main-test
- os: linux
jdk: oraclejdk7
dist: trusty
jdk: openjdk8
env: TARGET=go
stage: main-test
before_install:
- ./.travis/before-install-$TRAVIS_OS_NAME-$TARGET.sh
- f="./.travis/before-install-$TRAVIS_OS_NAME-$TARGET.sh"; ! [ -x "$f" ] || "$f"
script:
- cd runtime-testsuite; ../.travis/run-tests-$TARGET.sh
- cd runtime-testsuite; travis_wait 40 ../.travis/run-tests-$TARGET.sh

View File

@ -1,14 +1,12 @@
set -euo pipefail
# make sure we use trusty repositories (travis by default uses precise)
curl https://repogen.simplylinux.ch/txt/trusty/sources_c4aa56bd26c0f54f391d8fae3e687ef5f6e97c26.txt | sudo tee /etc/apt/sources.list
# install dependencies
# some packages below will be updated; Swift assumes newer versions
# of, for example, sqlite3 and libicu, and without the update some
# tools will not work
sudo apt-get update
sudo apt-get install clang libicu-dev libxml2 sqlite3
sudo apt-get install clang-3.6 libxml2
sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-3.6 100
# This would fix a known linker issue mentioned in:
# https://bugs.swift.org/browse/SR-2299

View File

@ -1,13 +0,0 @@
#!/bin/bash
set -euo pipefail
thisdir=$(dirname "$0")
brew update
brew install cmake
# Work around apparent rvm bug that is in Travis's Xcode image.
# https://github.com/direnv/direnv/issues/210
# https://github.com/travis-ci/travis-ci/issues/6307
shell_session_update() { :; }

View File

@ -4,9 +4,7 @@ set -euo pipefail
thisdir=$(dirname "$0")
# pre-requisites for dotnet core
brew update
brew install openssl
# OpenSSL setup for dotnet core
mkdir -p /usr/local/lib
ln -s /usr/local/opt/openssl/lib/libcrypto.1.0.0.dylib /usr/local/lib/
ln -s /usr/local/opt/openssl/lib/libssl.1.0.0.dylib /usr/local/lib/
@ -19,9 +17,3 @@ sudo installer -pkg /tmp/dotnet-dev-osx-x64.1.0.4.pkg -target /
# make the link
ln -s /usr/local/share/dotnet/dotnet /usr/local/bin/
# Work around apparent rvm bug that is in Travis's Xcode image.
# https://github.com/direnv/direnv/issues/210
# https://github.com/travis-ci/travis-ci/issues/6307
shell_session_update() { :; }

View File

@ -1,12 +0,0 @@
#!/bin/bash
set -euo pipefail
thisdir=$(dirname "$0")
brew update
# Work around apparent rvm bug that is in Travis's Xcode image.
# https://github.com/direnv/direnv/issues/210
# https://github.com/travis-ci/travis-ci/issues/6307
shell_session_update() { :; }

View File

@ -4,7 +4,7 @@
# here since environment variables don't pass
# across scripts
if [ $TRAVIS_OS_NAME == "linux" ]; then
export SWIFT_VERSION=swift-3.1.1
export SWIFT_VERSION=swift-4.0
export SWIFT_HOME=$(pwd)/swift/$SWIFT_VERSION-RELEASE-ubuntu14.04/usr/bin/
export PATH=$SWIFT_HOME:$PATH

View File

@ -1,4 +1,4 @@
version: '4.6-SNAPSHOT+AppVeyor.{build}'
version: '4.7.1-SNAPSHOT+AppVeyor.{build}'
os: Windows Server 2012
build_script:
- mvn -DskipTests install -q --batch-mode

View File

@ -151,7 +151,19 @@ YYYY/MM/DD, github id, Full name, email
2017/06/11, erikbra, Erik A. Brandstadmoen, erik@brandstadmoen.net
2017/06/10, jm-mikkelsen, Jan Martin Mikkelsen, janm@transactionware.com
2017/06/25, alimg, Alim Gökkaya, alim.gokkaya@gmail.com
2017/06/28, jBugman, Sergey Parshukov, codedby@bugman.me
2017/07/09, neatnerd, Mike Arshinskiy, neatnerd@users.noreply.github.com
2017/07/11, dhalperi, Daniel Halperin, daniel@halper.in
2017/07/17, vaibhavaingankar09, Vaibhav Vaingankar, vbhvvaingankar9@gmail.com
2017/07/23, venkatperi, Venkat Peri, venkatperi@gmail.com
2017/07/27, shirou, WAKAYAMA Shirou, shirou.faw@gmail.com
2017/07/09, neatnerd, Mike Arshinskiy, neatnerd@users.noreply.github.com
2017/07/27, matthauck, Matt Hauck, matthauck@gmail.com
2017/07/27, shirou, WAKAYAMA Shirou, shirou.faw@gmail.com
2017/08/20, tiagomazzutti, Tiago Mazzutti, tiagomzt@gmail.com
2017/08/20, milanaleksic, Milan Aleksic, milanaleksic@gmail.com
2017/08/29, Eddy Reyes, eddy@mindsight.io
2017/09/09, brauliobz, Bráulio Bezerra, brauliobezerra@gmail.com
2017/09/11, sachinjain024, Sachin Jain, sachinjain024@gmail.com
2017/10/06, bramp, Andrew Brampton, brampton@gmail.com
2017/10/15, simkimsia, Sim Kim Sia, kimcity@gmail.com

View File

@ -6,7 +6,7 @@ Hi and welcome to the version 4 release of ANTLR! It's named after the fearless
ANTLR is really two things: a tool that translates your grammar to a parser/lexer in Java (or other target language) and the runtime needed by the generated parsers/lexers. Even if you are using the ANTLR Intellij plug-in or ANTLRWorks to run the ANTLR tool, the generated code will still need the runtime library.
The first thing you should do is probably download and install a development tool plug-in. Even if you only use such tools for editing, they are great. Then, follow the instructions below to get the runtime environment available to your system to run generated parsers/lexers. In what follows, I talk about antlr-4.5.3-complete.jar, which has the tool and the runtime and any other support libraries (e.g., ANTLR v4 is written in v3).
The first thing you should do is probably download and install a development tool plug-in. Even if you only use such tools for editing, they are great. Then, follow the instructions below to get the runtime environment available to your system to run generated parsers/lexers. In what follows, I talk about antlr-4.7-complete.jar, which has the tool and the runtime and any other support libraries (e.g., ANTLR v4 is written in v3).
If you are going to integrate ANTLR into your existing build system using mvn, ant, or want to get ANTLR into your IDE such as eclipse or intellij, see Integrating ANTLR into Development Systems.
@ -16,16 +16,18 @@ If you are going to integrate ANTLR into your existing build system using mvn, a
1. Download
```
$ cd /usr/local/lib
$ curl -O http://www.antlr.org/download/antlr-4.5.3-complete.jar
$ curl -O http://www.antlr.org/download/antlr-4.7-complete.jar
```
Or just download it in your browser from the website:
[http://www.antlr.org/download.html](http://www.antlr.org/download.html)
and put it somewhere rational like `/usr/local/lib`.
2. Add `antlr-4.5.3-complete.jar` to your `CLASSPATH`:
```
$ export CLASSPATH=".:/usr/local/lib/antlr-4.5.3-complete.jar:$CLASSPATH"
```
It's also a good idea to put this in your `.bash_profile` or whatever your startup script is.
3. Create aliases for the ANTLR Tool, and `TestRig`.
```
$ alias antlr4='java -Xmx500M -cp "/usr/local/lib/antlr-4.5.3-complete.jar:$CLASSPATH" org.antlr.v4.Tool'
@ -39,11 +41,11 @@ $ alias grun='java org.antlr.v4.gui.TestRig'
0. Install Java (version 1.6 or higher)
1. Download antlr-4.5.3-complete.jar (or whatever version) from [http://www.antlr.org/download/](http://www.antlr.org/download/)
Save to your directory for 3rd party Java libraries, say `C:\Javalib`
2. Add `antlr-4.5-complete.jar` to CLASSPATH, either:
2. Add `antlr-4.5.3-complete.jar` to CLASSPATH, either:
* Permanently: Using System Properties dialog > Environment variables > Create or append to `CLASSPATH` variable
* Temporarily, at command line:
```
SET CLASSPATH=.;C:\Javalib\antlr-4.5.3-complete.jar;%CLASSPATH%
SET CLASSPATH=.;C:\Javalib\antlr-4.7-complete.jar;%CLASSPATH%
```
3. Create short convenient commands for the ANTLR Tool, and TestRig, using batch files or doskey commands:
* Batch files (in directory in system PATH) antlr4.bat and grun.bat
@ -65,7 +67,7 @@ Either launch org.antlr.v4.Tool directly:
```
$ java org.antlr.v4.Tool
ANTLR Parser Generator Version 4.5.3
ANTLR Parser Generator Version 4.7
-o ___ specify output directory where all output is generated
-lib ___ specify location of .tokens files
...
@ -74,8 +76,8 @@ ANTLR Parser Generator Version 4.5.3
or use -jar option on java:
```
$ java -jar /usr/local/lib/antlr-4.5.3-complete.jar
ANTLR Parser Generator Version 4.5.3
$ java -jar /usr/local/lib/antlr-4.7-complete.jar
ANTLR Parser Generator Version 4.7
-o ___ specify output directory where all output is generated
-lib ___ specify location of .tokens files
...

Binary file not shown (new image, 379 KiB)

View File

@ -6,7 +6,6 @@ To use ANTLR4 Swift target in production environment, make sure to turn on compi
The conclusion is that you need to turn on `release` mode (which has all the optimizations preconfigured for you) so that the ANTLR4 Swift target can achieve reasonable parsing speed.
## Install ANTLR4
Make sure you have the ANTLR
@ -25,58 +24,120 @@ For a full list of antlr4 tool options, please visit the
## Build your Swift project with ANTLR runtime
The following instructions are assuming Xcode as the IDE:
### Note
* __Add parser/lexer to project__. Make sure the parsers/lexers
We use the __boot.py__ script, located at the root of the Swift runtime folder
`antlr4/runtime/Swift`, to provide additional support for both Xcode-based
projects and SPM-based projects. The sections below are organized for both
flavors. If you want to get started quickly, try:
```
python boot.py --help
```
for information about this script.
### Xcode Projects
Note that even if you are otherwise using ANTLR from a binary distribution,
you should compile the ANTLR Swift runtime from source, because the Swift
language does not yet have a stable ABI.
ANTLR uses Swift Package Manager to generate Xcode project files. Note that
Swift Package Manager does not currently support iOS, watchOS, or tvOS, so
if you wish to use those platforms, you will need to alter the project build
settings manually as appropriate.
#### Download source code for ANTLR
```
git clone https://github.com/antlr/antlr4
```
#### Generate Xcode project for ANTLR runtime
The `boot.py` script includes a wrapper around `swift package
generate-xcodeproj`. Use this to generate `Antlr4.xcodeproj` for the ANTLR
Swift runtime. (Using _swift package generate-xcodeproj_ directly is not
recommended, since the project depends on some parser files generated by _boot.py_.)
```
cd antlr4/runtime/Swift
python boot.py --gen-xcodeproj
```
#### Import ANTLR Swift runtime into your project
Open your own project in Xcode.
Open Finder in the `runtime/Swift` directory:
```
# From antlr4/runtime/Swift
open .
```
Drag `Antlr4.xcodeproj` into your project.
After this is done, your Xcode project navigator will look something like the
screenshot below. In this example, your own project is "Smalltalk", and you
will be able to see `Antlr4.xcodeproj` shown as a contained project.
<img src=images/xcodenav.png width="300">
#### Edit the build settings if necessary
Swift Package Manager currently does not support iOS, watchOS, or tvOS. If
you wish to build for those platforms, you will need to alter the project
build settings manually.
#### Add generated parser and lexer to project
Make sure the parsers/lexers
generated in __step 2__ are added to the project. To do this, you can
drag the generated files from Finder to the Xcode IDE. Remember to
check __Copy items if needed__ to make sure the files are actually
copied into the project folder rather than added as symbolic links (see the
screenshot below). After moving, you will be able to see your files in
the project navigator. But when you open one of the files, you will
see Xcode complaining the module "Antlr4" could not be found at the
import statement. This is expected, since we still need the ANTLR
Swift runtime for those missing symbols.
the project navigator. Make sure that the Target Membership settings
are correct for your project.
<img src=images/dragfile.png width="500">
* __Download ANTLR runtime__. Due to unstable ABI of Swift language,
there will not be a single "library" for the Swift ANTLR runtime for
now. To get Swift ANTLR runtime, clone the ANTLR repository. Open it
in finder. From the root directory of the repo, go to runtime/Swift
folder. You will see the Xcode project manifest file:
__Antlr4.xcodeproj__.
#### Add the ANTLR Swift runtime as a dependency
* __Import ANTLR Swift runtime into project__. Drag Antlr4.xcodeproj
into your project, after this is done, your Xcode project navigator
will be something like the screenshot below. In this case, your own
project is "Smalltalk", and you will be able to see the
Antlr4.xcodeproj shown as a contained project. The error message will
still be there, that's because we still need to tell Xcode how to find
the runtime.
Select your own project in Xcode and go to the Build Phases settings panel.
Add the ANTLR runtime under __Target Dependencies__ and __Link Binary With
Libraries__.
<img src=images/xcodenav.png width="300">
<img src=images/xcodedep.png width="800">
* __Build ANTLR runtime__. By expanding the "Products" folder in the
inner project (Antlr4.xcodeproj), you will see two Antlr4.framework
files. ".framework" file is the swift version of ".jar", ".a" as in
JAVA, C/C++ Initially those two files should be red, that's because
they are not built. To build, click the "target selection" button
right next to your Xcode run button. And in the drop down select the
target you want to build. And you will see the two Antlr4.framework
files are for iOS and OSX, as shown below. After target selection,
press "CMD+B", and Xcode will build the framework for you. Then you
will see one of the frameworks become black.
#### Build your project
<img src=images/targetselection.png width="500">
The runtime and generated grammar should now build correctly.
* __Add dependencies__. Simply adding ANTLR Swift runtime and build
the artifact is not enough. You still need to specify
dependencies. Click your own project (Smalltalk), and you will see
project setting page. Go to "Build Phase", and inside it make sure
your ANTLR Swift runtime framework is added to both "__Target
Dependencies__" and "__Link Binary With Libraries__" sections, as
shown below. After correctly added dependencies, the error message for
importing library will be gone.
### Swift Package Manager Projects
<img src=images/xcodedep.png width="800">
Since we cannot have a separate repository for the Swift target (see issue [#1774](https://github.com/antlr/antlr4/issues/1774)),
and Swift is not yet ABI stable, we currently support SPM-based
projects by creating a temporary local repository.
For people using [Swift Package Manager](https://swift.org/package-manager/),
the __boot.py__ script supports generating a local repository that can be used
as a dependency of your project. Simply run:
```
python boot.py --gen-spm-module
```
The output will look something like the screenshot below:
<img src=images/gen_spm_module.png width="800">
Add the SPM directive containing the URL of the temporary repository to your
project's Package.swift, then run `swift build` in your project (a sketch of
such a `Package.swift` is shown below).
The repository is generated in your system's `/tmp/` directory. If you find
that inconvenient, consider copying the generated ANTLR repository to some
place that won't be cleaned automatically, and update the `url` parameter in
your `Package.swift` file.
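For illustration, a minimal consuming project's `Package.swift` might look like
the sketch below. The project name, the `/tmp/...` url, and the branch
requirement are placeholders; use whatever `boot.py --gen-spm-module` actually
prints for you:
```
// swift-tools-version:4.0
import PackageDescription

let package = Package(
    name: "MyApp",  // placeholder name for your own project
    dependencies: [
        // Placeholder: use the temporary repository url printed by boot.py here.
        .package(url: "file:///tmp/Antlr4-tmp-123456", .branch("master")),
    ],
    targets: [
        .target(
            name: "MyApp",
            // "Antlr4" is the library product defined by the ANTLR Swift runtime.
            dependencies: ["Antlr4"]),
    ]
)
```
After that, `swift build` in your project directory should fetch and build the
runtime along with your own sources.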

View File

@ -23,6 +23,7 @@ public class TestCodePointCharStream {
CodePointCharStream s = CharStreams.fromString("");
assertEquals(0, s.size());
assertEquals(0, s.index());
assertEquals("", s.toString());
}
@Test

View File

@ -145,7 +145,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
String projectName = "testcase-" + System.currentTimeMillis();
String projectDir = getTmpDir() + "/" + projectName;
buildProject(projectDir);
buildProject(projectDir, projectName);
return execTest(projectDir, projectName);
}
@ -183,12 +183,12 @@ public class BaseSwiftTest implements RuntimeTestSupport {
Collections.addAll(this.sourceFiles, files);
}
private void buildProject(String projectDir) {
private void buildProject(String projectDir, String projectName) {
mkdir(projectDir);
fastFailRunProcess(projectDir, SWIFT_CMD, "package", "init", "--type", "executable");
for (String sourceFile: sourceFiles) {
String absPath = getTmpDir() + "/" + sourceFile;
fastFailRunProcess(getTmpDir(), "mv", "-f", absPath, projectDir + "/Sources/");
fastFailRunProcess(getTmpDir(), "mv", "-f", absPath, projectDir + "/Sources/" + projectName);
}
fastFailRunProcess(getTmpDir(), "mv", "-f", "input", projectDir);
@ -201,7 +201,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
"-Xlinker", "-rpath",
"-Xlinker", dylibPath);
if (buildResult.b.length() > 0) {
throw new RuntimeException("unit test build failed: " + buildResult.b);
throw new RuntimeException("unit test build failed: " + buildResult.a + "\n" + buildResult.b);
}
} catch (IOException | InterruptedException e) {
e.printStackTrace();
@ -251,7 +251,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
addSourceFiles("main.swift");
String projectName = "testcase-" + System.currentTimeMillis();
String projectDir = getTmpDir() + "/" + projectName;
buildProject(projectDir);
buildProject(projectDir, projectName);
return execTest(projectDir, projectName);
}

View File

@ -1092,7 +1092,10 @@ nextTransition_continue: ;
protected internal Guid ReadUUID()
{
byte[] d = BitConverter.GetBytes (ReadLong ());
Array.Reverse(d);
if(BitConverter.IsLittleEndian)
{
Array.Reverse(d);
}
short c = (short)ReadInt();
short b = (short)ReadInt();
int a = ReadInt32();

View File

@ -33,6 +33,7 @@ endif()
if(CMAKE_VERSION VERSION_EQUAL "3.3.0" OR
CMAKE_VERSION VERSION_GREATER "3.3.0")
CMAKE_POLICY(SET CMP0059 OLD)
CMAKE_POLICY(SET CMP0054 OLD)
endif()
if(CMAKE_SYSTEM_NAME MATCHES "Linux")
@ -61,7 +62,11 @@ if (WITH_DEMO)
endif()
endif(WITH_DEMO)
set(MY_CXX_WARNING_FLAGS " -Wall -pedantic -W")
if (MSVC_VERSION)
set(MY_CXX_WARNING_FLAGS " /W4")
else()
set(MY_CXX_WARNING_FLAGS " -Wall -pedantic -W")
endif()
# Initialize CXXFLAGS.
if("${CMAKE_VERSION}" VERSION_GREATER 3.1.0)
@ -75,11 +80,18 @@ else()
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -std=c++11")
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 -g ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_MINSIZEREL "${CMAKE_CXX_FLAGS_MINSIZEREL} -Os -DNDEBUG ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3 -DNDEBUG ${MY_CXX_WARNING_FLGAS}")
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O2 -g ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${MY_CXX_WARNING_FLAGS}")
if (MSVC_VERSION)
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /Od /Zi /MP ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_MINSIZEREL "${CMAKE_CXX_FLAGS_MINSIZEREL} /O1 /Oi /Ob2 /Gy /MP /DNDEBUG ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /O2 /Oi /Ob2 /Gy /MP /DNDEBUG ${MY_CXX_WARNING_FLGAS}")
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} /O2 /Oi /Ob2 /Gy /MP /Zi ${MY_CXX_WARNING_FLAGS}")
else()
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 -g ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_MINSIZEREL "${CMAKE_CXX_FLAGS_MINSIZEREL} -Os -DNDEBUG ${MY_CXX_WARNING_FLAGS}")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3 -DNDEBUG ${MY_CXX_WARNING_FLGAS}")
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O2 -g ${MY_CXX_WARNING_FLAGS}")
endif()
# Compiler-specific C++11 activation.
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
@ -101,6 +113,8 @@ elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang" AND CMAKE_SYSTEM_NAME MATCHES
if (WITH_LIBCXX)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
endif()
elseif ( MSVC_VERSION GREATER 1800 OR MSVC_VERSION EQUAL 1800 )
# Visual Studio 2013 and later (MSVC_VERSION >= 1800) support C++11 features
else ()
message(FATAL_ERROR "Your C++ compiler does not support C++11.")
endif ()

View File

@ -44,7 +44,11 @@ elseif(APPLE)
target_link_libraries(antlr4_static ${COREFOUNDATION_LIBRARY})
endif()
set(disabled_compile_warnings "-Wno-overloaded-virtual")
if (MSVC_VERSION)
set(disabled_compile_warnings "/wd4251")
else()
set(disabled_compile_warnings "-Wno-overloaded-virtual")
endif()
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
set(disabled_compile_warnings "${disabled_compile_warnings} -Wno-dollar-in-identifier-extension -Wno-four-char-constants")
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
@ -57,6 +61,15 @@ if (WIN32)
set(extra_share_compile_flags "-DANTLR4CPP_EXPORTS")
set(extra_static_compile_flags "-DANTLR4CPP_STATIC")
endif(WIN32)
if (MSVC_VERSION)
target_compile_options(antlr4_shared PRIVATE "/MD$<$<CONFIG:Debug>:d>")
target_compile_options(antlr4_static PRIVATE "/MT$<$<CONFIG:Debug>:d>")
endif()
set(static_lib_suffix "")
if (MSVC_VERSION)
set(static_lib_suffix "-static")
endif()
set_target_properties(antlr4_shared
PROPERTIES VERSION ${ANTLR_VERSION}
@ -72,7 +85,7 @@ set_target_properties(antlr4_shared
set_target_properties(antlr4_static
PROPERTIES VERSION ${ANTLR_VERSION}
SOVERSION ${ANTLR_VERSION}
OUTPUT_NAME antlr4-runtime
OUTPUT_NAME "antlr4-runtime${static_lib_suffix}"
ARCHIVE_OUTPUT_DIRECTORY ${LIB_OUTPUT_DIR}
COMPILE_FLAGS "${disabled_compile_warnings} ${extra_static_compile_flags}")

View File

@ -15,7 +15,7 @@ namespace atn {
* utility methods for analyzing configuration sets for conflicts and/or
* ambiguities.
*/
enum class ANTLR4CPP_PUBLIC PredictionMode {
enum class PredictionMode {
/**
* The SLL(*) prediction mode. This prediction mode ignores the current
* parser context when making predictions. This is the fastest prediction

View File

@ -0,0 +1,154 @@
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
package antlr
import (
"testing"
)
type commonTokenStreamTestLexer struct {
*BaseLexer
tokens []Token
i int
}
func (l *commonTokenStreamTestLexer) NextToken() Token {
tmp := l.tokens[l.i]
l.i++
return tmp
}
func TestCommonTokenStreamOffChannel(t *testing.T) {
assert := assertNew(t)
lexEngine := &commonTokenStreamTestLexer{
tokens: []Token{
newTestCommonToken(1, " ", LexerHidden), // 0
newTestCommonToken(1, "x", LexerDefaultTokenChannel), // 1
newTestCommonToken(1, " ", LexerHidden), // 2
newTestCommonToken(1, "=", LexerDefaultTokenChannel), // 3
newTestCommonToken(1, "34", LexerDefaultTokenChannel), // 4
newTestCommonToken(1, " ", LexerHidden), // 5
newTestCommonToken(1, " ", LexerHidden), // 6
newTestCommonToken(1, ";", LexerDefaultTokenChannel), // 7
newTestCommonToken(1, "\n", LexerHidden), // 9
newTestCommonToken(TokenEOF, "", LexerDefaultTokenChannel), // 10
},
}
tokens := NewCommonTokenStream(lexEngine, TokenDefaultChannel)
assert.Equal("x", tokens.LT(1).GetText()) // must skip first off channel token
tokens.Consume()
assert.Equal("=", tokens.LT(1).GetText())
assert.Equal("x", tokens.LT(-1).GetText())
tokens.Consume()
assert.Equal("34", tokens.LT(1).GetText())
assert.Equal("=", tokens.LT(-1).GetText())
tokens.Consume()
assert.Equal(";", tokens.LT(1).GetText())
assert.Equal("34", tokens.LT(-1).GetText())
tokens.Consume()
assert.Equal(TokenEOF, tokens.LT(1).GetTokenType())
assert.Equal(";", tokens.LT(-1).GetText())
assert.Equal("34", tokens.LT(-2).GetText())
assert.Equal("=", tokens.LT(-3).GetText())
assert.Equal("x", tokens.LT(-4).GetText())
}
func TestCommonTokenStreamFetchOffChannel(t *testing.T) {
assert := assertNew(t)
lexEngine := &commonTokenStreamTestLexer{
tokens: []Token{
newTestCommonToken(1, " ", LexerHidden), // 0
newTestCommonToken(1, "x", LexerDefaultTokenChannel), // 1
newTestCommonToken(1, " ", LexerHidden), // 2
newTestCommonToken(1, "=", LexerDefaultTokenChannel), // 3
newTestCommonToken(1, "34", LexerDefaultTokenChannel), // 4
newTestCommonToken(1, " ", LexerHidden), // 5
newTestCommonToken(1, " ", LexerHidden), // 6
newTestCommonToken(1, ";", LexerDefaultTokenChannel), // 7
newTestCommonToken(1, " ", LexerHidden), // 8
newTestCommonToken(1, "\n", LexerHidden), // 9
newTestCommonToken(TokenEOF, "", LexerDefaultTokenChannel), // 10
},
}
tokens := NewCommonTokenStream(lexEngine, TokenDefaultChannel)
tokens.Fill()
assert.Nil(tokens.getHiddenTokensToLeft(0, -1))
assert.Nil(tokens.getHiddenTokensToRight(0, -1))
assert.Equal("[[@0,0:0=' ',<1>,channel=1,0:-1]]", tokensToString(tokens.getHiddenTokensToLeft(1, -1)))
assert.Equal("[[@2,0:0=' ',<1>,channel=1,0:-1]]", tokensToString(tokens.getHiddenTokensToRight(1, -1)))
assert.Nil(tokens.getHiddenTokensToLeft(2, -1))
assert.Nil(tokens.getHiddenTokensToRight(2, -1))
assert.Equal("[[@2,0:0=' ',<1>,channel=1,0:-1]]", tokensToString(tokens.getHiddenTokensToLeft(3, -1)))
assert.Nil(tokens.getHiddenTokensToRight(3, -1))
assert.Nil(tokens.getHiddenTokensToLeft(4, -1))
assert.Equal("[[@5,0:0=' ',<1>,channel=1,0:-1], [@6,0:0=' ',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToRight(4, -1)))
assert.Nil(tokens.getHiddenTokensToLeft(5, -1))
assert.Equal("[[@6,0:0=' ',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToRight(5, -1)))
assert.Equal("[[@5,0:0=' ',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToLeft(6, -1)))
assert.Nil(tokens.getHiddenTokensToRight(6, -1))
assert.Equal("[[@5,0:0=' ',<1>,channel=1,0:-1], [@6,0:0=' ',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToLeft(7, -1)))
assert.Equal("[[@8,0:0=' ',<1>,channel=1,0:-1], [@9,0:0='\\n',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToRight(7, -1)))
assert.Nil(tokens.getHiddenTokensToLeft(8, -1))
assert.Equal("[[@9,0:0='\\n',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToRight(8, -1)))
assert.Equal("[[@8,0:0=' ',<1>,channel=1,0:-1]]",
tokensToString(tokens.getHiddenTokensToLeft(9, -1)))
assert.Nil(tokens.getHiddenTokensToRight(9, -1))
}
type commonTokenStreamTestLexerSingleEOF struct {
*BaseLexer
tokens []Token
i int
}
func (l *commonTokenStreamTestLexerSingleEOF) NextToken() Token {
return newTestCommonToken(TokenEOF, "", LexerDefaultTokenChannel)
}
func TestCommonTokenStreamSingleEOF(t *testing.T) {
assert := assertNew(t)
lexEngine := &commonTokenStreamTestLexerSingleEOF{}
tokens := NewCommonTokenStream(lexEngine, TokenDefaultChannel)
tokens.Fill()
assert.Equal(TokenEOF, tokens.LA(1))
assert.Equal(0, tokens.index)
assert.Equal(1, tokens.Size())
}
func TestCommonTokenStreamCannotConsumeEOF(t *testing.T) {
assert := assertNew(t)
lexEngine := &commonTokenStreamTestLexerSingleEOF{}
tokens := NewCommonTokenStream(lexEngine, TokenDefaultChannel)
tokens.Fill()
assert.Equal(TokenEOF, tokens.LA(1))
assert.Equal(0, tokens.index)
assert.Equal(1, tokens.Size())
assert.Panics(tokens.Consume)
}

View File

@ -21,11 +21,11 @@ type Lexer interface {
Emit() Token
setChannel(int)
pushMode(int)
popMode() int
setType(int)
setMode(int)
SetChannel(int)
PushMode(int)
PopMode() int
SetType(int)
SetMode(int)
}
type BaseLexer struct {
@ -150,7 +150,7 @@ func (b *BaseLexer) GetSourceName() string {
return b.GrammarFileName
}
func (b *BaseLexer) setChannel(v int) {
func (b *BaseLexer) SetChannel(v int) {
b.channel = v
}
@ -250,11 +250,11 @@ func (b *BaseLexer) More() {
b.thetype = LexerMore
}
func (b *BaseLexer) setMode(m int) {
func (b *BaseLexer) SetMode(m int) {
b.mode = m
}
func (b *BaseLexer) pushMode(m int) {
func (b *BaseLexer) PushMode(m int) {
if LexerATNSimulatorDebug {
fmt.Println("pushMode " + strconv.Itoa(m))
}
@ -262,7 +262,7 @@ func (b *BaseLexer) pushMode(m int) {
b.mode = m
}
func (b *BaseLexer) popMode() int {
func (b *BaseLexer) PopMode() int {
if len(b.modeStack) == 0 {
panic("Empty Stack")
}
@ -331,7 +331,7 @@ func (b *BaseLexer) GetType() int {
return b.thetype
}
func (b *BaseLexer) setType(t int) {
func (b *BaseLexer) SetType(t int) {
b.thetype = t
}
@ -361,7 +361,7 @@ func (b *BaseLexer) GetATN() *ATN {
// Return a list of all Token objects in input char stream.
// Forces load of all tokens. Does not include EOF token.
// /
func (b *BaseLexer) getAllTokens() []Token {
func (b *BaseLexer) GetAllTokens() []Token {
vl := b.Virt
tokens := make([]Token, 0)
t := vl.NextToken()

View File

@ -101,7 +101,7 @@ func NewLexerTypeAction(thetype int) *LexerTypeAction {
}
func (l *LexerTypeAction) execute(lexer Lexer) {
lexer.setType(l.thetype)
lexer.SetType(l.thetype)
}
func (l *LexerTypeAction) hash() int {
@ -145,7 +145,7 @@ func NewLexerPushModeAction(mode int) *LexerPushModeAction {
// <p>This action is implemented by calling {@link Lexer//pushMode} with the
// value provided by {@link //getMode}.</p>
func (l *LexerPushModeAction) execute(lexer Lexer) {
lexer.pushMode(l.mode)
lexer.PushMode(l.mode)
}
func (l *LexerPushModeAction) hash() int {
@ -190,7 +190,7 @@ var LexerPopModeActionINSTANCE = NewLexerPopModeAction()
// <p>This action is implemented by calling {@link Lexer//popMode}.</p>
func (l *LexerPopModeAction) execute(lexer Lexer) {
lexer.popMode()
lexer.PopMode()
}
func (l *LexerPopModeAction) String() string {
@ -242,7 +242,7 @@ func NewLexerModeAction(mode int) *LexerModeAction {
// <p>This action is implemented by calling {@link Lexer//mode} with the
// value provided by {@link //getMode}.</p>
func (l *LexerModeAction) execute(lexer Lexer) {
lexer.setMode(l.mode)
lexer.SetMode(l.mode)
}
func (l *LexerModeAction) hash() int {
@ -341,7 +341,7 @@ func NewLexerChannelAction(channel int) *LexerChannelAction {
// <p>This action is implemented by calling {@link Lexer//setChannel} with the
// value provided by {@link //getChannel}.</p>
func (l *LexerChannelAction) execute(lexer Lexer) {
lexer.setChannel(l.channel)
lexer.SetChannel(l.channel)
}
func (l *LexerChannelAction) hash() int {

View File

@ -0,0 +1,98 @@
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
// These assert functions are borrowed from https://github.com/stretchr/testify/ (MIT License)
package antlr
import (
"fmt"
"reflect"
"testing"
)
type assert struct {
t *testing.T
}
func assertNew(t *testing.T) *assert {
return &assert{
t: t,
}
}
func (a *assert) Equal(expected, actual interface{}) bool {
if !objectsAreEqual(expected, actual) {
return a.Fail(fmt.Sprintf("Not equal:\n"+
"expected: %#v\n"+
" actual: %#v\n", expected, actual))
}
return true
}
func objectsAreEqual(expected, actual interface{}) bool {
if expected == nil || actual == nil {
return expected == actual
}
return reflect.DeepEqual(expected, actual)
}
func (a *assert) Nil(object interface{}) bool {
if isNil(object) {
return true
}
return a.Fail(fmt.Sprintf("Expected nil, but got: %#v", object))
}
func (a *assert) NotNil(object interface{}) bool {
if !isNil(object) {
return true
}
return a.Fail("Expected value not to be nil.")
}
// isNil checks if a specified object is nil or not, without Failing.
func isNil(object interface{}) bool {
if object == nil {
return true
}
value := reflect.ValueOf(object)
kind := value.Kind()
if kind >= reflect.Chan && kind <= reflect.Slice && value.IsNil() {
return true
}
return false
}
func (a *assert) Panics(f func()) bool {
if funcDidPanic, panicValue := didPanic(f); !funcDidPanic {
return a.Fail(fmt.Sprintf("func %#v should panic\n\r\tPanic value:\t%v", f, panicValue))
}
return true
}
// Fail reports a failure through the underlying *testing.T.
func (a *assert) Fail(failureMessage string) bool {
a.t.Errorf("%s", failureMessage)
return false
}
// didPanic returns true if the function passed to it panics. Otherwise, it returns false.
func didPanic(f func()) (bool, interface{}) {
didPanic := false
var message interface{}
func() {
defer func() {
if message = recover(); message != nil {
didPanic = true
}
}()
// call the target function
f()
}()
return didPanic, message
}

View File

@ -0,0 +1,107 @@
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
package antlr
/*
LexerB is a lexer for testing purposes.
This file is generated from this grammar.
lexer grammar LexerB;
ID : 'a'..'z'+;
INT : '0'..'9'+;
SEMI : ';';
ASSIGN : '=';
PLUS : '+';
MULT : '*';
WS : ' '+;
*/
var lexerB_serializedLexerAtn = []uint16{
3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 9, 40, 8,
1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9,
7, 4, 8, 9, 8, 3, 2, 6, 2, 19, 10, 2, 13, 2, 14, 2, 20, 3, 3, 6, 3, 24,
10, 3, 13, 3, 14, 3, 25, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7, 3, 7,
3, 8, 6, 8, 37, 10, 8, 13, 8, 14, 8, 38, 2, 2, 9, 3, 3, 5, 4, 7, 5, 9,
6, 11, 7, 13, 8, 15, 9, 3, 2, 2, 2, 42, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2,
2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2,
2, 2, 2, 15, 3, 2, 2, 2, 3, 18, 3, 2, 2, 2, 5, 23, 3, 2, 2, 2, 7, 27, 3,
2, 2, 2, 9, 29, 3, 2, 2, 2, 11, 31, 3, 2, 2, 2, 13, 33, 3, 2, 2, 2, 15,
36, 3, 2, 2, 2, 17, 19, 4, 99, 124, 2, 18, 17, 3, 2, 2, 2, 19, 20, 3, 2,
2, 2, 20, 18, 3, 2, 2, 2, 20, 21, 3, 2, 2, 2, 21, 4, 3, 2, 2, 2, 22, 24,
4, 50, 59, 2, 23, 22, 3, 2, 2, 2, 24, 25, 3, 2, 2, 2, 25, 23, 3, 2, 2,
2, 25, 26, 3, 2, 2, 2, 26, 6, 3, 2, 2, 2, 27, 28, 7, 61, 2, 2, 28, 8, 3,
2, 2, 2, 29, 30, 7, 63, 2, 2, 30, 10, 3, 2, 2, 2, 31, 32, 7, 45, 2, 2,
32, 12, 3, 2, 2, 2, 33, 34, 7, 44, 2, 2, 34, 14, 3, 2, 2, 2, 35, 37, 7,
34, 2, 2, 36, 35, 3, 2, 2, 2, 37, 38, 3, 2, 2, 2, 38, 36, 3, 2, 2, 2, 38,
39, 3, 2, 2, 2, 39, 16, 3, 2, 2, 2, 6, 2, 20, 25, 38, 2,
}
var lexerB_lexerDeserializer = NewATNDeserializer(nil)
var lexerB_lexerAtn = lexerB_lexerDeserializer.DeserializeFromUInt16(lexerB_serializedLexerAtn)
var lexerB_lexerChannelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}
var lexerB_lexerModeNames = []string{
"DEFAULT_MODE",
}
var lexerB_lexerLiteralNames = []string{
"", "", "", "';'", "'='", "'+'", "'*'",
}
var lexerB_lexerSymbolicNames = []string{
"", "ID", "INT", "SEMI", "ASSIGN", "PLUS", "MULT", "WS",
}
var lexerB_lexerRuleNames = []string{
"ID", "INT", "SEMI", "ASSIGN", "PLUS", "MULT", "WS",
}
type LexerB struct {
*BaseLexer
channelNames []string
modeNames []string
// TODO: EOF string
}
var lexerB_lexerDecisionToDFA = make([]*DFA, len(lexerB_lexerAtn.DecisionToState))
func init() {
for index, ds := range lexerB_lexerAtn.DecisionToState {
lexerB_lexerDecisionToDFA[index] = NewDFA(ds, index)
}
}
func NewLexerB(input CharStream) *LexerB {
l := new(LexerB)
l.BaseLexer = NewBaseLexer(input)
l.Interpreter = NewLexerATNSimulator(l, lexerB_lexerAtn, lexerB_lexerDecisionToDFA, NewPredictionContextCache())
l.channelNames = lexerB_lexerChannelNames
l.modeNames = lexerB_lexerModeNames
l.RuleNames = lexerB_lexerRuleNames
l.LiteralNames = lexerB_lexerLiteralNames
l.SymbolicNames = lexerB_lexerSymbolicNames
l.GrammarFileName = "LexerB.g4"
// TODO: l.EOF = TokenEOF
return l
}
// LexerB tokens.
const (
LexerBID = 1
LexerBINT = 2
LexerBSEMI = 3
LexerBASSIGN = 4
LexerBPLUS = 5
LexerBMULT = 6
LexerBWS = 7
)

View File

@ -0,0 +1,30 @@
package antlr
import (
"fmt"
"strings"
)
// newTestCommonToken creates a common token with the given token type, text and channel.
// note: for test purposes only
func newTestCommonToken(tokenType int, text string, channel int) *CommonToken {
t := new(CommonToken)
t.BaseToken = new(BaseToken)
t.tokenType = tokenType
t.channel = channel
t.text = text
t.line = 0
t.column = -1
return t
}
// tokensToString returns a string representation of a []Token.
// note: for test purposes only
func tokensToString(tokens []Token) string {
buf := make([]string, len(tokens))
for i, token := range tokens {
buf[i] = fmt.Sprintf("%v", token)
}
return "[" + strings.Join(buf, ", ") + "]"
}

View File

@ -27,6 +27,7 @@
<plugin> <!-- create src jar -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.0.1</version>
<executions>
<execution>
<goals>

View File

@ -151,8 +151,8 @@ public abstract class CodePointCharStream implements CharStream {
/** Return the UTF-16 encoded string for the given interval */
@Override
public String getText(Interval interval) {
int startIdx = Math.min(interval.a, size - 1);
int len = Math.min(interval.b - interval.a + 1, size);
int startIdx = Math.min(interval.a, size);
int len = Math.min(interval.b - interval.a + 1, size - startIdx);
// We know the maximum code point in byteArray is U+00FF,
// so we can treat this as if it were ISO-8859-1, aka Latin-1,

View File

@ -1541,11 +1541,6 @@ public class ParserATNSimulator extends ATNSimulator {
ATNConfig c = getEpsilonTarget(config, t, continueCollecting,
depth == 0, fullCtx, treatEofAsEpsilon);
if ( c!=null ) {
if (!t.isEpsilon() && !closureBusy.add(c)) {
// avoid infinite recursion for EOF* and EOF+
continue;
}
int newDepth = depth;
if ( config.state instanceof RuleStopState) {
assert !fullCtx;
@ -1555,11 +1550,6 @@ public class ParserATNSimulator extends ATNSimulator {
// come in handy and we avoid evaluating context dependent
// preds if this is > 0.
if (!closureBusy.add(c)) {
// avoid infinite recursion for right-recursive rules
continue;
}
if (_dfa != null && _dfa.isPrecedenceDfa()) {
int outermostPrecedenceReturn = ((EpsilonTransition)t).outermostPrecedenceReturn();
if (outermostPrecedenceReturn == _dfa.atnStartState.ruleIndex) {
@ -1568,15 +1558,28 @@ public class ParserATNSimulator extends ATNSimulator {
}
c.reachesIntoOuterContext++;
if (!closureBusy.add(c)) {
// avoid infinite recursion for right-recursive rules
continue;
}
configs.dipsIntoOuterContext = true; // TODO: can remove? only care when we add to set per middle of this method
assert newDepth > Integer.MIN_VALUE;
newDepth--;
if ( debug ) System.out.println("dips into outer ctx: "+c);
}
else if (t instanceof RuleTransition) {
// latch when newDepth goes negative - once we step out of the entry context we can't return
if (newDepth >= 0) {
newDepth++;
else {
if (!t.isEpsilon() && !closureBusy.add(c)) {
// avoid infinite recursion for EOF* and EOF+
continue;
}
if (t instanceof RuleTransition) {
// latch when newDepth goes negative - once we step out of the entry context we can't return
if (newDepth >= 0) {
newDepth++;
}
}
}

View File

@ -401,11 +401,11 @@ DoubleDict.prototype.set = function (a, b, o) {
function escapeWhitespace(s, escapeSpaces) {
s = s.replace("\t", "\\t");
s = s.replace("\n", "\\n");
s = s.replace("\r", "\\r");
s = s.replace(/\t/g, "\\t")
.replace(/\n/g, "\\n")
.replace(/\r/g, "\\r");
if (escapeSpaces) {
s = s.replace(" ", "\u00B7");
s = s.replace(/ /g, "\u00B7");
}
return s;
}
@ -443,4 +443,4 @@ exports.hashStuff = hashStuff;
exports.escapeWhitespace = escapeWhitespace;
exports.arrayToString = arrayToString;
exports.titleCase = titleCase;
exports.equalArrays = equalArrays;
exports.equalArrays = equalArrays;

View File

@ -218,6 +218,13 @@ class Parser (Recognizer):
self._ctx.exitRule(listener)
listener.exitEveryRule(self._ctx)
# Gets the number of syntax errors reported during parsing. This value is
# incremented each time {@link #notifyErrorListeners} is called.
#
# @see #notifyErrorListeners
#
def getNumberOfSyntaxErrors(self):
return self._syntaxErrors
def getTokenFactory(self):
return self._input.tokenSource._factory

View File

@ -36,14 +36,13 @@ class RuleTagToken(Token):
self.tokenIndex = -1 # from 0..n-1 of the token object in the input stream
self.line = 0 # line=1..n of the 1st character
self.column = -1 # beginning of the line at which it occurs, 0..n-1
self.label = label
self.label = unicode(label)
self._text = self.getText() # text of the token.
self.ruleName = ruleName
self.ruleName = unicode(ruleName)
def getText(self):
if self.label is None:
return "<" + self.ruleName + ">"
return u"<" + self.ruleName + u">"
else:
return "<" + self.label + ":" + self.ruleName + ">"
return u"<" + self.label + ":" + self.ruleName + u">"

View File

@ -24,8 +24,8 @@ class TokenTagToken(CommonToken):
#
def __init__(self, tokenName, type, label=None):
super(TokenTagToken, self).__init__(type=type)
self.tokenName = tokenName
self.label = label
self.tokenName = unicode(tokenName)
self.label = unicode(label)
self._text = self.getText()
#
@ -36,9 +36,9 @@ class TokenTagToken(CommonToken):
#
def getText(self):
if self.label is None:
return "<" + self.tokenName + ">"
return u"<" + self.tokenName + u">"
else:
return "<" + self.label + ":" + self.tokenName + ">"
return u"<" + self.label + u":" + self.tokenName + u">"
# <p>The implementation for {@link TokenTagToken} returns a string of the form
# {@code tokenName:type}.</p>

View File

@ -108,13 +108,13 @@ class TerminalNodeImpl(TerminalNode):
return visitor.visitTerminal(self)
def getText(self):
return self.symbol.text
return unicode(self.symbol.text)
def __unicode__(self):
if self.symbol.type == Token.EOF:
return "<EOF>"
return u"<EOF>"
else:
return self.symbol.text
return unicode(self.symbol.text)
# Represents a token that was consumed during resynchronization
# rather than during a valid match operation. For example,

View File

@ -227,6 +227,14 @@ class Parser (Recognizer):
listener.exitEveryRule(self._ctx)
# Gets the number of syntax errors reported during parsing. This value is
# incremented each time {@link #notifyErrorListeners} is called.
#
# @see #notifyErrorListeners
#
def getNumberOfSyntaxErrors(self):
return self._syntaxErrors
def getTokenFactory(self):
return self._input.tokenSource._factory

View File

@ -12,7 +12,7 @@ from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.PredictionContext import PredictionContextCache
from antlr4.ParserRuleContext import ParserRuleContext
from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from antlr4.error.ErrorStrategy import BailErrorStrategy

View File

@ -1 +1,4 @@
.build/
Antlr4.xcodeproj/
Tests/Antlr4Tests/gen/
xcuserdata/

View File

@ -1,3 +1,4 @@
// swift-tools-version:4.0
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
@ -5,15 +6,19 @@
import PackageDescription
let package = Package(
name: "Antlr4"
)
products.append(
Product(
name: "Antlr4",
type: .Library(.Dynamic),
modules: [
"Antlr4"
]
)
name: "Antlr4",
products: [
.library(
name: "Antlr4",
type: .dynamic,
targets: ["Antlr4"]),
],
targets: [
.target(
name: "Antlr4",
dependencies: []),
.testTarget(
name: "Antlr4Tests",
dependencies: ["Antlr4"]),
]
)

View File

@ -39,12 +39,12 @@ public protocol ANTLRErrorListener: class {
/// the parser was able to recover in line without exiting the
/// surrounding rule.
///
func syntaxError<T:ATNSimulator>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
func syntaxError<T>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
)
///

View File

@ -17,12 +17,12 @@ open class BaseErrorListener: ANTLRErrorListener {
public init() {
}
open func syntaxError<T:ATNSimulator>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
open func syntaxError<T>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
) {
}

View File

@ -19,7 +19,6 @@ public protocol CharStream: IntStream {
/// - parameter interval: an interval within the stream
/// - returns: the text of the specified interval
///
/// - throws: _ANTLRError.nullPointer_ if `interval` is `null`
/// - throws: _ANTLRError.illegalArgument_ if `interval.a < 0`, or if
/// `interval.b < interval.a - 1`, or if `interval.b` lies at or
/// past the end of the stream

View File

@ -25,12 +25,12 @@ public class ConsoleErrorListener: BaseErrorListener {
/// line __line__:__charPositionInLine__ __msg__
///
///
override public func syntaxError<T:ATNSimulator>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
override public func syntaxError<T>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?
) {
if Parser.ConsoleError {
errPrint("line \(line):\(charPositionInLine) \(msg)")

View File

@ -405,7 +405,7 @@ open class Lexer: Recognizer<LexerATNSimulator>
}
}
open func notifyListeners<T:ATNSimulator>(_ e: LexerNoViableAltException, recognizer: Recognizer<T>) {
open func notifyListeners<T>(_ e: LexerNoViableAltException, recognizer: Recognizer<T>) {
let text: String = _input!.getText(Interval.of(_tokenStartCharIndex, _input!.index()))
let msg: String = "token recognition error at: '\(getErrorDisplay(text))'"

View File

@ -336,8 +336,6 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
/// - Parameter listener: the listener to add
///
/// - Throws: _ANTLRError.nullPointer_ if listener is `null`
///
public func addParseListener(_ listener: ParseTreeListener) {
if _parseListeners == nil {
_parseListeners = Array<ParseTreeListener>()

View File

@ -20,13 +20,13 @@ public class ProxyErrorListener: ANTLRErrorListener {
self.delegates = delegates
}
public func syntaxError<T:ATNSimulator>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?)
{
public func syntaxError<T>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int,
_ charPositionInLine: Int,
_ msg: String,
_ e: AnyObject?)
{
for listener: ANTLRErrorListener in delegates {
listener.syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e)
}

View File

@ -228,9 +228,6 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
return "\(s)"
}
///
/// - Throws: ANTLRError.nullPointer if `listener` is `null`.
///
open func addErrorListener(_ listener: ANTLRErrorListener) {
_listeners.append(listener)

View File

@ -233,7 +233,7 @@ open class RuleContext: RuleNode {
return description
}
public final func toString<T:ATNSimulator>(_ recog: Recognizer<T>) -> String {
public final func toString<T>(_ recog: Recognizer<T>) -> String {
return toString(recog, ParserRuleContext.EMPTY)
}
@ -242,7 +242,7 @@ open class RuleContext: RuleNode {
}
// recog null unless ParserRuleContext, in which case we use subclass toString(...)
open func toString<T:ATNSimulator>(_ recog: Recognizer<T>?, _ stop: RuleContext) -> String {
open func toString<T>(_ recog: Recognizer<T>?, _ stop: RuleContext) -> String {
let ruleNames: [String]? = recog != nil ? recog!.getRuleNames() : nil
let ruleNamesList: Array<String>? = ruleNames ?? nil
return toString(ruleNamesList, stop)

View File

@ -63,7 +63,6 @@ public protocol TokenStream: IntStream {
///
/// - Parameter interval: The interval of tokens within this stream to get text
/// for.
/// - Throws: ANTLRError.nullPointer if `interval` is `null`
/// - Returns: The text of all tokens within the specified interval in this
/// stream.
///

View File

@ -167,7 +167,7 @@ public class ATNConfig: Hashable, CustomStringConvertible {
//return "MyClass \(string)"
return toString(nil, true)
}
public func toString<T:ATNSimulator>(_ recog: Recognizer<T>?, _ showAlt: Bool) -> String {
public func toString<T>(_ recog: Recognizer<T>?, _ showAlt: Bool) -> String {
let buf: StringBuilder = StringBuilder()
buf.append("(")
buf.append(state)

View File

@ -238,12 +238,9 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
private var configsHashValue: Int {
var hashCode = 1
for item in configs {
hashCode = Int.multiplyWithOverflow(3, hashCode).0
hashCode = Int.addWithOverflow(hashCode, item.hashValue).0
hashCode = hashCode &* 3 &+ item.hashValue
}
return hashCode
}
public final var count: Int {

View File

@ -175,7 +175,7 @@ public class ATNDeserializer {
if let s = s as? BlockStartState {
let endStateNumber: Int = toInt(data[p])
p += 1
endStateNumbers.append(s, endStateNumber)
endStateNumbers.append((s, endStateNumber))
}
}
atn.addState(s)

View File

@ -712,17 +712,17 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
}
public func toString<T:ATNSimulator>(_ recog: Recognizer<T>) -> String {
public func toString<T>(_ recog: Recognizer<T>) -> String {
return NSStringFromClass(PredictionContext.self)
// return toString(recog, ParserRuleContext.EMPTY);
}
public func toStrings<T:ATNSimulator>(_ recognizer: Recognizer<T>, _ currentState: Int) -> [String] {
public func toStrings<T>(_ recognizer: Recognizer<T>, _ currentState: Int) -> [String] {
return toStrings(recognizer, PredictionContext.EMPTY, currentState)
}
// FROM SAM
public func toStrings<T:ATNSimulator>(_ recognizer: Recognizer<T>?, _ stop: PredictionContext, _ currentState: Int) -> [String] {
public func toStrings<T>(_ recognizer: Recognizer<T>?, _ stop: PredictionContext, _ currentState: Int) -> [String] {
var result: Array<String> = Array<String>()
var perm: Int = 0
outer: while true {

View File

@ -37,7 +37,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
/// prediction, so we passed in the outer context here in case of context
/// dependent predicate evaluation.
///
public func eval<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
RuntimeException(#function + " must be overridden")
return false
}
@ -58,7 +58,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
/// * A non-`null` _org.antlr.v4.runtime.atn.SemanticContext_: the new simplified
/// semantic context after precedence predicates are evaluated.
///
public func evalPrecedence<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
return self
}
public var hashValue: Int {
@ -90,7 +90,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
}
override
public func eval<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
let localctx: RuleContext? = isCtxDependent ? parserCallStack : nil
return try parser.sempred(localctx, ruleIndex, predIndex)
}
@ -126,12 +126,12 @@ public class SemanticContext: Hashable, CustomStringConvertible {
}
override
public func eval<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
return try parser.precpred(parserCallStack, precedence)
}
override
public func evalPrecedence<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
if try parser.precpred(parserCallStack, precedence) {
return SemanticContext.NONE
} else {
@ -187,31 +187,28 @@ public class SemanticContext: Hashable, CustomStringConvertible {
public init(_ a: SemanticContext, _ b: SemanticContext) {
var operands: Set<SemanticContext> = Set<SemanticContext>()
if a is AND {
operands.formUnion((a as! AND).opnds)
//operands.addAll(Arrays.asList((a as AND).opnds));
if let aAnd = a as? AND {
operands.formUnion(aAnd.opnds)
} else {
operands.insert(a)
}
if b is AND {
operands.formUnion((b as! AND).opnds)
//operands.addAll(Arrays.asList((b as AND).opnds));
if let bAnd = b as? AND {
operands.formUnion(bAnd.opnds)
} else {
operands.insert(b)
}
let precedencePredicates: Array<PrecedencePredicate> =
SemanticContext.filterPrecedencePredicates(&operands)
let precedencePredicates = SemanticContext.filterPrecedencePredicates(&operands)
if !precedencePredicates.isEmpty {
// interested in the transition with the lowest precedence
let reduced: PrecedencePredicate = precedencePredicates.sorted {
let reduced = precedencePredicates.sorted {
$0.precedence < $1.precedence
}.first! //Collections.min(precedencePredicates);
operands.insert(reduced)
}
operands.insert(reduced[0])
}
opnds = Array(operands) //.toArray(new, SemanticContext[operands.size()]);
opnds = Array(operands)
}
override
@ -236,7 +233,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
/// unordered.
///
override
public func eval<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
for opnd: SemanticContext in opnds {
if try !opnd.eval(parser, parserCallStack) {
return false
@ -246,7 +243,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
}
override
public func evalPrecedence<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
var differs: Bool = false
var operands: Array<SemanticContext> = Array<SemanticContext>()
for context: SemanticContext in opnds {
@ -305,30 +302,28 @@ public class SemanticContext: Hashable, CustomStringConvertible {
public init(_ a: SemanticContext, _ b: SemanticContext) {
var operands: Set<SemanticContext> = Set<SemanticContext>()
if a is OR {
operands.formUnion((a as! OR).opnds)
// operands.addAll(Arrays.asList((a as OR).opnds));
if let aOr = a as? OR {
operands.formUnion(aOr.opnds)
} else {
operands.insert(a)
}
if b is OR {
operands.formUnion((b as! OR).opnds)
//operands.addAll(Arrays.asList((b as OR).opnds));
if let bOr = b as? OR {
operands.formUnion(bOr.opnds)
} else {
operands.insert(b)
}
let precedencePredicates: Array<PrecedencePredicate> = SemanticContext.filterPrecedencePredicates(&operands)
let precedencePredicates = SemanticContext.filterPrecedencePredicates(&operands)
if !precedencePredicates.isEmpty {
// interested in the transition with the highest precedence
let reduced: PrecedencePredicate = precedencePredicates.sorted {
let reduced = precedencePredicates.sorted {
$0.precedence > $1.precedence
}.first!
//var reduced : PrecedencePredicate = Collections.max(precedencePredicates);
operands.insert(reduced)
}
operands.insert(reduced[0])
}
self.opnds = Array(operands) //operands.toArray(new, SemanticContext[operands.size()]);
self.opnds = Array(operands)
}
override
@ -351,7 +346,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
/// unordered.
///
override
public func eval<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
for opnd: SemanticContext in opnds {
if try opnd.eval(parser, parserCallStack) {
return true
@ -361,7 +356,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
}
override
public func evalPrecedence<T:ATNSimulator>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
var differs: Bool = false
var operands: Array<SemanticContext> = Array<SemanticContext>()
for context: SemanticContext in opnds {
@ -447,21 +442,14 @@ public class SemanticContext: Hashable, CustomStringConvertible {
return result
}
private static func filterPrecedencePredicates(
_ collection: inout Set<SemanticContext>) ->
Array<PrecedencePredicate> {
let result = collection.filter {
$0 is PrecedencePredicate
private static func filterPrecedencePredicates(_ collection: inout Set<SemanticContext>) -> [PrecedencePredicate] {
let result = collection.flatMap {
$0 as? PrecedencePredicate
}
collection = Set<SemanticContext>(collection.filter {
!($0 is PrecedencePredicate)
})
//if (result == nil) {
//return Array<PrecedencePredicate>();
//}
return (result as! Array<PrecedencePredicate>)
return result
}
}
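The rewritten filterPrecedencePredicates above uses flatMap over an optional cast to pull one subtype out of a mixed Set, then rebuilds the Set without those elements (compactMap is the later spelling of the same idiom). A generic sketch of that partition-by-type pattern, assuming nothing beyond the standard library:

// Remove every element that casts to U from `source` and return the matches;
// same shape as filterPrecedencePredicates above, but generic and hypothetical.
func extractAll<T: Hashable, U>(_ source: inout Set<T>, as _: U.Type) -> [U] {
    let matches = source.compactMap { $0 as? U }
    source = Set(source.filter { ($0 as? U) == nil })
    return matches
}

// var mixed: Set<AnyHashable> = [1, 2, "three"]
// let ints = extractAll(&mixed, as: Int.self)   // [1, 2] (order unspecified); mixed is left with ["three"]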

View File

@ -69,7 +69,7 @@ public final class ArrayWrapper<T: Hashable>: ExpressibleByArrayLiteral, Hashabl
}
public func == <Element: Equatable>(lhs: ArrayWrapper<Element>, rhs: ArrayWrapper<Element>) -> Bool {
public func == <Element>(lhs: ArrayWrapper<Element>, rhs: ArrayWrapper<Element>) -> Bool {
if lhs === rhs {
return true
}

View File

@ -32,10 +32,6 @@ import Foundation
/// implementation. The length of a bit set relates to logical length
/// of a bit set and is defined independently of implementation.
///
/// Unless otherwise noted, passing a null parameter to any of the
/// methods in a `BitSet` will result in a
/// `ANTLRError.nullPointer`.
///
/// A `BitSet` is not safe for multithreaded use without
/// external synchronization.
///
@ -648,12 +644,12 @@ public class BitSet: Hashable, CustomStringConvertible {
return 64
}
var n: Int32 = 63
y = Int32(truncatingBitPattern: i)
y = Int32(truncatingIfNeeded: i)
if y != 0 {
n = n - 32
x = y
} else {
x = Int32(truncatingBitPattern: i >>> 32)
x = Int32(truncatingIfNeeded: i >>> 32)
}
y = x << 16
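The two changed lines in this hunk are the Swift 4 rename of Int32(truncatingBitPattern:) to Int32(truncatingIfNeeded:); the behaviour (keep the low 32 bits, discard the rest) is unchanged. A quick illustrative check with made-up values (the >>> used above is the runtime's own unsigned-shift operator and is not part of this sketch):

// Purely illustrative values.
let wide: Int64 = 0x1_2345_6789
let low  = Int32(truncatingIfNeeded: wide)        // keeps the low 32 bits: 0x2345_6789
let high = Int32(truncatingIfNeeded: wide >> 32)  // high word after the shift: 0x1
print(low == 0x2345_6789, high == 1)              // true true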

View File

@ -41,7 +41,7 @@ final class Entry<K: Hashable,V>: CustomStringConvertible {
var description: String { return "\(getKey())=\(getValue())" }
}
func == <K: Hashable, V: Equatable>(lhs: Entry<K,V>, rhs: Entry<K,V>) -> Bool {
func == <K, V: Equatable>(lhs: Entry<K,V>, rhs: Entry<K,V>) -> Bool {
if lhs === rhs {
return true
}
@ -52,7 +52,7 @@ func == <K: Hashable, V: Equatable>(lhs: Entry<K,V>, rhs: Entry<K,V>) -> Bool {
}
return false
}
func == <K: Hashable, V: Equatable>(lhs: Entry<K,V?>, rhs: Entry<K,V?>) -> Bool {
func == <K, V: Equatable>(lhs: Entry<K,V?>, rhs: Entry<K,V?>) -> Bool {
if lhs === rhs {
return true
}

View File

@ -49,19 +49,19 @@ public final class MurmurHash {
let m: Int32 = 5
let n: Int32 = -430675100//0xE6546B64;
var k: Int32 = Int32(truncatingBitPattern: value)
k = Int32.multiplyWithOverflow(k, c1).0
var k: Int32 = Int32(truncatingIfNeeded: value)
k = k.multipliedReportingOverflow(by: c1).partialValue
// (k,_) = UInt32.multiplyWithOverflow(k, c1) ;//( k * c1);
//TODO: CHECKE >>>
k = (k << r1) | (k >>> (Int32(32) - r1)) //k = (k << r1) | (k >>> (32 - r1));
//k = UInt32 (truncatingBitPattern:Int64(Int64(k) * Int64(c2)));//( k * c2);
//(k,_) = UInt32.multiplyWithOverflow(k, c2)
k = Int32.multiplyWithOverflow(k, c2).0
k = k.multipliedReportingOverflow(by: c2).partialValue
var hash = Int32(hashIn)
hash = hash ^ k
hash = (hash << r2) | (hash >>> (Int32(32) - r2))//hash = (hash << r2) | (hash >>> (32 - r2));
(hash, _) = Int32.multiplyWithOverflow(hash, m)
(hash, _) = Int32.addWithOverflow(hash, n)
hash = hash.multipliedReportingOverflow(by: m).partialValue
hash = hash.addingReportingOverflow(n).partialValue
//hash = hash * m + n;
// print("murmur update2 : \(hash)")
return Int(hash)
@ -90,12 +90,12 @@ public final class MurmurHash {
public static func finish(_ hashin: Int, _ numberOfWordsIn: Int) -> Int {
var hash = Int32(hashin)
let numberOfWords = Int32(numberOfWordsIn)
hash = hash ^ Int32.multiplyWithOverflow(numberOfWords, Int32(4)).0 //(numberOfWords * UInt32(4));
hash = hash ^ numberOfWords.multipliedReportingOverflow(by: 4).partialValue //(numberOfWords * UInt32(4));
hash = hash ^ (hash >>> Int32(16)) //hash = hash ^ (hash >>> 16);
(hash, _) = Int32.multiplyWithOverflow(hash, Int32(-2048144789))//hash * UInt32(0x85EBCA6B);
hash = hash.multipliedReportingOverflow(by: -2048144789).partialValue //hash * UInt32(0x85EBCA6B);
hash = hash ^ (hash >>> Int32(13))//hash = hash ^ (hash >>> 13);
//hash = UInt32(truncatingBitPattern: UInt64(hash) * UInt64(0xC2B2AE35)) ;
(hash, _) = Int32.multiplyWithOverflow(hash, Int32(-1028477387))
hash = hash.multipliedReportingOverflow(by: -1028477387).partialValue
hash = hash ^ (hash >>> Int32(16))// hash = hash ^ (hash >>> 16);
//print("murmur finish : \(hash)")
return Int(hash)
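The hunk above swaps the removed Int32.multiplyWithOverflow/addWithOverflow statics for the Swift 4 instance methods; the .partialValue of multipliedReportingOverflow(by:) is the same wrapped result that the &* operator gives. A tiny check with an arbitrary value:

let k: Int32 = 0x7fff_ffff                              // Int32.max, just for illustration
let viaMethod = k.multipliedReportingOverflow(by: 2)    // (partialValue: -2, overflow: true)
let viaOperator = k &* 2                                // wrapping multiply
print(viaMethod.partialValue == viaOperator)            // true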

View File

@ -14,7 +14,6 @@
import Foundation
public enum ANTLRError: Error {
case nullPointer(msg:String)
case unsupportedOperation(msg:String)
case indexOutOfBounds(msg:String)
case illegalState(msg:String)

View File

@ -11,14 +11,6 @@ import Foundation
extension String {
func split(_ separator: String) -> [String] {
return self.components(separatedBy: separator)
}
func containsIgnoreCase(_ find: String) -> Bool {
return self.lowercased().range(of: find.lowercased()) != nil
}
var length: Int {
return self.characters.count
}
@ -60,32 +52,6 @@ extension String {
return index
}
func substringAfter(_ string: String) -> String {
if let range = self.range(of: string) {
let intIndex: Int = self.characters.distance(from: self.startIndex, to: range.upperBound)
return self.substring(from: self.characters.index(self.startIndex, offsetBy: intIndex))
}
return self
}
var lowercaseFirstChar: String {
var result = self
if self.length > 0 {
let startIndex = self.startIndex
result.replaceSubrange(startIndex ... startIndex, with: String(self[startIndex]).lowercased())
}
return result
}
func substringWithRange(_ range: Range<Int>) -> String {
let start = self.characters.index(self.startIndex, offsetBy: range.lowerBound)
let end = self.characters.index(self.startIndex, offsetBy: range.upperBound)
return self.substring(with: start ..< end)
}
subscript(integerIndex: Int) -> Character {
let index = characters.index(startIndex, offsetBy: integerIndex)
return self[index]
@ -95,118 +61,7 @@ extension String {
let start = characters.index(startIndex, offsetBy: integerRange.lowerBound)
let end = characters.index(startIndex, offsetBy: integerRange.upperBound)
let range = start ..< end
return self[range]
}
func charAt(_ index: Int) -> Character {
return self[self.characters.index(self.startIndex, offsetBy: index)]
}
}
// Mapping from XML/HTML character entity reference to character
// From http://en.wikipedia.org/wiki/List_of_XML_and_HTML_character_entity_references
private let characterEntities: [String:Character] = [
// XML predefined entities:
"&quot;": "\"",
"&amp;": "&",
"&apos;": "'",
"&lt;": "<",
"&gt;": ">",
// HTML character entity references:
"&nbsp;": "\u{00a0}",
// ...
"&diams;": "",
]
extension String {
///
/// Returns a new string made by replacing in the `String`
/// all HTML character entity references with the corresponding
/// character.
///
var stringByDecodingHTMLEntities: String {
// Convert the number in the string to the corresponding
// Unicode character, e.g.
// decodeNumeric("64", 10) --> "@"
// decodeNumeric("20ac", 16) --> ""
func decodeNumeric(_ string: String, base: Int32) -> Character? {
let code = UInt32(strtoul(string, nil, base))
return Character(UnicodeScalar(code)!)
}
// Decode the HTML character entity to the corresponding
// Unicode character, return `nil` for invalid input.
// decode("&#64;") --> "@"
// decode("&#x20ac;") --> ""
// decode("&lt;") --> "<"
// decode("&foo;") --> nil
func decode(_ entity: String) -> Character? {
if entity.hasPrefix("&#x") || entity.hasPrefix("&#X") {
return decodeNumeric(entity.substring(from: entity.characters.index(entity.startIndex, offsetBy: 3)), base: 16)
} else if entity.hasPrefix("&#") {
return decodeNumeric(entity.substring(from: entity.characters.index(entity.startIndex, offsetBy: 2)), base: 10)
} else {
return characterEntities[entity]
}
}
var result = ""
var position = startIndex
// Find the next '&' and copy the characters preceding it to `result`:
while let ampRange = self.range(of: "&", range: position ..< endIndex) {
result.append(self[position ..< ampRange.lowerBound])
position = ampRange.lowerBound
// Find the next ';' and copy everything from '&' to ';' into `entity`
if let semiRange = self.range(of: ";", range: position ..< endIndex) {
let entity = self[position ..< semiRange.upperBound]
position = semiRange.upperBound
if let decoded = decode(entity) {
// Replace by decoded character:
result.append(decoded)
} else {
// Invalid entity, copy verbatim:
result.append(entity)
}
} else {
// No matching ';'.
break
}
}
// Copy remaining characters to `result`:
result.append(self[position ..< endIndex])
return result
return String(self[range])
}
}
extension String {
static let htmlEscapedDictionary = [
"&amp;": "&",
"&quot;": "\"",
"&#x27;": "'",
"&#x39;": "'",
"&#x92;": "'",
"&#x96;": "'",
"&gt;": ">",
"&lt;": "<"
]
public var escapedHtmlString: String {
var newString = "\(self)"
for (key, value) in String.htmlEscapedDictionary {
newString = newString.replacingOccurrences(of: value, with: key)
}
return newString
}
}
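One caveat about escapedHtmlString as written: it replaces whole substrings while iterating a dictionary, so whether '&' itself gets escaped before or after the other entities depends on unspecified dictionary order. A minimal single-pass sketch of the same escaping idea (standard library only; the helper name is hypothetical):

func htmlEscaped(_ s: String) -> String {
    // Per-character map, so entities inserted earlier are never re-escaped.
    let map: [Character: String] = ["&": "&amp;", "\"": "&quot;", "'": "&#x27;", "<": "&lt;", ">": "&gt;"]
    return s.reduce(into: "") { out, ch in out += map[ch] ?? String(ch) }
}

// htmlEscaped("if (a < b && c > d)")  ->  "if (a &lt; b &amp;&amp; c &gt; d)"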

View File

@ -320,7 +320,6 @@ public class ParseTreePatternMatcher {
// add special rule token or conjure up new token from name
let firstStr = String(tagChunk.getTag()[0])
if firstStr.lowercased() != firstStr {
//if ( Character.isUpperCase(tagChunk.getTag().charAt(0)) ) {
let ttype: Int = parser.getTokenType(tagChunk.getTag())
if ttype == CommonToken.INVALID_TYPE {
throw ANTLRError.illegalArgument(msg: "Unknown token " + tagChunk.getTag() + " in pattern: " + pattern)
@ -329,7 +328,6 @@ public class ParseTreePatternMatcher {
tokens.append(t)
} else {
if firstStr.uppercased() != firstStr {
// if ( Character.isLowerCase(tagChunk.getTag().charAt(0)) ) {
let ruleIndex: Int = parser.getRuleIndex(tagChunk.getTag())
if ruleIndex == -1 {
throw ANTLRError.illegalArgument(msg: "Unknown rule " + tagChunk.getTag() + " in pattern: " + pattern)

View File

@ -62,10 +62,10 @@ class VisitorTests: XCTestCase {
var errors = [String]()
override func syntaxError<T : ATNSimulator>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int, _ charPositionInLine: Int,
_ msg: String, _ e: AnyObject?) {
override func syntaxError<T>(_ recognizer: Recognizer<T>,
_ offendingSymbol: AnyObject?,
_ line: Int, _ charPositionInLine: Int,
_ msg: String, _ e: AnyObject?) {
errors.append("line \(line):\(charPositionInLine) \(msg)")
}
}
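For context, the listener in this test only accumulates messages; wiring one up to a parser looks roughly like the following. This is a sketch that assumes the runtime's BaseErrorListener base class and the Java-style removeErrorListeners/addErrorListener calls, none of which are shown in this hunk:

import Antlr4

class CollectingErrorListener: BaseErrorListener {
    var errors = [String]()
    override func syntaxError<T>(_ recognizer: Recognizer<T>,
                                 _ offendingSymbol: AnyObject?,
                                 _ line: Int, _ charPositionInLine: Int,
                                 _ msg: String, _ e: AnyObject?) {
        errors.append("line \(line):\(charPositionInLine) \(msg)")
    }
}

// let listener = CollectingErrorListener()
// parser.removeErrorListeners()     // drop the default console listener (assumed API)
// parser.addErrorListener(listener)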

View File

@ -110,7 +110,7 @@ def get_argument_parser():
"Use this command if you want to include ANTLR4 as SPM dependency.", )
p.add_argument("--gen-xcodeproj",
action="store_true",
help="<DEVELOPER> Generates an Xcode project for ANTLR4 Swift runtime. "
help="<DEVELOPER, USER> Generates an Xcode project for ANTLR4 Swift runtime. "
"This directive will generate all the required parsers for the project. "
"Feel free to re-run whenever you updated the test grammar files.")
p.add_argument("--test",
@ -147,7 +147,11 @@ def generate_spm_module(in_folder=TMP_FOLDER):
call(["git", "tag", "{}.0.0".format(MAJOR_VERSION)])
antlr_says("Created local repository.")
antlr_says("Put .Package(url: \"{}\", majorVersion: {}) in Package.swift.".format(os.getcwd(), MAJOR_VERSION))
antlr_says("(swift-tools-version:3.0) "
"Put .Package(url: \"{}\", majorVersion: {}) in Package.swift.".format(os.getcwd(), MAJOR_VERSION))
antlr_says("(swift-tools-wersion:4.0) "
"Put .package(url: \"{}\", from: \"{}.0.0\") in Package.swift "
"and add \"Antlr4\" to target dependencies. ".format(os.getcwd(), MAJOR_VERSION))
def generate_xcodeproj():

View File

@ -8,6 +8,7 @@ package org.antlr.v4.test.tool;
import org.antlr.v4.Tool;
import org.antlr.v4.tool.ErrorType;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -60,6 +61,16 @@ public class TestToolSyntaxErrors extends BaseJavaToolTest {
super.testSetUp();
}
@Test
public void AllErrorCodesDistinct() {
ErrorType[] errorTypes = ErrorType.class.getEnumConstants();
for (int i = 0; i < errorTypes.length; i++) {
for (int j = i + 1; j < errorTypes.length; j++) {
Assert.assertNotEquals(errorTypes[i].code, errorTypes[j].code);
}
}
}
@Test public void testA() { super.testErrors(A, true); }
@Test public void testExtraColon() {

View File

@ -58,6 +58,7 @@
<plugin> <!-- create src jar -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.0.1</version>
<executions>
<execution>
<goals>
@ -86,6 +87,7 @@
<plugin> <!-- include code-generated sources -->
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>3.0.0</version>
<executions>
<execution>
<phase>generate-sources</phase>

View File

@ -1,5 +1,5 @@
fileHeader(grammarFileName, ANTLRVersion) ::= <<
// Generated from <grammarFileName; format="java-escape"> by ANTLR <ANTLRVersion>.
// Code generated from <grammarFileName; format="java-escape"> by ANTLR <ANTLRVersion>. DO NOT EDIT.
>>
ParserFile(file, parser, namedActions, contextSuperClass) ::= <<
@ -777,29 +777,31 @@ MatchSet(m, expr, capture) ::= "<CommonSetStuff(m, expr, capture, false)>"
MatchNotSet(m, expr, capture) ::= "<CommonSetStuff(m, expr, capture, true)>"
CommonSetStuff(m, expr, capture, invert) ::= <<
p.SetState(<m.stateNumber>)
<if(m.labels)>
var _lt = p.GetTokenStream().LT(1)<! TODO: Should LT be called always like InvokeRule and MatchToken? !>
<m.labels:{l | <labelref(l)> = _lt}; separator="\n">
<endif>
<if(capture)>
<capture>
<endif>
<if(invert)>if <m.varName> \<= 0 || <expr> <else>if !(<expr>)<endif> {
{
p.SetState(<m.stateNumber>)
<if(m.labels)>
var _ri = p.GetErrorHandler().RecoverInline(p)
<m.labels:{l | <labelref(l)> = _ri}; separator="\n">
<else>
p.GetErrorHandler().RecoverInline(p)
var _lt = p.GetTokenStream().LT(1)<! TODO: Should LT be called always like InvokeRule and MatchToken? !>
<m.labels:{l | <labelref(l)> = _lt}; separator="\n">
<endif>
} else {
p.GetErrorHandler().ReportMatch(p)
p.Consume()
<if(capture)>
<capture>
<endif>
<if(invert)>if <m.varName> \<= 0 || <expr> <else>if !(<expr>)<endif> {
<if(m.labels)>
var _ri = p.GetErrorHandler().RecoverInline(p)
<m.labels:{l | <labelref(l)> = _ri}; separator="\n">
<else>
p.GetErrorHandler().RecoverInline(p)
<endif>
} else {
p.GetErrorHandler().ReportMatch(p)
p.Consume()
}
}
>>

View File

@ -894,8 +894,10 @@ public class <lexer.name> extends <superClass; null="Lexer"> {
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
<if(lexer.tokens)>
public static final int
<lexer.tokens:{k | <k>=<lexer.tokens.(k)>}; separator=", ", wrap, anchor>;
<endif>
<if(lexer.channels)>
public static final int
<lexer.channels:{c | <c>=<lexer.channels.(c)>}; separator=", ", wrap, anchor>;

View File

@ -36,12 +36,12 @@
* REQUIRED.
*/
pythonTypeInitMap ::= [
"bool":"False",
javascriptTypeInitMap ::= [
"bool":"false",
"int":"0",
"float":"0.0",
"str":"",
default:"None" // anything other than a primitive type is an object
default:"{}" // anything other than a primitive type is an object
]
// args must be <object-model-object>, <fields-resulting-in-STs>
@ -802,6 +802,9 @@ var antlr4 = require('antlr4/index');
>>
Lexer(lexer, atn, actionFuncs, sempredFuncs, superClass) ::= <<
<if(superClass)>
var <superClass> = require('./<superClass>').<superClass>;
<endif>
<atn>
@ -860,7 +863,7 @@ var serializedATN = ["<model.serialized; wrap={",<\n> "}>"].join("");
* must be an object, default value is "null".
*/
initValue(typeName) ::= <<
<javaTypeInitMap.(typeName)>
<javascriptTypeInitMap.(typeName)>
>>
codeFileExtension() ::= ".js"

View File

@ -809,7 +809,7 @@ def serializedATN():
* must be an object, default value is "null".
*/
initValue(typeName) ::= <<
<javaTypeInitMap.(typeName)>
<pythonTypeInitMap.(typeName)>
>>
codeFileExtension() ::= ".py"

View File

@ -816,7 +816,7 @@ def serializedATN():
* must be an object, default value is "null".
*/
initValue(typeName) ::= <<
<javaTypeInitMap.(typeName)>
<pythonTypeInitMap.(typeName)>
>>
codeFileExtension() ::= ".py"

View File

@ -394,11 +394,11 @@ public enum ErrorType {
*/
IMPORT_NAME_CLASH(113, "<arg.typeString> grammar <arg.name> and imported <arg2.typeString> grammar <arg2.name> both generate <arg2.recognizerName>", ErrorSeverity.ERROR),
/**
* Compiler Error 160.
* Compiler Error 114.
*
* <p>cannot find tokens file <em>filename</em></p>
*/
CANNOT_FIND_TOKENS_FILE_REFD_IN_GRAMMAR(160, "cannot find tokens file <arg>", ErrorSeverity.ERROR),
CANNOT_FIND_TOKENS_FILE_REFD_IN_GRAMMAR(114, "cannot find tokens file <arg>", ErrorSeverity.ERROR),
/**
* Compiler Warning 118.
*
@ -522,7 +522,7 @@ public enum ErrorType {
*/
USE_OF_BAD_WORD(134, "symbol <arg> conflicts with generated code in target language or runtime", ErrorSeverity.ERROR),
/**
* Compiler Error 134.
* Compiler Error 183.
*
* <p>rule reference <em>rule</em> is not currently supported in a set</p>
*
@ -530,7 +530,7 @@ public enum ErrorType {
* Note: This error has the same number as the unrelated error
* {@link #USE_OF_BAD_WORD}.</p>
*/
UNSUPPORTED_REFERENCE_IN_LEXER_SET(134, "rule reference <arg> is not currently supported in a set", ErrorSeverity.ERROR),
UNSUPPORTED_REFERENCE_IN_LEXER_SET(183, "rule reference <arg> is not currently supported in a set", ErrorSeverity.ERROR),
/**
* Compiler Error 135.
*