Merge branch 'master' into interpreter2

Mike Lischke 2017-11-11 11:06:53 +01:00
commit eaed562cb3
112 changed files with 2810 additions and 2789 deletions

View File

@ -2,9 +2,13 @@ sudo: true
language: java
before_cache:
- rm -rf $HOME/.m2/repository/org/antlr
cache:
timeout: 600
directories:
- $HOME/.m2
- $HOME/Library/Caches/Antlr4
- $HOME/Library/Caches/Homebrew
stages:

View File

@ -2,6 +2,11 @@
set -euo pipefail
cache_dir="$HOME/Library/Caches/Antlr4"
dotnet_url='https://download.microsoft.com/download/B/9/F/B9F1AF57-C14A-4670-9973-CDF47209B5BF/dotnet-dev-osx-x64.1.0.4.pkg'
dotnet_file=$(basename "$dotnet_url")
dotnet_shasum='63b5d99028cd8b2454736076106c96ba7d05f0fc'
thisdir=$(dirname "$0")
# OpenSSL setup for dotnet core
@ -10,10 +15,23 @@ ln -s /usr/local/opt/openssl/lib/libcrypto.1.0.0.dylib /usr/local/lib/
ln -s /usr/local/opt/openssl/lib/libssl.1.0.0.dylib /usr/local/lib/
# download dotnet core
curl https://download.microsoft.com/download/B/9/F/B9F1AF57-C14A-4670-9973-CDF47209B5BF/dotnet-dev-osx-x64.1.0.4.pkg -o /tmp/dotnet-dev-osx-x64.1.0.4.pkg
mkdir -p "$cache_dir"
(cd "$cache_dir"
if [ -f "$dotnet_file" ]
then
if ! shasum -s -c <<<"$dotnet_shasum $dotnet_file"
then
rm -f "$dotnet_file"
fi
fi
if ! [ -f "$dotnet_file" ]
then
curl "$dotnet_url" -o "$dotnet_file"
fi
)
# install dotnet core
sudo installer -pkg /tmp/dotnet-dev-osx-x64.1.0.4.pkg -target /
sudo installer -pkg "$cache_dir/$dotnet_file" -target /
# make the link
ln -s /usr/local/share/dotnet/dotnet /usr/local/bin/
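
The script above now caches the downloaded installer and only re-fetches it when the `shasum` check fails. A minimal sketch for regenerating `dotnet_shasum` after bumping `dotnet_url` (run locally; not part of this change):

```bash
# Sketch: compute the SHA-1 digest expected by the script's shasum check.
dotnet_url='https://download.microsoft.com/download/B/9/F/B9F1AF57-C14A-4670-9973-CDF47209B5BF/dotnet-dev-osx-x64.1.0.4.pkg'
curl -O "$dotnet_url"                 # saves dotnet-dev-osx-x64.1.0.4.pkg locally
shasum "$(basename "$dotnet_url")"    # paste the printed digest into dotnet_shasum
```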

View File

@ -1,5 +1,7 @@
#!/bin/bash
set -euo pipefail
# Linux-specific setup; it has to be done
# here since environment variables don't pass
# across scripts
@ -13,10 +15,20 @@ if [ $TRAVIS_OS_NAME == "linux" ]; then
curl https://swift.org/builds/$SWIFT_VERSION-release/ubuntu1404/$SWIFT_VERSION-RELEASE/$SWIFT_VERSION-RELEASE-ubuntu14.04.tar.gz -s | tar xz -C swift &> /dev/null
fi
if [ -z "${JAVA_HOME-}" ]
then
export JAVA_HOME="$(dirname $(java -XshowSettings:properties -version 2>&1 |
grep 'java\.home' | awk '{ print $3 }'))"
fi
# check swift
swift --version
swift build --version
pushd ../runtime/Swift
./boot.py --test
popd
if [ $GROUP == "LEXER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=swift.* test
elif [ $GROUP == "PARSER" ]; then

View File

@ -1,8 +1,11 @@
version: '4.7.1-SNAPSHOT+AppVeyor.{build}'
os: Windows Server 2012
cache:
- '%USERPROFILE%\.m2'
- '%USERPROFILE%\.nuget\packages -> **\project.json'
build: off
build_script:
- mvn -DskipTests install -q --batch-mode
- mvn -DskipTests install --batch-mode
- msbuild runtime/CSharp/runtime/CSharp/Antlr4.vs2013.sln /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" /verbosity:detailed
- msbuild ./runtime-testsuite/target/classes/CSharp/runtime/CSharp/Antlr4.vs2013.sln /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" /verbosity:detailed
test_script:
- mvn install -q -Dantlr-python2-python="C:\Python27\python.exe" -Dantlr-python3-python="C:\Python35\python.exe" -Dantlr-javascript-nodejs="C:\Program Files (x86)\nodejs\node.exe" --batch-mode
build:
verbosity: minimal
- mvn install -Dantlr-python2-python="C:\Python27\python.exe" -Dantlr-python3-python="C:\Python35\python.exe" -Dantlr-javascript-nodejs="C:\Program Files (x86)\nodejs\node.exe" --batch-mode

View File

@ -161,8 +161,13 @@ YYYY/MM/DD, github id, Full name, email
2017/07/27, matthauck, Matt Hauck, matthauck@gmail.com
2017/07/27, shirou, WAKAYAMA Shirou, shirou.faw@gmail.com
2017/08/20, tiagomazzutti, Tiago Mazzutti, tiagomzt@gmail.com
2017/08/20, milanaleksic, Milan Aleksic, milanaleksic@gmail.com
2017/08/29, Eddy Reyes, eddy@mindsight.io
2017/09/09, brauliobz, Bráulio Bezerra, brauliobezerra@gmail.com
2017/09/11, sachinjain024, Sachin Jain, sachinjain024@gmail.com
2017/10/06, bramp, Andrew Brampton, brampton@gmail.com
2017/10/15, simkimsia, Sim Kim Sia, kimcity@gmail.com
2017/10/27, Griffon26, Maurice van der Pot, griffon26@kfk4ever.com
2017/05/29, rlfnb, Ralf Neeb, rlfnb@rlfnb.de
2017/10/29, gendalph, Максим Прохоренко, Maxim\dotProhorenko@gm@il.com
2017/11/02, jasonmoo, Jason Mooberry, jason.mooberry@gmail.com

View File

@ -59,8 +59,7 @@ Now, make sure C# runtime is built and installed locally.
```bash
cd ~/antlr/code/antlr4/runtime/CSharp/runtime/CSharp
# kill previous ones manually as "xbuild /t:Clean" didn't seem to do it
rm Antlr4.Runtime/bin/net20/Release/Antlr4.Runtime.dll
rm Antlr4.Runtime/obj/net20/Release/Antlr4.Runtime.dll
find . -name '*.dll' -exec rm {} \;
# build
xbuild /p:Configuration=Release Antlr4.Runtime/Antlr4.Runtime.mono.csproj
```

View File

@ -22,15 +22,15 @@ Or just download in browser from website:
[http://www.antlr.org/download.html](http://www.antlr.org/download.html)
and put it somewhere rational like `/usr/local/lib`.
2. Add `antlr-4.5.3-complete.jar` to your `CLASSPATH`:
2. Add `antlr-4.7-complete.jar` to your `CLASSPATH`:
```
$ export CLASSPATH=".:/usr/local/lib/antlr-4.5.3-complete.jar:$CLASSPATH"
$ export CLASSPATH=".:/usr/local/lib/antlr-4.7-complete.jar:$CLASSPATH"
```
It's also a good idea to put this in your `.bash_profile` or whatever your startup script is.
3. Create aliases for the ANTLR Tool, and `TestRig`.
```
$ alias antlr4='java -Xmx500M -cp "/usr/local/lib/antlr-4.5.3-complete.jar:$CLASSPATH" org.antlr.v4.Tool'
$ alias antlr4='java -Xmx500M -cp "/usr/local/lib/antlr-4.7-complete.jar:$CLASSPATH" org.antlr.v4.Tool'
$ alias grun='java org.antlr.v4.gui.TestRig'
```
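
A quick way to sanity-check the `antlr4` and `grun` aliases is the usual hello-world round trip (a sketch assuming a trivial `Hello.g4` grammar with a start rule `r`):

```bash
# Hello.g4 is assumed to contain: grammar Hello; r : 'hello' ID ; ID : [a-z]+ ; WS : [ \t\r\n]+ -> skip ;
antlr4 Hello.g4          # generate HelloLexer/HelloParser sources
javac Hello*.java        # compile against the runtime jar on CLASSPATH
grun Hello r -tree       # parse stdin with rule r; finish input with EOF (Ctrl+D)
```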

View File

@ -19,6 +19,16 @@ except you need to specify the language target, for example:
```
$ antlr4 -Dlanguage=Swift MyGrammar.g4
```
If you integrate this as a build step inside Xcode, then you should use the
"gnu" message format to have any error messages parsed by Xcode. You may
also want to use the `-o` option to put the autogenerated files in a
separate subdirectory.
```
antlr4 -Dlanguage=Swift -message-format gnu -o Autogen MyGrammar.g4
```
For a full list of antlr4 tool options, please visit the
[tool documentation page](tool-options.md).
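
For the Xcode integration mentioned above, a hedged sketch of what a "Run Script" build phase could invoke (the `Grammar` source folder, `Autogen` output directory, and jar location are assumptions, not part of this change):

```bash
# Hypothetical Run Script phase; ${SRCROOT} is provided by Xcode.
cd "${SRCROOT}/Grammar"
java -jar /usr/local/lib/antlr-4.7-complete.jar \
  -Dlanguage=Swift -message-format gnu -o "${SRCROOT}/Sources/Autogen" MyGrammar.g4
```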

pom.xml
View File

@ -130,6 +130,20 @@
<directory>test</directory>
</testResource>
</testResources>
<plugins>
<plugin>
<artifactId>maven-clean-plugin</artifactId>
<version>3.0.0</version>
<configuration>
<filesets>
<fileset>
<directory>runtime/Swift/.build</directory>
<directory>runtime/Swift/Tests/Antlr4Tests/gen</directory>
</fileset>
</filesets>
</configuration>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<plugin>

View File

@ -72,7 +72,7 @@ TokenStartColumnEquals(i) ::= <%self._tokenStartCharPositionInLine == <i>%>
ImportListener(X) ::= ""
GetExpectedTokenNames() ::= "try self.getExpectedTokens().toString(self.tokenNames)"
GetExpectedTokenNames() ::= "try self.getExpectedTokens().toString(self.getVocabulary())"
RuleInvocationStack() ::= "getRuleInvocationStack().description.replacingOccurrences(of: \"\\\"\", with: \"\")"

View File

@ -25,7 +25,8 @@ project(LIBANTLR4)
if(CMAKE_VERSION VERSION_EQUAL "3.0.0" OR
CMAKE_VERSION VERSION_GREATER "3.0.0")
CMAKE_POLICY(SET CMP0026 OLD)
CMAKE_POLICY(SET CMP0026 NEW)
CMAKE_POLICY(SET CMP0054 OLD)
CMAKE_POLICY(SET CMP0045 OLD)
CMAKE_POLICY(SET CMP0042 OLD)
endif()

View File

@ -206,11 +206,13 @@ macro(antlr4cpp_process_grammar
# export generated cpp files into list
foreach(generated_file ${generated_files})
list(APPEND antlr4cpp_src_files_${antlr4cpp_project_namespace} ${generated_file})
if (NOT CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
set_source_files_properties(
${generated_file}
PROPERTIES
COMPILE_FLAGS -Wno-overloaded-virtual
)
endif ()
endforeach(generated_file)
message(STATUS "Antlr4Cpp ${antlr4cpp_project_namespace} Generated: ${generated_files}")

View File

@ -1,6 +1,6 @@
# -*- mode:cmake -*-
if(NOT UNIX)
message(FATAL "Unsupported operating system")
message(WARNING "Unsupported operating system")
endif()
set(antlr4-demo-GENERATED_SRC
@ -45,11 +45,17 @@ set(antlr4-demo_SRC
${antlr4-demo-GENERATED_SRC}
)
if (NOT CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
set (flags_1 "-Wno-overloaded-virtual")
else()
set (flags_1 "-MP /wd4251")
endif()
foreach( src_file ${antlr4-demo_SRC} )
set_source_files_properties(
${src_file}
PROPERTIES
COMPILE_FLAGS -Wno-overloaded-virtual
COMPILE_FLAGS "${COMPILE_FLAGS} ${flags_1}"
)
endforeach( src_file ${antlr4-demo_SRC} )
@ -58,6 +64,10 @@ add_executable(antlr4-demo
)
#add_precompiled_header(antlr4-demo ${PROJECT_SOURCE_DIR}/runtime/src/antlrcpp-Prefix.h)
if (CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
target_compile_options(antlr4-demo PRIVATE "/MT$<$<CONFIG:Debug>:d>")
endif()
add_dependencies(antlr4-demo GenerateParser)
target_link_libraries(antlr4-demo antlr4_static)

View File

@ -44,11 +44,13 @@ elseif(APPLE)
target_link_libraries(antlr4_static ${COREFOUNDATION_LIBRARY})
endif()
if (MSVC_VERSION)
if (CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
set(disabled_compile_warnings "/wd4251")
else()
set(disabled_compile_warnings "-Wno-overloaded-virtual")
endif ()
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
set(disabled_compile_warnings "${disabled_compile_warnings} -Wno-dollar-in-identifier-extension -Wno-four-char-constants")
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
@ -61,16 +63,21 @@ if (WIN32)
set(extra_share_compile_flags "-DANTLR4CPP_EXPORTS")
set(extra_static_compile_flags "-DANTLR4CPP_STATIC")
endif(WIN32)
if (MSVC_VERSION)
if (CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
target_compile_options(antlr4_shared PRIVATE "/MD$<$<CONFIG:Debug>:d>")
target_compile_options(antlr4_static PRIVATE "/MT$<$<CONFIG:Debug>:d>")
endif()
set(static_lib_suffix "")
if (MSVC_VERSION)
if (CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
set(static_lib_suffix "-static")
endif()
if (CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
set(extra_share_compile_flags "-DANTLR4CPP_EXPORTS -MP /wd4251")
set(extra_static_compile_flags "-DANTLR4CPP_STATIC -MP")
endif()
set_target_properties(antlr4_shared
PROPERTIES VERSION ${ANTLR_VERSION}
SOVERSION ${ANTLR_VERSION}

View File

@ -7,6 +7,7 @@
#include <algorithm>
#include <assert.h>
#include <atomic>
#include <codecvt>
#include <chrono>
#include <fstream>

View File

@ -87,21 +87,21 @@ misc::IntervalSet ATN::nextTokens(ATNState *s, RuleContext *ctx) const {
}
misc::IntervalSet& ATN::nextTokens(ATNState *s) const {
if (!s->nextTokenWithinRule.isReadOnly()) {
misc::IntervalSet const& ATN::nextTokens(ATNState *s) const {
if (!s->_nextTokenUpdated) {
std::unique_lock<std::mutex> lock { _mutex };
if (!s->nextTokenWithinRule.isReadOnly()) {
s->nextTokenWithinRule = nextTokens(s, nullptr);
s->nextTokenWithinRule.setReadOnly(true);
if (!s->_nextTokenUpdated) {
s->_nextTokenWithinRule = nextTokens(s, nullptr);
s->_nextTokenUpdated = true;
}
}
return s->nextTokenWithinRule;
return s->_nextTokenWithinRule;
}
void ATN::addState(ATNState *state) {
if (state != nullptr) {
//state->atn = this;
state->stateNumber = (int)states.size();
state->stateNumber = static_cast<int>(states.size());
}
states.push_back(state);
@ -114,7 +114,7 @@ void ATN::removeState(ATNState *state) {
int ATN::defineDecisionState(DecisionState *s) {
decisionToState.push_back(s);
s->decision = (int)decisionToState.size() - 1;
s->decision = static_cast<int>(decisionToState.size() - 1);
return s->decision;
}
@ -154,7 +154,7 @@ misc::IntervalSet ATN::getExpectedTokens(size_t stateNumber, RuleContext *contex
if (ctx->parent == nullptr) {
break;
}
ctx = (RuleContext *)ctx->parent;
ctx = static_cast<RuleContext *>(ctx->parent);
}
if (following.contains(Token::EPSILON)) {

View File

@ -70,7 +70,7 @@ namespace atn {
/// staying in same rule. <seealso cref="Token#EPSILON"/> is in set if we reach end of
/// rule.
/// </summary>
virtual misc::IntervalSet& nextTokens(ATNState *s) const;
virtual misc::IntervalSet const& nextTokens(ATNState *s) const;
virtual void addState(ATNState *state);

View File

@ -70,12 +70,17 @@ namespace atn {
///
/// <embed src="images/OptionalNonGreedy.svg" type="image/svg+xml"/>
/// </summary>
class ANTLR4CPP_PUBLIC ATN;
class ANTLR4CPP_PUBLIC ATNState {
public:
ATNState();
ATNState(ATNState const&) = delete;
virtual ~ATNState();
ATNState& operator=(ATNState const&) = delete;
static const size_t INITIAL_NUM_TRANSITIONS = 4;
static const size_t INVALID_STATE_NUMBER = static_cast<size_t>(-1); // std::numeric_limits<size_t>::max();
@ -102,9 +107,6 @@ namespace atn {
bool epsilonOnlyTransitions = false;
public:
/// Used to cache lookahead during parsing, not used during construction.
misc::IntervalSet nextTokenWithinRule;
virtual size_t hashCode();
bool operator == (const ATNState &other);
@ -117,6 +119,14 @@ namespace atn {
virtual void addTransition(size_t index, Transition *e);
virtual Transition* removeTransition(size_t index);
virtual size_t getStateType() = 0;
private:
/// Used to cache lookahead during parsing, not used during construction.
misc::IntervalSet _nextTokenWithinRule;
std::atomic<bool> _nextTokenUpdated { false };
friend class ATN;
};
} // namespace atn

View File

@ -181,23 +181,21 @@ size_t LexerATNSimulator::execATN(CharStream *input, dfa::DFAState *ds0) {
}
dfa::DFAState *LexerATNSimulator::getExistingTargetState(dfa::DFAState *s, size_t t) {
if (s->edges.empty()|| /*t < MIN_DFA_EDGE ||*/ t > MAX_DFA_EDGE) { // MIN_DFA_EDGE is 0, hence code gives a warning, if left in.
return nullptr;
}
dfa::DFAState* retval = nullptr;
_edgeLock.readLock();
if (t <= MAX_DFA_EDGE) {
auto iterator = s->edges.find(t - MIN_DFA_EDGE);
#if DEBUG_ATN == 1
if (iterator != s->edges.end()) {
std::cout << std::string("reuse state ") << s->stateNumber << std::string(" edge to ") << iterator->second->stateNumber << std::endl;
}
#endif
if (iterator != s->edges.end())
retval = iterator->second;
}
_edgeLock.readUnlock();
if (iterator == s->edges.end())
return nullptr;
return iterator->second;
return retval;
}
dfa::DFAState *LexerATNSimulator::computeTargetState(CharStream *input, dfa::DFAState *s, size_t t) {

View File

@ -86,6 +86,7 @@ namespace atn {
LexerATNSimulator(const ATN &atn, std::vector<dfa::DFA> &decisionToDFA, PredictionContextCache &sharedContextCache);
LexerATNSimulator(Lexer *recog, const ATN &atn, std::vector<dfa::DFA> &decisionToDFA, PredictionContextCache &sharedContextCache);
virtual ~LexerATNSimulator () {}
virtual void copyState(LexerATNSimulator *simulator);
virtual size_t match(CharStream *input, size_t mode);

View File

@ -101,7 +101,7 @@ size_t LexerActionExecutor::generateHashCode() const {
for (auto lexerAction : _lexerActions) {
hash = MurmurHash::update(hash, lexerAction);
}
MurmurHash::finish(hash, _lexerActions.size());
hash = MurmurHash::finish(hash, _lexerActions.size());
return hash;
}

View File

@ -257,14 +257,12 @@ size_t ParserATNSimulator::execATN(dfa::DFA &dfa, dfa::DFAState *s0, TokenStream
}
dfa::DFAState *ParserATNSimulator::getExistingTargetState(dfa::DFAState *previousD, size_t t) {
dfa::DFAState* retval;
_edgeLock.readLock();
auto iterator = previousD->edges.find(t);
retval = (iterator == previousD->edges.end()) ? nullptr : iterator->second;
_edgeLock.readUnlock();
if (iterator == previousD->edges.end()) {
return nullptr;
}
return iterator->second;
return retval;
}
dfa::DFAState *ParserATNSimulator::computeTargetState(dfa::DFA &dfa, dfa::DFAState *previousD, size_t t) {

View File

@ -7,8 +7,6 @@
using namespace antlr4::misc;
Interval::~Interval() = default;
size_t antlr4::misc::numericToSymbol(ssize_t v) {
return static_cast<size_t>(v);
}

View File

@ -28,57 +28,54 @@ namespace misc {
Interval();
explicit Interval(size_t a_, size_t b_); // For unsigned -> signed mappings.
Interval(ssize_t a_, ssize_t b_);
Interval(Interval const&) = default;
virtual ~Interval();
Interval& operator=(Interval const&) = default;
/// return number of elements between a and b inclusively. x..x is length 1.
/// if b < a, then length is 0. 9..10 has length 2.
virtual size_t length() const;
size_t length() const;
bool operator == (const Interval &other) const;
virtual size_t hashCode() const;
size_t hashCode() const;
/// <summary>
/// Does this start completely before other? Disjoint </summary>
virtual bool startsBeforeDisjoint(const Interval &other) const;
bool startsBeforeDisjoint(const Interval &other) const;
/// <summary>
/// Does this start at or before other? Nondisjoint </summary>
virtual bool startsBeforeNonDisjoint(const Interval &other) const;
bool startsBeforeNonDisjoint(const Interval &other) const;
/// <summary>
/// Does this.a start after other.b? May or may not be disjoint </summary>
virtual bool startsAfter(const Interval &other) const;
bool startsAfter(const Interval &other) const;
/// <summary>
/// Does this start completely after other? Disjoint </summary>
virtual bool startsAfterDisjoint(const Interval &other) const;
bool startsAfterDisjoint(const Interval &other) const;
/// <summary>
/// Does this start after other? NonDisjoint </summary>
virtual bool startsAfterNonDisjoint(const Interval &other) const;
bool startsAfterNonDisjoint(const Interval &other) const;
/// <summary>
/// Are both ranges disjoint? I.e., no overlap? </summary>
virtual bool disjoint(const Interval &other) const;
bool disjoint(const Interval &other) const;
/// <summary>
/// Are two intervals adjacent such as 0..41 and 42..42? </summary>
virtual bool adjacent(const Interval &other) const;
bool adjacent(const Interval &other) const;
virtual bool properlyContains(const Interval &other) const;
bool properlyContains(const Interval &other) const;
/// <summary>
/// Return the interval computed from combining this and other </summary>
virtual Interval Union(const Interval &other) const;
Interval Union(const Interval &other) const;
/// <summary>
/// Return the interval in common between this and o </summary>
virtual Interval intersection(const Interval &other) const;
Interval intersection(const Interval &other) const;
virtual std::string toString() const;
std::string toString() const;
private:
};

View File

@ -13,52 +13,32 @@
using namespace antlr4;
using namespace antlr4::misc;
IntervalSet const IntervalSet::COMPLETE_CHAR_SET = []() {
IntervalSet complete = IntervalSet::of(Lexer::MIN_CHAR_VALUE, Lexer::MAX_CHAR_VALUE);
complete.setReadOnly(true);
return complete;
}();
IntervalSet const IntervalSet::COMPLETE_CHAR_SET =
IntervalSet::of(Lexer::MIN_CHAR_VALUE, Lexer::MAX_CHAR_VALUE);
IntervalSet const IntervalSet::EMPTY_SET = []() {
IntervalSet empty;
empty.setReadOnly(true);
return empty;
}();
IntervalSet const IntervalSet::EMPTY_SET;
IntervalSet::IntervalSet() {
InitializeInstanceFields();
}
IntervalSet::IntervalSet(const std::vector<Interval> &intervals) : IntervalSet() {
_intervals = intervals;
IntervalSet::IntervalSet() : _intervals() {
}
IntervalSet::IntervalSet(const IntervalSet &set) : IntervalSet() {
addAll(set);
_intervals = set._intervals;
}
IntervalSet::IntervalSet(int n, ...) : IntervalSet() {
va_list vlist;
va_start(vlist, n);
for (int i = 0; i < n; i++) {
add(va_arg(vlist, int));
}
IntervalSet::IntervalSet(IntervalSet&& set) : IntervalSet(std::move(set._intervals)) {
}
IntervalSet::~IntervalSet()
{
IntervalSet::IntervalSet(std::vector<Interval>&& intervals) : _intervals(std::move(intervals)) {
}
IntervalSet& IntervalSet::operator=(const IntervalSet& other)
{
if (_readonly) {
throw IllegalStateException("can't alter read only IntervalSet");
IntervalSet& IntervalSet::operator=(const IntervalSet& other) {
_intervals = other._intervals;
return *this;
}
_intervals.clear();
return addAll(other);
IntervalSet& IntervalSet::operator=(IntervalSet&& other) {
_intervals = move(other._intervals);
return *this;
}
IntervalSet IntervalSet::of(ssize_t a) {
@ -70,16 +50,10 @@ IntervalSet IntervalSet::of(ssize_t a, ssize_t b) {
}
void IntervalSet::clear() {
if (_readonly) {
throw IllegalStateException("can't alter read only IntervalSet");
}
_intervals.clear();
}
void IntervalSet::add(ssize_t el) {
if (_readonly) {
throw IllegalStateException("can't alter read only IntervalSet");
}
add(el, el);
}
@ -88,10 +62,6 @@ void IntervalSet::add(ssize_t a, ssize_t b) {
}
void IntervalSet::add(const Interval &addition) {
if (_readonly) {
throw IllegalStateException("can't alter read only IntervalSet");
}
if (addition.b < addition.a) {
return;
}
@ -150,7 +120,7 @@ IntervalSet IntervalSet::Or(const std::vector<IntervalSet> &sets) {
IntervalSet& IntervalSet::addAll(const IntervalSet &set) {
// walk set and add each interval
for (auto &interval : set._intervals) {
for (auto const& interval : set._intervals) {
add(interval);
}
return *this;
@ -339,7 +309,7 @@ ssize_t IntervalSet::getMinElement() const {
return _intervals[0].a;
}
std::vector<Interval> IntervalSet::getIntervals() const {
std::vector<Interval> const& IntervalSet::getIntervals() const {
return _intervals;
}
@ -516,10 +486,6 @@ void IntervalSet::remove(size_t el) {
}
void IntervalSet::remove(ssize_t el) {
if (_readonly) {
throw IllegalStateException("can't alter read only IntervalSet");
}
for (size_t i = 0; i < _intervals.size(); ++i) {
Interval &interval = _intervals[i];
ssize_t a = interval.a;
@ -553,17 +519,3 @@ void IntervalSet::remove(ssize_t el) {
}
}
}
bool IntervalSet::isReadOnly() const {
return _readonly;
}
void IntervalSet::setReadOnly(bool readonly) {
if (_readonly && !readonly)
throw IllegalStateException("Can't alter readonly IntervalSet");
_readonly = readonly;
}
void IntervalSet::InitializeInstanceFields() {
_readonly = false;
}

View File

@ -6,7 +6,7 @@
#pragma once
#include "misc/Interval.h"
#include <atomic>
#include "Exceptions.h"
namespace antlr4 {
namespace misc {
@ -28,20 +28,27 @@ namespace misc {
static IntervalSet const COMPLETE_CHAR_SET;
static IntervalSet const EMPTY_SET;
protected:
private:
/// The list of sorted, disjoint intervals.
std::vector<Interval> _intervals;
std::atomic<bool> _readonly;
explicit IntervalSet(std::vector<Interval>&& intervals);
public:
IntervalSet();
IntervalSet(const std::vector<Interval> &intervals);
IntervalSet(const IntervalSet &set);
IntervalSet(int numArgs, ...);
IntervalSet(IntervalSet const& set);
IntervalSet(IntervalSet&& set);
virtual ~IntervalSet();
template<typename T1, typename... T_NEXT>
IntervalSet(int, T1 t1, T_NEXT&&... next) : IntervalSet()
{
// The first int argument is an ignored count for compatibility
// with the previous varargs based interface.
addItems(t1, std::forward<T_NEXT>(next)...);
}
IntervalSet& operator=(const IntervalSet &set);
IntervalSet& operator=(IntervalSet const& set);
IntervalSet& operator=(IntervalSet&& set);
/// Create a set with a single element, el.
static IntervalSet of(ssize_t a);
@ -49,11 +56,11 @@ namespace misc {
/// Create a set with all ints within range [a..b] (inclusive)
static IntervalSet of(ssize_t a, ssize_t b);
virtual void clear();
void clear();
/// Add a single element to the set. An isolated element is stored
/// as a range el..el.
virtual void add(ssize_t el);
void add(ssize_t el);
/// Add interval; i.e., add all integers from a to b to set.
/// If b<a, do nothing.
@ -61,30 +68,36 @@ namespace misc {
/// If overlap, combine ranges. For example,
/// If this is {1..5, 10..20}, adding 6..7 yields
/// {1..5, 6..7, 10..20}. Adding 4..8 yields {1..8, 10..20}.
virtual void add(ssize_t a, ssize_t b);
void add(ssize_t a, ssize_t b);
public:
/// combine all sets in the array returned the or'd value
static IntervalSet Or(const std::vector<IntervalSet> &sets);
// Copy on write so we can cache a..a intervals and sets of that.
virtual void add(const Interval &addition);
virtual IntervalSet& addAll(const IntervalSet &set);
void add(const Interval &addition);
IntervalSet& addAll(const IntervalSet &set);
virtual IntervalSet complement(ssize_t minElement, ssize_t maxElement) const;
template<typename T1, typename... T_NEXT>
void addItems(T1 t1, T_NEXT&&... next)
{
add(t1);
addItems(std::forward<T_NEXT>(next)...);
}
IntervalSet complement(ssize_t minElement, ssize_t maxElement) const;
/// Given the set of possible values (rather than, say UNICODE or MAXINT),
/// return a new set containing all elements in vocabulary, but not in
/// this. The computation is (vocabulary - this).
///
/// 'this' is assumed to be either a subset or equal to vocabulary.
virtual IntervalSet complement(const IntervalSet &vocabulary) const;
IntervalSet complement(const IntervalSet &vocabulary) const;
/// Compute this-other via this&~other.
/// Return a new set containing all elements in this but not in other.
/// other is assumed to be a subset of this;
/// anything that is in other but not in this will be ignored.
virtual IntervalSet subtract(const IntervalSet &other) const;
IntervalSet subtract(const IntervalSet &other) const;
/**
* Compute the set difference between two interval sets. The specific
@ -93,23 +106,23 @@ namespace misc {
*/
static IntervalSet subtract(const IntervalSet &left, const IntervalSet &right);
virtual IntervalSet Or(const IntervalSet &a) const;
IntervalSet Or(const IntervalSet &a) const;
/// Return a new set with the intersection of this set with other. Because
/// the intervals are sorted, we can use an iterator for each list and
/// just walk them together. This is roughly O(min(n,m)) for interval
/// list lengths n and m.
virtual IntervalSet And(const IntervalSet &other) const;
IntervalSet And(const IntervalSet &other) const;
/// Is el in any range of this set?
virtual bool contains(size_t el) const; // For mapping of e.g. Token::EOF to -1 etc.
virtual bool contains(ssize_t el) const;
bool contains(size_t el) const; // For mapping of e.g. Token::EOF to -1 etc.
bool contains(ssize_t el) const;
/// return true if this set has no members
virtual bool isEmpty() const;
bool isEmpty() const;
/// If this set is a single integer, return it otherwise Token.INVALID_TYPE.
virtual ssize_t getSingleElement() const;
ssize_t getSingleElement() const;
/**
* Returns the maximum value contained in the set.
@ -117,7 +130,7 @@ namespace misc {
* @return the maximum value contained in the set. If the set is empty, this
* method returns {@link Token#INVALID_TYPE}.
*/
virtual ssize_t getMaxElement() const;
ssize_t getMaxElement() const;
/**
* Returns the minimum value contained in the set.
@ -125,50 +138,48 @@ namespace misc {
* @return the minimum value contained in the set. If the set is empty, this
* method returns {@link Token#INVALID_TYPE}.
*/
virtual ssize_t getMinElement() const;
ssize_t getMinElement() const;
/// <summary>
/// Return a list of Interval objects. </summary>
virtual std::vector<Interval> getIntervals() const;
std::vector<Interval> const& getIntervals() const;
virtual size_t hashCode() const;
size_t hashCode() const;
/// Are two IntervalSets equal? Because all intervals are sorted
/// and disjoint, equals is a simple linear walk over both lists
/// to make sure they are the same.
bool operator == (const IntervalSet &other) const;
virtual std::string toString() const;
virtual std::string toString(bool elemAreChar) const;
std::string toString() const;
std::string toString(bool elemAreChar) const;
/**
* @deprecated Use {@link #toString(Vocabulary)} instead.
*/
virtual std::string toString(const std::vector<std::string> &tokenNames) const;
virtual std::string toString(const dfa::Vocabulary &vocabulary) const;
std::string toString(const std::vector<std::string> &tokenNames) const;
std::string toString(const dfa::Vocabulary &vocabulary) const;
protected:
/**
* @deprecated Use {@link #elementName(Vocabulary, int)} instead.
*/
virtual std::string elementName(const std::vector<std::string> &tokenNames, ssize_t a) const;
virtual std::string elementName(const dfa::Vocabulary &vocabulary, ssize_t a) const;
std::string elementName(const std::vector<std::string> &tokenNames, ssize_t a) const;
std::string elementName(const dfa::Vocabulary &vocabulary, ssize_t a) const;
public:
virtual size_t size() const;
virtual std::vector<ssize_t> toList() const;
virtual std::set<ssize_t> toSet() const;
size_t size() const;
std::vector<ssize_t> toList() const;
std::set<ssize_t> toSet() const;
/// Get the ith element of ordered set. Used only by RandomPhrase so
/// don't bother to implement if you're not doing that for a new
/// ANTLR code gen target.
virtual ssize_t get(size_t i) const;
virtual void remove(size_t el); // For mapping of e.g. Token::EOF to -1 etc.
virtual void remove(ssize_t el);
virtual bool isReadOnly() const;
virtual void setReadOnly(bool readonly);
ssize_t get(size_t i) const;
void remove(size_t el); // For mapping of e.g. Token::EOF to -1 etc.
void remove(ssize_t el);
private:
void InitializeInstanceFields();
void addItems() { /* No-op */ }
};
} // namespace atn

View File

@ -353,12 +353,11 @@ func PrintArrayJavaStyle(sa []string) string {
return buffer.String()
}
// murmur hash
const (
c1_32 = 0xCC9E2D51
c2_32 = 0x1B873593
n1_32 = 0xE6546B64
c1_32 uint = 0xCC9E2D51
c2_32 uint = 0x1B873593
n1_32 uint = 0xE6546B64
)
func murmurInit(seed int) int {
@ -366,23 +365,25 @@ func murmurInit(seed int) int {
}
func murmurUpdate(h1 int, k1 int) int {
k1 *= c1_32
k1 = (k1 << 15) | (k1 >> 17) // rotl32(k1, 15)
k1 *= c2_32
var k1u uint
k1u = uint(k1) * c1_32
k1u = (k1u << 15) | (k1u >> 17) // rotl32(k1u, 15)
k1u *= c2_32
h1 ^= k1
h1 = (h1 << 13) | (h1 >> 19) // rotl32(h1, 13)
h1 = h1*5 + 0xe6546b64
return h1
var h1u = uint(h1) ^ k1u
h1u = (h1u << 13) | (h1u >> 19) // rotl32(h1u, 13)
h1u = h1u*5 + 0xe6546b64
return int(h1u)
}
func murmurFinish(h1 int, numberOfWords int) int {
h1 ^= (numberOfWords * 4)
h1 ^= h1 >> 16
h1 *= 0x85ebca6b
h1 ^= h1 >> 13
h1 *= 0xc2b2ae35
h1 ^= h1 >> 16
var h1u uint = uint(h1)
h1u ^= uint(numberOfWords * 4)
h1u ^= h1u >> 16
h1u *= uint(0x85ebca6b)
h1u ^= h1u >> 13
h1u *= 0xc2b2ae35
h1u ^= h1u >> 16
return h1
return int(h1u)
}

View File

@ -270,7 +270,7 @@ public class ParserATNSimulator extends ATNSimulator {
public static final boolean retry_debug = false;
/** Just in case this optimization is bad, add an ENV variable to turn it off */
public static final boolean TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT = Boolean.parseBoolean(System.getenv("TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT"));
public static final boolean TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT = Boolean.parseBoolean(getSafeEnv("TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT"));
protected final Parser parser;
@ -2181,4 +2181,14 @@ public class ParserATNSimulator extends ATNSimulator {
public Parser getParser() {
return parser;
}
public static String getSafeEnv(String envName) {
try {
return System.getenv(envName);
}
catch(SecurityException e) {
// use the default value
}
return null;
}
}

View File

@ -93,7 +93,7 @@ public protocol ANTLRErrorListener: class {
_ stopIndex: Int,
_ exact: Bool,
_ ambigAlts: BitSet,
_ configs: ATNConfigSet) throws
_ configs: ATNConfigSet)
///
/// This method is called when an SLL conflict occurs and the parser is about
@ -123,7 +123,7 @@ public protocol ANTLRErrorListener: class {
_ startIndex: Int,
_ stopIndex: Int,
_ conflictingAlts: BitSet?,
_ configs: ATNConfigSet) throws
_ configs: ATNConfigSet)
///
/// This method is called by the parser when a full-context prediction has a
@ -168,5 +168,5 @@ public protocol ANTLRErrorListener: class {
_ startIndex: Int,
_ stopIndex: Int,
_ prediction: Int,
_ configs: ATNConfigSet) throws
_ configs: ATNConfigSet)
}

View File

@ -63,7 +63,7 @@ public protocol ANTLRErrorStrategy {
/// - throws: _RecognitionException_ if the error strategy could not recover from
/// the recognition exception
///
func recover(_ recognizer: Parser, _ e: AnyObject) throws
func recover(_ recognizer: Parser, _ e: RecognitionException) throws
///
/// This method provides the error handler with an opportunity to handle
@ -115,5 +115,5 @@ public protocol ANTLRErrorStrategy {
/// - parameter recognizer: the parser instance
/// - parameter e: the recognition exception to report
///
func reportError(_ recognizer: Parser, _ e: AnyObject)
func reportError(_ recognizer: Parser, _ e: RecognitionException)
}

View File

@ -40,8 +40,8 @@ public class BailErrorStrategy: DefaultErrorStrategy {
/// rule function catches. Use _Exception#getCause()_ to get the
/// original _org.antlr.v4.runtime.RecognitionException_.
///
override public func recover(_ recognizer: Parser, _ e: AnyObject) throws {
var context: ParserRuleContext? = recognizer.getContext()
override public func recover(_ recognizer: Parser, _ e: RecognitionException) throws {
var context = recognizer.getContext()
while let contextWrap = context {
contextWrap.exception = e
context = (contextWrap.getParent() as? ParserRuleContext)
@ -56,15 +56,14 @@ public class BailErrorStrategy: DefaultErrorStrategy {
///
override
public func recoverInline(_ recognizer: Parser) throws -> Token {
let e: InputMismatchException = try InputMismatchException(recognizer)
var context: ParserRuleContext? = recognizer.getContext()
let e = InputMismatchException(recognizer)
var context = recognizer.getContext()
while let contextWrap = context {
contextWrap.exception = e
context = (contextWrap.getParent() as? ParserRuleContext)
}
throw ANTLRException.recognition(e: e)
}
///

View File

@ -33,7 +33,7 @@ open class BaseErrorListener: ANTLRErrorListener {
_ stopIndex: Int,
_ exact: Bool,
_ ambigAlts: BitSet,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
}
@ -42,7 +42,7 @@ open class BaseErrorListener: ANTLRErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ conflictingAlts: BitSet?,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
}
@ -51,6 +51,6 @@ open class BaseErrorListener: ANTLRErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ prediction: Int,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
}
}

View File

@ -176,7 +176,7 @@ public class BufferedTokenStream: TokenStream {
let index = tokens.count - 1
throw ANTLRError.indexOutOfBounds(msg: "token index \(i) out of range 0..\(index)")
}
return tokens[i] //tokens[i]
return tokens[i]
}
///
@ -202,7 +202,6 @@ public class BufferedTokenStream: TokenStream {
return subset
}
//TODO: LT(i)!.getType();
public func LA(_ i: Int) throws -> Int {
return try LT(i)!.getType()
}
@ -273,11 +272,11 @@ public class BufferedTokenStream: TokenStream {
fetchedEOF = false
}
public func getTokens() -> Array<Token> {
public func getTokens() -> [Token] {
return tokens
}
public func getTokens(_ start: Int, _ stop: Int) throws -> Array<Token>? {
public func getTokens(_ start: Int, _ stop: Int) throws -> [Token]? {
return try getTokens(start, stop, nil)
}
@ -286,39 +285,35 @@ public class BufferedTokenStream: TokenStream {
/// the token type BitSet. Return null if no tokens were found. This
/// method looks at both on and off channel tokens.
///
public func getTokens(_ start: Int, _ stop: Int, _ types: Set<Int>?) throws -> Array<Token>? {
public func getTokens(_ start: Int, _ stop: Int, _ types: Set<Int>?) throws -> [Token]? {
try lazyInit()
if start < 0 || stop >= tokens.count ||
stop < 0 || start >= tokens.count {
throw ANTLRError.indexOutOfBounds(msg: "start \(start) or stop \(stop) not in 0..\(tokens.count - 1)")
if start < 0 || start >= tokens.count ||
stop < 0 || stop >= tokens.count {
throw ANTLRError.indexOutOfBounds(msg: "start \(start) or stop \(stop) not in 0...\(tokens.count - 1)")
}
if start > stop {
return nil
}
var filteredTokens: Array<Token> = Array<Token>()
var filteredTokens = [Token]()
for i in start...stop {
let t: Token = tokens[i]
let t = tokens[i]
if let types = types, !types.contains(t.getType()) {
}else {
}
else {
filteredTokens.append(t)
}
}
if filteredTokens.isEmpty {
return nil
//filteredTokens = nil;
}
return filteredTokens
}
public func getTokens(_ start: Int, _ stop: Int, _ ttype: Int) throws -> Array<Token>? {
//TODO Set<Int> initialCapacity
var s: Set<Int> = Set<Int>()
public func getTokens(_ start: Int, _ stop: Int, _ ttype: Int) throws -> [Token]? {
var s = Set<Int>()
s.insert(ttype)
//s.append(ttype);
return try getTokens(start, stop, s)
}
@ -464,7 +459,7 @@ public class BufferedTokenStream: TokenStream {
}
}
}
if hidden.count == 0 {
if hidden.isEmpty {
return nil
}
return hidden

View File

@ -25,5 +25,5 @@ public protocol CharStream: IntStream {
/// - throws: _ANTLRError.unsupportedOperation_ if the stream does not support
/// getting the text of the specified interval
///
func getText(_ interval: Interval) -> String
func getText(_ interval: Interval) throws -> String
}

View File

@ -7,12 +7,6 @@
public class CommonToken: WritableToken {
///
/// An empty _org.antlr.v4.runtime.misc.Pair_ which is used as the default value of
/// _#source_ for tokens that do not have a source.
///
internal static let EMPTY_SOURCE: (TokenSource?, CharStream?) = (nil, nil)
///
/// This is the backing field for _#getType_ and _#setType_.
///
@ -21,20 +15,20 @@ public class CommonToken: WritableToken {
///
/// This is the backing field for _#getLine_ and _#setLine_.
///
internal var line: Int = 0
internal var line = 0
///
/// This is the backing field for _#getCharPositionInLine_ and
/// _#setCharPositionInLine_.
///
internal var charPositionInLine: Int = -1
internal var charPositionInLine = -1
// set to invalid position
///
/// This is the backing field for _#getChannel_ and
/// _#setChannel_.
///
internal var channel: Int = DEFAULT_CHANNEL
internal var channel = DEFAULT_CHANNEL
///
/// This is the backing field for _#getTokenSource_ and
@ -47,7 +41,7 @@ public class CommonToken: WritableToken {
/// _org.antlr.v4.runtime.misc.Pair_ containing these values.
///
internal var source: (TokenSource?, CharStream?)
internal let source: TokenSourceAndStream
///
/// This is the backing field for _#getText_ when the token text is
@ -61,19 +55,19 @@ public class CommonToken: WritableToken {
/// This is the backing field for _#getTokenIndex_ and
/// _#setTokenIndex_.
///
internal var index: Int = -1
internal var index = -1
///
/// This is the backing field for _#getStartIndex_ and
/// _#setStartIndex_.
///
internal var start: Int = 0
internal var start = 0
///
/// This is the backing field for _#getStopIndex_ and
/// _#setStopIndex_.
///
internal var stop: Int = 0
internal var stop = 0
///
/// Constructs a new _org.antlr.v4.runtime.CommonToken_ with the specified token type.
@ -85,16 +79,16 @@ public class CommonToken: WritableToken {
public init(_ type: Int) {
self.type = type
self.source = CommonToken.EMPTY_SOURCE
self.source = TokenSourceAndStream.EMPTY
}
public init(_ source: (TokenSource?, CharStream?), _ type: Int, _ channel: Int, _ start: Int, _ stop: Int) {
public init(_ source: TokenSourceAndStream, _ type: Int, _ channel: Int, _ start: Int, _ stop: Int) {
self.source = source
self.type = type
self.channel = channel
self.start = start
self.stop = stop
if let tsource = source.0 {
if let tsource = source.tokenSource {
self.line = tsource.getLine()
self.charPositionInLine = tsource.getCharPositionInLine()
}
@ -111,20 +105,12 @@ public class CommonToken: WritableToken {
self.type = type
self.channel = CommonToken.DEFAULT_CHANNEL
self.text = text
self.source = CommonToken.EMPTY_SOURCE
self.source = TokenSourceAndStream.EMPTY
}
///
/// Constructs a new _org.antlr.v4.runtime.CommonToken_ as a copy of another _org.antlr.v4.runtime.Token_.
///
///
/// If `oldToken` is also a _org.antlr.v4.runtime.CommonToken_ instance, the newly
/// constructed token will share a reference to the _#text_ field and
/// the _org.antlr.v4.runtime.misc.Pair_ stored in _#source_. Otherwise, _#text_ will
/// be assigned the result of calling _#getText_, and _#source_
/// will be constructed from the result of _org.antlr.v4.runtime.Token#getTokenSource_ and
/// _org.antlr.v4.runtime.Token#getInputStream_.
///
/// - parameter oldToken: The token to copy.
///
public init(_ oldToken: Token) {
@ -135,14 +121,8 @@ public class CommonToken: WritableToken {
channel = oldToken.getChannel()
start = oldToken.getStartIndex()
stop = oldToken.getStopIndex()
if oldToken is CommonToken {
text = (oldToken as! CommonToken).text
source = (oldToken as! CommonToken).source
} else {
text = oldToken.getText()
source = (oldToken.getTokenSource(), oldToken.getInputStream())
}
source = oldToken.getTokenSourceAndStream()
}
@ -157,14 +137,19 @@ public class CommonToken: WritableToken {
public func getText() -> String? {
if text != nil {
return text!
if let text = text {
return text
}
if let input = getInputStream() {
let n: Int = input.size()
let n = input.size()
if start < n && stop < n {
return input.getText(Interval.of(start, stop))
do {
return try input.getText(Interval.of(start, stop))
}
catch {
return nil
}
} else {
return "<EOF>"
}
@ -247,12 +232,16 @@ public class CommonToken: WritableToken {
public func getTokenSource() -> TokenSource? {
return source.0
return source.tokenSource
}
public func getInputStream() -> CharStream? {
return source.1
return source.stream
}
public func getTokenSourceAndStream() -> TokenSourceAndStream {
return source
}
public var description: String {
@ -260,10 +249,8 @@ public class CommonToken: WritableToken {
}
public func toString(_ r: Recognizer<ATNSimulator>?) -> String {
var channelStr: String = ""
if channel > 0 {
channelStr = ",channel=\(channel)"
}
let channelStr = (channel > 0 ? ",channel=\(channel)" : "")
var txt: String
if let tokenText = getText() {
txt = tokenText.replacingOccurrences(of: "\n", with: "\\n")
@ -272,12 +259,16 @@ public class CommonToken: WritableToken {
} else {
txt = "<no text>"
}
var typeString = "\(type)"
let typeString: String
if let r = r {
typeString = r.getVocabulary().getDisplayName(type);
typeString = r.getVocabulary().getDisplayName(type)
}
else {
typeString = "\(type)"
}
return "[@\(getTokenIndex()),\(start):\(stop)='\(txt)',<\(typeString)>\(channelStr),\(line):\(getCharPositionInLine())]"
}
public var visited: Bool {
get {
return _visited

View File

@ -65,18 +65,17 @@ public class CommonTokenFactory: TokenFactory {
}
public func create(_ source: (TokenSource?, CharStream?), _ type: Int, _ text: String?,
public func create(_ source: TokenSourceAndStream, _ type: Int, _ text: String?,
_ channel: Int, _ start: Int, _ stop: Int,
_ line: Int, _ charPositionInLine: Int) -> Token {
let t: CommonToken = CommonToken(source, type, channel, start, stop)
let t = CommonToken(source, type, channel, start, stop)
t.setLine(line)
t.setCharPositionInLine(charPositionInLine)
if text != nil {
t.setText(text!)
} else {
if let cStream = source.1 , copyText {
t.setText(cStream.getText(Interval.of(start, stop)))
if let text = text {
t.setText(text)
}
else if let cStream = source.stream, copyText {
t.setText(try! cStream.getText(Interval.of(start, stop)))
}
return t

View File

@ -39,7 +39,7 @@ public class CommonTokenStream: BufferedTokenStream {
/// The default value is _org.antlr.v4.runtime.Token#DEFAULT_CHANNEL_, which matches the
/// default channel assigned to tokens created by the lexer.
///
internal var channel: Int = CommonToken.DEFAULT_CHANNEL
internal var channel = CommonToken.DEFAULT_CHANNEL
///
/// Constructs a new _org.antlr.v4.runtime.CommonTokenStream_ using the specified token
@ -77,8 +77,8 @@ public class CommonTokenStream: BufferedTokenStream {
return nil
}
var i: Int = p
var n: Int = 1
var i = p
var n = 1
// find k good tokens looking backwards
while n <= k {
// skip off-channel tokens
@ -101,8 +101,8 @@ public class CommonTokenStream: BufferedTokenStream {
if k < 0 {
return try LB(-k)
}
var i: Int = p
var n: Int = 1 // we know tokens[p] is a good one
var i = p
var n = 1 // we know tokens[p] is a good one
// find k good tokens
while n < k {
// skip off-channel tokens, but make sure to not look past EOF
@ -119,11 +119,11 @@ public class CommonTokenStream: BufferedTokenStream {
/// Count EOF just once.
///
public func getNumberOfOnChannelTokens() throws -> Int {
var n: Int = 0
var n = 0
try fill()
let length = tokens.count
for i in 0..<length {
let t: Token = tokens[i]
let t = tokens[i]
if t.getChannel() == channel {
n += 1
}

View File

@ -90,7 +90,7 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// the exception
///
public func reportError(_ recognizer: Parser,
_ e: AnyObject) {
_ e: RecognitionException) {
// if we've already reported an error and have not matched a token
// yet successfully, don't report any errors.
if inErrorRecoveryMode(recognizer) {
@ -98,20 +98,18 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
return // don't report spurious errors
}
beginErrorCondition(recognizer)
if (e is NoViableAltException) {
try! reportNoViableAlternative(recognizer, e as! NoViableAltException);
} else {
if (e is InputMismatchException) {
reportInputMismatch(recognizer, e as! InputMismatchException);
} else {
if (e is FailedPredicateException) {
reportFailedPredicate(recognizer, e as! FailedPredicateException);
} else {
errPrint("unknown recognition error type: " + String(describing: type(of: e)));
let re = (e as! RecognitionException<ParserATNSimulator>)
recognizer.notifyErrorListeners(re.getOffendingToken(), re.message ?? "", e);
if let nvae = e as? NoViableAltException {
reportNoViableAlternative(recognizer, nvae)
}
else if let ime = e as? InputMismatchException {
reportInputMismatch(recognizer, ime)
}
else if let fpe = e as? FailedPredicateException {
reportFailedPredicate(recognizer, fpe)
}
else {
errPrint("unknown recognition error type: " + String(describing: type(of: e)))
recognizer.notifyErrorListeners(e.getOffendingToken(), e.message ?? "", e)
}
}
@ -120,7 +118,7 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// until we find one in the resynchronization set--loosely the set of tokens
/// that can follow the current rule.
///
public func recover(_ recognizer: Parser, _ e: AnyObject) throws {
public func recover(_ recognizer: Parser, _ e: RecognitionException) throws {
// print("recover in "+recognizer.getRuleInvocationStack()+
// " index="+getTokenStream(recognizer).index()+
// ", lastErrorIndex="+
@ -140,10 +138,10 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
}
lastErrorIndex = getTokenStream(recognizer).index()
if lastErrorStates == nil {
lastErrorStates = try IntervalSet()
lastErrorStates = IntervalSet()
}
try lastErrorStates!.add(recognizer.getState())
let followSet: IntervalSet = try getErrorRecoverySet(recognizer)
let followSet = getErrorRecoverySet(recognizer)
try consumeUntil(recognizer, followSet)
}
@ -195,24 +193,19 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
public func sync(_ recognizer: Parser) throws {
let s: ATNState = recognizer.getInterpreter().atn.states[recognizer.getState()]!
let s = recognizer.getInterpreter().atn.states[recognizer.getState()]!
// errPrint("sync @ "+s.stateNumber+"="+s.getClass().getSimpleName());
// If already recovering, don't try to sync
if inErrorRecoveryMode(recognizer) {
return
}
let tokens: TokenStream = getTokenStream(recognizer)
let la: Int = try tokens.LA(1)
let tokens = getTokenStream(recognizer)
let la = try tokens.LA(1)
// try cheaper subset first; might get lucky. seems to shave a wee bit off
//let set : IntervalSet = recognizer.getATN().nextTokens(s)
if try recognizer.getATN().nextTokens(s).contains(CommonToken.EPSILON) {
return
}
if try recognizer.getATN().nextTokens(s).contains(la) {
let nextToks = recognizer.getATN().nextTokens(s)
if nextToks.contains(CommonToken.EPSILON) || nextToks.contains(la) {
return
}
@ -225,15 +218,14 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
if try singleTokenDeletion(recognizer) != nil {
return
}
throw try ANTLRException.recognition(e: InputMismatchException(recognizer))
throw ANTLRException.recognition(e: InputMismatchException(recognizer))
case ATNState.PLUS_LOOP_BACK: fallthrough
case ATNState.STAR_LOOP_BACK:
// errPrint("at loop back: "+s.getClass().getSimpleName());
try reportUnwantedToken(recognizer)
let expecting: IntervalSet = try recognizer.getExpectedTokens()
let whatFollowsLoopIterationOrRule: IntervalSet =
try expecting.or(try getErrorRecoverySet(recognizer)) as! IntervalSet
reportUnwantedToken(recognizer)
let expecting = try recognizer.getExpectedTokens()
let whatFollowsLoopIterationOrRule = expecting.or(getErrorRecoverySet(recognizer)) as! IntervalSet
try consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
break
@ -253,19 +245,21 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// - parameter e: the recognition exception
///
internal func reportNoViableAlternative(_ recognizer: Parser,
_ e: NoViableAltException) throws {
let tokens: TokenStream? = getTokenStream(recognizer)
_ e: NoViableAltException) {
let tokens = getTokenStream(recognizer)
var input: String
if let tokens = tokens {
if e.getStartToken().getType() == CommonToken.EOF {
input = "<EOF>"
} else {
}
else {
do {
input = try tokens.getText(e.getStartToken(), e.getOffendingToken())
}
} else {
input = "<unknown input>"
catch {
input = "<unknown>"
}
let msg: String = "no viable alternative at input " + escapeWSAndQuote(input)
}
let msg = "no viable alternative at input " + escapeWSAndQuote(input)
recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e)
}
@ -280,7 +274,7 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
internal func reportInputMismatch(_ recognizer: Parser,
_ e: InputMismatchException) {
let msg: String = "mismatched input " + getTokenErrorDisplay(e.getOffendingToken()) +
let msg = "mismatched input " + getTokenErrorDisplay(e.getOffendingToken()) +
" expecting " + e.getExpectedTokens()!.toString(recognizer.getVocabulary())
recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e)
}
@ -296,8 +290,8 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
internal func reportFailedPredicate(_ recognizer: Parser,
_ e: FailedPredicateException) {
let ruleName: String = recognizer.getRuleNames()[recognizer._ctx!.getRuleIndex()]
let msg: String = "rule " + ruleName + " " + e.message! // e.getMessage()
let ruleName = recognizer.getRuleNames()[recognizer._ctx!.getRuleIndex()]
let msg = "rule \(ruleName) \(e.message!)"
recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e)
}
@ -319,18 +313,17 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
/// - parameter recognizer: the parser instance
///
internal func reportUnwantedToken(_ recognizer: Parser) throws {
internal func reportUnwantedToken(_ recognizer: Parser) {
if inErrorRecoveryMode(recognizer) {
return
}
beginErrorCondition(recognizer)
let t: Token = try recognizer.getCurrentToken()
let tokenName: String = getTokenErrorDisplay(t)
let expecting: IntervalSet = try getExpectedTokens(recognizer)
let msg: String = "extraneous input " + tokenName + " expecting " +
expecting.toString(recognizer.getVocabulary())
let t = try? recognizer.getCurrentToken()
let tokenName = getTokenErrorDisplay(t)
let expecting = (try? getExpectedTokens(recognizer)) ?? IntervalSet.EMPTY_SET
let msg = "extraneous input \(tokenName) expecting \(expecting.toString(recognizer.getVocabulary()))"
recognizer.notifyErrorListeners(t, msg, nil)
}
@ -351,17 +344,16 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
/// - parameter recognizer: the parser instance
///
internal func reportMissingToken(_ recognizer: Parser) throws {
internal func reportMissingToken(_ recognizer: Parser) {
if inErrorRecoveryMode(recognizer) {
return
}
beginErrorCondition(recognizer)
let t: Token = try recognizer.getCurrentToken()
let expecting: IntervalSet = try getExpectedTokens(recognizer)
let msg: String = "missing " + expecting.toString(recognizer.getVocabulary()) +
" at " + getTokenErrorDisplay(t)
let t = try? recognizer.getCurrentToken()
let expecting = (try? getExpectedTokens(recognizer)) ?? IntervalSet.EMPTY_SET
let msg = "missing \(expecting.toString(recognizer.getVocabulary())) at \(getTokenErrorDisplay(t))"
recognizer.notifyErrorListeners(t, msg, nil)
}
@ -419,23 +411,20 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
public func recoverInline(_ recognizer: Parser) throws -> Token {
// SINGLE TOKEN DELETION
let matchedSymbol: Token? = try singleTokenDeletion(recognizer)
if matchedSymbol != nil {
let matchedSymbol = try singleTokenDeletion(recognizer)
if let matchedSymbol = matchedSymbol {
// we have deleted the extra token.
// now, move past ttype token as if all were ok
try recognizer.consume()
return matchedSymbol!
return matchedSymbol
}
// SINGLE TOKEN INSERTION
if try singleTokenInsertion(recognizer) {
return try getMissingSymbol(recognizer)
}
throw try ANTLRException.recognition(e: InputMismatchException(recognizer))
// throw try ANTLRException.InputMismatch(e: InputMismatchException(recognizer) )
//RuntimeException("InputMismatchException")
// even that didn't work; must throw the exception
//throwException() /* throw InputMismatchException(recognizer); */
throw ANTLRException.recognition(e: InputMismatchException(recognizer))
}
///
@ -456,17 +445,17 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// strategy for the current mismatched input, otherwise `false`
///
internal func singleTokenInsertion(_ recognizer: Parser) throws -> Bool {
let currentSymbolType: Int = try getTokenStream(recognizer).LA(1)
let currentSymbolType = try getTokenStream(recognizer).LA(1)
// if current token is consistent with what could come after current
// ATN state, then we know we're missing a token; error recovery
// is free to conjure up and insert the missing token
let currentState: ATNState = recognizer.getInterpreter().atn.states[recognizer.getState()]!
let next: ATNState = currentState.transition(0).target
let atn: ATN = recognizer.getInterpreter().atn
let expectingAtLL2: IntervalSet = try atn.nextTokens(next, recognizer._ctx)
let currentState = recognizer.getInterpreter().atn.states[recognizer.getState()]!
let next = currentState.transition(0).target
let atn = recognizer.getInterpreter().atn
let expectingAtLL2 = atn.nextTokens(next, recognizer._ctx)
// print("LT(2) set="+expectingAtLL2.toString(recognizer.getTokenNames()));
if expectingAtLL2.contains(currentSymbolType) {
try reportMissingToken(recognizer)
reportMissingToken(recognizer)
return true
}
return false
@ -492,10 +481,10 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// `null`
///
internal func singleTokenDeletion(_ recognizer: Parser) throws -> Token? {
let nextTokenType: Int = try getTokenStream(recognizer).LA(2)
let expecting: IntervalSet = try getExpectedTokens(recognizer)
let nextTokenType = try getTokenStream(recognizer).LA(2)
let expecting = try getExpectedTokens(recognizer)
if expecting.contains(nextTokenType) {
try reportUnwantedToken(recognizer)
reportUnwantedToken(recognizer)
///
/// errPrint("recoverFromMismatchedToken deleting "+
/// ((TokenStream)getTokenStream(recognizer)).LT(1)+
@ -504,7 +493,7 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
try recognizer.consume() // simply delete extra token
// we want to return the token we're actually matching
let matchedSymbol: Token = try recognizer.getCurrentToken()
let matchedSymbol = try recognizer.getCurrentToken()
reportMatch(recognizer) // we know current token is correct
return matchedSymbol
}
@ -536,22 +525,24 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
}
internal func getMissingSymbol(_ recognizer: Parser) throws -> Token {
let currentSymbol: Token = try recognizer.getCurrentToken()
let expecting: IntervalSet = try getExpectedTokens(recognizer)
let expectedTokenType: Int = expecting.getMinElement() // get any element
let currentSymbol = try recognizer.getCurrentToken()
let expecting = try getExpectedTokens(recognizer)
let expectedTokenType = expecting.getMinElement() // get any element
var tokenText: String
if expectedTokenType == CommonToken.EOF {
tokenText = "<missing EOF>"
} else {
tokenText = "<missing " + recognizer.getVocabulary().getDisplayName(expectedTokenType) + ">"
}
var current: Token = currentSymbol
let lookback: Token? = try getTokenStream(recognizer).LT(-1)
var current = currentSymbol
let lookback = try getTokenStream(recognizer).LT(-1)
if current.getType() == CommonToken.EOF && lookback != nil {
current = lookback!
}
let token = recognizer.getTokenFactory().create((current.getTokenSource(), current.getTokenSource()!.getInputStream()), expectedTokenType, tokenText,
let token = recognizer.getTokenFactory().create(
current.getTokenSourceAndStream(),
expectedTokenType, tokenText,
CommonToken.DEFAULT_CHANNEL,
-1, -1,
current.getLine(), current.getCharPositionInLine())
@ -574,22 +565,22 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// so that it creates a new Java type.
///
internal func getTokenErrorDisplay(_ t: Token?) -> String {
if t == nil {
guard let t = t else {
return "<no token>"
}
var s: String? = getSymbolText(t!)
var s = getSymbolText(t)
if s == nil {
if getSymbolType(t!) == CommonToken.EOF {
if getSymbolType(t) == CommonToken.EOF {
s = "<EOF>"
} else {
s = "<\(getSymbolType(t!))>"
s = "<\(getSymbolType(t))>"
}
}
return escapeWSAndQuote(s!)
}
internal func getSymbolText(_ symbol: Token) -> String {
return symbol.getText()!
internal func getSymbolText(_ symbol: Token) -> String? {
return symbol.getText()
}
internal func getSymbolType(_ symbol: Token) -> Int {
@ -698,19 +689,19 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
/// Like Grosch I implement context-sensitive FOLLOW sets that are combined
/// at run-time upon error to avoid overhead during parsing.
///
internal func getErrorRecoverySet(_ recognizer: Parser) throws -> IntervalSet {
let atn: ATN = recognizer.getInterpreter().atn
internal func getErrorRecoverySet(_ recognizer: Parser) -> IntervalSet {
let atn = recognizer.getInterpreter().atn
var ctx: RuleContext? = recognizer._ctx
let recoverSet: IntervalSet = try IntervalSet()
let recoverSet = IntervalSet()
while let ctxWrap = ctx, ctxWrap.invokingState >= 0 {
// compute what follows who invoked us
let invokingState: ATNState = atn.states[ctxWrap.invokingState]!
let rt: RuleTransition = invokingState.transition(0) as! RuleTransition
let follow: IntervalSet = try atn.nextTokens(rt.followState)
try recoverSet.addAll(follow)
let invokingState = atn.states[ctxWrap.invokingState]!
let rt = invokingState.transition(0) as! RuleTransition
let follow = atn.nextTokens(rt.followState)
try! recoverSet.addAll(follow)
ctx = ctxWrap.parent
}
try recoverSet.remove(CommonToken.EPSILON)
try! recoverSet.remove(CommonToken.EPSILON)
// print("recover set "+recoverSet.toString(recognizer.getTokenNames()));
return recoverSet
}
@ -720,10 +711,9 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {
///
internal func consumeUntil(_ recognizer: Parser, _ set: IntervalSet) throws {
// errPrint("consumeUntil("+set.toString(recognizer.getTokenNames())+")");
var ttype: Int = try getTokenStream(recognizer).LA(1)
var ttype = try getTokenStream(recognizer).LA(1)
while ttype != CommonToken.EOF && !set.contains(ttype) {
//print("consume during recover LA(1)="+getTokenNames()[input.LA(1)]);
// getTokenStream(recognizer).consume();
try recognizer.consume()
ttype = try getTokenStream(recognizer).LA(1)
}
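The recovery helpers above (single-token insertion and deletion, the Grosch-style FOLLOW-set computation, and consumeUntil) all run through whatever error strategy is installed on the parser. A minimal, hedged usage sketch, assuming a generated parser named MyParser and a setErrorHandler method mirroring the Java runtime:
// Hedged sketch: MyParser and `tokens` are illustrative; DefaultErrorStrategy
// is already the default, so installing it explicitly only matters when
// substituting a customized subclass.
let parser = try MyParser(tokens)          // tokens: a CommonTokenStream
parser.setErrorHandler(DefaultErrorStrategy())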

View File

@ -59,16 +59,16 @@ public class DiagnosticErrorListener: BaseErrorListener {
_ stopIndex: Int,
_ exact: Bool,
_ ambigAlts: BitSet,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
if exactOnly && !exact {
return
}
let decision = getDecisionDescription(recognizer, dfa)
let conflictingAlts = try getConflictingAlts(ambigAlts, configs)
let text = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let conflictingAlts = getConflictingAlts(ambigAlts, configs)
let text = getTextInInterval(recognizer, startIndex, stopIndex)
let message = "reportAmbiguity d=\(decision): ambigAlts=\(conflictingAlts), input='\(text)'"
try recognizer.notifyErrorListeners(message)
recognizer.notifyErrorListeners(message)
}
override
@ -77,11 +77,11 @@ public class DiagnosticErrorListener: BaseErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ conflictingAlts: BitSet?,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
let decision = getDecisionDescription(recognizer, dfa)
let text = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let text = getTextInInterval(recognizer, startIndex, stopIndex)
let message = "reportAttemptingFullContext d=\(decision), input='\(text)'"
try recognizer.notifyErrorListeners(message)
recognizer.notifyErrorListeners(message)
}
override
@ -90,11 +90,11 @@ public class DiagnosticErrorListener: BaseErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ prediction: Int,
_ configs: ATNConfigSet) throws {
_ configs: ATNConfigSet) {
let decision = getDecisionDescription(recognizer, dfa)
let text = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let text = getTextInInterval(recognizer, startIndex, stopIndex)
let message = "reportContextSensitivity d=\(decision), input='\(text)'"
try recognizer.notifyErrorListeners(message)
recognizer.notifyErrorListeners(message)
}
internal func getDecisionDescription(_ recognizer: Parser, _ dfa: DFA) -> String {
@ -125,12 +125,17 @@ public class DiagnosticErrorListener: BaseErrorListener {
/// - returns: Returns `reportedAlts` if it is not `null`, otherwise
/// returns the set of alternatives represented in `configs`.
///
internal func getConflictingAlts(_ reportedAlts: BitSet?, _ configs: ATNConfigSet) throws -> BitSet {
if reportedAlts != nil {
return reportedAlts!
internal func getConflictingAlts(_ reportedAlts: BitSet?, _ configs: ATNConfigSet) -> BitSet {
return reportedAlts ?? configs.getAltBitSet()
}
let result = try configs.getAltBitSet()
return result
}
fileprivate func getTextInInterval(_ recognizer: Parser, _ startIndex: Int, _ stopIndex: Int) -> String {
do {
return try recognizer.getTokenStream()?.getText(Interval.of(startIndex, stopIndex)) ?? "<unknown>"
}
catch {
return "<unknown>"
}
}
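A hedged usage sketch for the listener above: it is usually attached together with exact-ambiguity detection so that reportAmbiguity fires only for genuine ambiguities. MyParser and `tokens` are assumed names; the setPredictionMode call follows the commented example elsewhere in this change.
// Hedged sketch: MyParser and `tokens` are illustrative.
let parser = try MyParser(tokens)
parser.addErrorListener(DiagnosticErrorListener())
parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION)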

View File

@ -11,30 +11,18 @@
/// Disambiguating predicate evaluation occurs when we test a predicate during
/// prediction.
///
public class FailedPredicateException: RecognitionException<ParserATNSimulator> {
public class FailedPredicateException: RecognitionException {
private final var ruleIndex: Int
private final var predicateIndex: Int
private final var predicate: String?
public convenience init(_ recognizer: Parser) throws {
try self.init(recognizer, nil)
}
public init(_ recognizer: Parser, _ predicate: String? = nil, _ message: String? = nil) {
let s = recognizer.getInterpreter().atn.states[recognizer.getState()]!
public convenience init(_ recognizer: Parser, _ predicate: String?)throws {
try self.init(recognizer, predicate, nil)
}
public init(_ recognizer: Parser,
_ predicate: String?,
_ message: String?) throws
{
let s: ATNState = recognizer.getInterpreter().atn.states[recognizer.getState()]!
let trans: AbstractPredicateTransition = s.transition(0) as! AbstractPredicateTransition
if trans is PredicateTransition {
self.ruleIndex = (trans as! PredicateTransition).ruleIndex
self.predicateIndex = (trans as! PredicateTransition).predIndex
let trans = s.transition(0) as! AbstractPredicateTransition
if let predex = trans as? PredicateTransition {
self.ruleIndex = predex.ruleIndex
self.predicateIndex = predex.predIndex
}
else {
self.ruleIndex = 0
@ -43,9 +31,10 @@ public class FailedPredicateException: RecognitionException<ParserATNSimulator>
self.predicate = predicate
super.init(FailedPredicateException.formatMessage(predicate!, message), recognizer , recognizer.getInputStream()!, recognizer._ctx)
try self.setOffendingToken(recognizer.getCurrentToken())
super.init(recognizer, recognizer.getInputStream()!, recognizer._ctx, FailedPredicateException.formatMessage(predicate, message))
if let token = try? recognizer.getCurrentToken() {
setOffendingToken(token)
}
}
public func getRuleIndex() -> Int {
@ -56,17 +45,17 @@ public class FailedPredicateException: RecognitionException<ParserATNSimulator>
return predicateIndex
}
public func getPredicate() -> String? {
return predicate
}
private static func formatMessage(_ predicate: String, _ message: String?) -> String {
private static func formatMessage(_ predicate: String?, _ message: String?) -> String {
if message != nil {
return message!
}
return "failed predicate: {predicate}?" //String.format(Locale.getDefault(), "failed predicate: {%s}?", predicate);
let predstr = predicate ?? "<unknown>"
return "failed predicate: {\(predstr)}?"
}
}

View File

@ -10,9 +10,11 @@
/// when the current input does not match the expected token.
///
public class InputMismatchException: RecognitionException<ParserATNSimulator> {
public init(_ recognizer: Parser) throws {
public class InputMismatchException: RecognitionException {
public init(_ recognizer: Parser) {
super.init(recognizer, recognizer.getInputStream()!, recognizer._ctx)
self.setOffendingToken(try recognizer.getCurrentToken())
if let token = try? recognizer.getCurrentToken() {
setOffendingToken(token)
}
}
}

View File

@ -14,27 +14,24 @@
import Foundation
//public class Lexer : Recognizer<Int, LexerATNSimulator>
open class Lexer: Recognizer<LexerATNSimulator>, TokenSource {
public static let EOF = -1
public static let DEFAULT_MODE = 0
public static let MORE = -2
public static let SKIP = -3
open class Lexer: Recognizer<LexerATNSimulator>
, TokenSource {
public static let EOF: Int = -1
public static let DEFAULT_MODE: Int = 0
public static let MORE: Int = -2
public static let SKIP: Int = -3
public static let DEFAULT_TOKEN_CHANNEL: Int = CommonToken.DEFAULT_CHANNEL
public static let HIDDEN: Int = CommonToken.HIDDEN_CHANNEL
public static let MIN_CHAR_VALUE: Int = Character.MIN_VALUE;
public static let MAX_CHAR_VALUE: Int = Character.MAX_VALUE;
public static let DEFAULT_TOKEN_CHANNEL = CommonToken.DEFAULT_CHANNEL
public static let HIDDEN = CommonToken.HIDDEN_CHANNEL
public static let MIN_CHAR_VALUE = Character.MIN_VALUE;
public static let MAX_CHAR_VALUE = Character.MAX_VALUE;
public var _input: CharStream?
internal var _tokenFactorySourcePair: (TokenSource?, CharStream?)
internal var _tokenFactorySourcePair: TokenSourceAndStream
///
/// How to create token objects
///
internal var _factory: TokenFactory = CommonTokenFactory.DEFAULT
internal var _factory = CommonTokenFactory.DEFAULT
///
/// The goal of all lexer rules/methods is to create a token object.
@ -52,36 +49,36 @@ open class Lexer: Recognizer<LexerATNSimulator>
/// Needed, for example, to get the text for current token. Set at
/// the start of nextToken.
///
public var _tokenStartCharIndex: Int = -1
public var _tokenStartCharIndex = -1
///
/// The line on which the first character of the token resides
///
public var _tokenStartLine: Int = 0
public var _tokenStartLine = 0
///
/// The character position of first character within the line
///
public var _tokenStartCharPositionInLine: Int = 0
public var _tokenStartCharPositionInLine = 0
///
/// Once we see EOF on char stream, next token will be EOF.
/// If you have DONE : EOF ; then you see DONE EOF.
///
public var _hitEOF: Bool = false
public var _hitEOF = false
///
/// The channel number for the current token
///
public var _channel: Int = 0
public var _channel = 0
///
/// The token type for the current token
///
public var _type: Int = 0
public var _type = 0
public final var _modeStack: Stack<Int> = Stack<Int>()
public var _mode: Int = Lexer.DEFAULT_MODE
public final var _modeStack = Stack<Int>()
public var _mode = Lexer.DEFAULT_MODE
///
/// You can set the text for the current token to override what is in
@ -90,13 +87,17 @@ open class Lexer: Recognizer<LexerATNSimulator>
public var _text: String?
public override init() {
self._tokenFactorySourcePair = TokenSourceAndStream()
super.init()
self._tokenFactorySourcePair.tokenSource = self
}
public init(_ input: CharStream) {
super.init()
public required init(_ input: CharStream) {
self._input = input
self._tokenFactorySourcePair = (self, input)
self._tokenFactorySourcePair = TokenSourceAndStream()
super.init()
self._tokenFactorySourcePair.tokenSource = self
self._tokenFactorySourcePair.stream = input
}
open func reset() throws {
@ -131,7 +132,7 @@ open class Lexer: Recognizer<LexerATNSimulator>
// Mark start location in char stream so unbuffered streams are
// guaranteed at least have text of current token
var tokenStartMarker: Int = _input.mark()
var tokenStartMarker = _input.mark()
defer {
// make sure we release marker after match or
// unbuffered char stream will keep buffering
@ -237,10 +238,10 @@ open class Lexer: Recognizer<LexerATNSimulator>
open override func setInputStream(_ input: IntStream) throws {
self._input = nil
self._tokenFactorySourcePair = (self, _input!)
self._tokenFactorySourcePair = makeTokenSourceAndStream()
try reset()
self._input = input as? CharStream
self._tokenFactorySourcePair = (self, _input!)
self._tokenFactorySourcePair = makeTokenSourceAndStream()
}
@ -273,22 +274,23 @@ open class Lexer: Recognizer<LexerATNSimulator>
///
@discardableResult
open func emit() -> Token {
let t: Token = _factory.create(_tokenFactorySourcePair, _type, _text, _channel, _tokenStartCharIndex, getCharIndex() - 1,
_tokenStartLine, _tokenStartCharPositionInLine)
let t = _factory.create(_tokenFactorySourcePair, _type, _text, _channel, _tokenStartCharIndex, getCharIndex() - 1, _tokenStartLine, _tokenStartCharPositionInLine)
emit(t)
return t
}
@discardableResult
open func emitEOF() -> Token {
let cpos: Int = getCharPositionInLine()
let line: Int = getLine()
let eof: Token = _factory.create(
let cpos = getCharPositionInLine()
let line = getLine()
let idx = _input!.index()
let eof = _factory.create(
_tokenFactorySourcePair,
CommonToken.EOF,
nil,
CommonToken.DEFAULT_CHANNEL,
_input!.index(),
_input!.index() - 1,
idx,
idx - 1,
line,
cpos)
emit(eof)
@ -374,23 +376,13 @@ open class Lexer: Recognizer<LexerATNSimulator>
return nil
}
///
/// Used to print out token names like ID during debugging and
/// error reporting. The generated parsers implement a method
/// that overrides this to point to their String[] tokenNames.
///
override
open func getTokenNames() -> [String?]? {
return nil
}
///
/// Return a list of all Token objects in input char stream.
/// Forces load of all tokens. Does not include EOF token.
///
open func getAllTokens() throws -> Array<Token> {
var tokens: Array<Token> = Array<Token>()
var t: Token = try nextToken()
open func getAllTokens() throws -> [Token] {
var tokens = [Token]()
var t = try nextToken()
while t.getType() != CommonToken.EOF {
tokens.append(t)
t = try nextToken()
@ -407,23 +399,29 @@ open class Lexer: Recognizer<LexerATNSimulator>
open func notifyListeners<T>(_ e: LexerNoViableAltException, recognizer: Recognizer<T>) {
let text: String = _input!.getText(Interval.of(_tokenStartCharIndex, _input!.index()))
let msg: String = "token recognition error at: '\(getErrorDisplay(text))'"
let text: String
do {
text = try _input!.getText(Interval.of(_tokenStartCharIndex, _input!.index()))
}
catch {
text = "<unknown>"
}
let msg = "token recognition error at: '\(getErrorDisplay(text))'"
let listener: ANTLRErrorListener = getErrorListenerDispatch()
let listener = getErrorListenerDispatch()
listener.syntaxError(recognizer, nil, _tokenStartLine, _tokenStartCharPositionInLine, msg, e)
}
open func getErrorDisplay(_ s: String) -> String {
let buf: StringBuilder = StringBuilder()
for c: Character in s.characters {
let buf = StringBuilder()
for c in s.characters {
buf.append(getErrorDisplay(c))
}
return buf.toString()
}
open func getErrorDisplay(_ c: Character) -> String {
var s: String = String(c) // String.valueOf(c as Character);
var s = String(c)
if c.integerValue == CommonToken.EOF {
s = "<EOF>"
}
@ -455,4 +453,8 @@ open class Lexer: Recognizer<LexerATNSimulator>
// TODO: Do we lose character or line position information?
try _input!.consume()
}
internal func makeTokenSourceAndStream() -> TokenSourceAndStream {
return TokenSourceAndStream(self, _input)
}
}
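A hedged usage sketch for the lexer API above, in particular getAllTokens(), which drains the character stream and returns every token except EOF. HelloLexer is an assumed generated lexer; ANTLRInputStream is this runtime's string-backed CharStream.
// Hedged sketch: HelloLexer is illustrative.
let input = ANTLRInputStream("hello world")
let lexer = HelloLexer(input)
let allTokens = try lexer.getAllTokens()   // everything up to, but not including, EOF
for t in allTokens {
    print("type=\(t.getType()) text='\(t.getText() ?? "")'")
}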

View File

@ -9,44 +9,19 @@ public class LexerInterpreter: Lexer {
internal final var grammarFileName: String
internal final var atn: ATN
///
/// /@Deprecated
///
internal final var tokenNames: [String?]?
internal final var ruleNames: [String]
internal final var channelNames: [String]
internal final var modeNames: [String]
private final var vocabulary: Vocabulary?
internal final var _decisionToDFA: [DFA]
internal final var _sharedContextCache: PredictionContextCache =
PredictionContextCache()
// public override init() {
// super.init()}
// public convenience init(_ input : CharStream) {
// self.init()
// self._input = input;
// self._tokenFactorySourcePair = (self, input);
// }
//@Deprecated
public convenience init(_ grammarFileName: String, _ tokenNames: Array<String?>?, _ ruleNames: Array<String>, _ channelNames: Array<String>, _ modeNames: Array<String>, _ atn: ATN, _ input: CharStream) throws {
try self.init(grammarFileName, Vocabulary.fromTokenNames(tokenNames), ruleNames, channelNames, modeNames, atn, input)
}
internal final var _sharedContextCache = PredictionContextCache()
public init(_ grammarFileName: String, _ vocabulary: Vocabulary, _ ruleNames: Array<String>, _ channelNames: Array<String>, _ modeNames: Array<String>, _ atn: ATN, _ input: CharStream) throws {
self.grammarFileName = grammarFileName
self.atn = atn
self.tokenNames = [String?]()
//new String[atn.maxTokenType];
let length = tokenNames!.count
for i in 0..<length {
tokenNames![i] = vocabulary.getDisplayName(i)
}
self.ruleNames = ruleNames
self.channelNames = channelNames
self.modeNames = modeNames
@ -57,9 +32,7 @@ public class LexerInterpreter: Lexer {
for i in 0..<_decisionToDFALength {
_decisionToDFA[i] = DFA(atn.getDecisionState(i)!, i)
}
super.init()
self._input = input
self._tokenFactorySourcePair = (self, input)
super.init(input)
self._interp = LexerATNSimulator(self, atn, _decisionToDFA, _sharedContextCache)
if atn.grammarType != ATNType.lexer {
@ -68,6 +41,10 @@ public class LexerInterpreter: Lexer {
}
}
public required init(_ input: CharStream) {
fatalError("Use the other initializer")
}
override
public func getATN() -> ATN {
return atn
@ -78,14 +55,6 @@ public class LexerInterpreter: Lexer {
return grammarFileName
}
override
///
/// /@Deprecated
///
public func getTokenNames() -> [String?]? {
return tokenNames
}
override
public func getRuleNames() -> [String] {
return ruleNames

View File

@ -5,7 +5,7 @@
///
public class LexerNoViableAltException: RecognitionException<LexerATNSimulator>, CustomStringConvertible {
public class LexerNoViableAltException: RecognitionException, CustomStringConvertible {
///
/// Matching attempted at what input index?
///
@ -31,23 +31,15 @@ public class LexerNoViableAltException: RecognitionException<LexerATNSimulator>,
return startIndex
}
public func getDeadEndConfigs() -> ATNConfigSet {
return deadEndConfigs
}
//override
// public func getInputStream() -> CharStream {
// return super.getInputStream() as! CharStream;
// }
public var description: String {
var symbol: String = ""
if startIndex >= 0 && startIndex < getInputStream().size() {
let charStream: CharStream = getInputStream() as! CharStream
let interval: Interval = Interval.of(startIndex, startIndex)
symbol = charStream.getText(interval)
var symbol = ""
if let charStream = getInputStream() as? CharStream, startIndex >= 0 && startIndex < charStream.size() {
let interval = Interval.of(startIndex, startIndex)
symbol = try! charStream.getText(interval)
symbol = Utils.escapeWhitespace(symbol, false)
}

View File

@ -17,7 +17,7 @@ public class ListTokenSource: TokenSource {
///
/// The wrapped collection of _org.antlr.v4.runtime.Token_ objects to return.
///
internal final var tokens: Array<Token>
internal final var tokens: [Token]
///
/// The name of the input source. If this value is `null`, a call to
@ -32,7 +32,7 @@ public class ListTokenSource: TokenSource {
/// _#nextToken_. The end of the input is indicated by this value
/// being greater than or equal to the number of items in _#tokens_.
///
internal var i: Int = 0
internal var i = 0
///
/// This field caches the EOF token for the token source.
@ -43,7 +43,7 @@ public class ListTokenSource: TokenSource {
/// This is the backing field for _#getTokenFactory_ and
/// _setTokenFactory_.
///
private var _factory: TokenFactory = CommonTokenFactory.DEFAULT
private var _factory = CommonTokenFactory.DEFAULT
///
/// Constructs a new _org.antlr.v4.runtime.ListTokenSource_ instance from the specified
@ -52,7 +52,7 @@ public class ListTokenSource: TokenSource {
/// - parameter tokens: The collection of _org.antlr.v4.runtime.Token_ objects to provide as a
/// _org.antlr.v4.runtime.TokenSource_.
///
public convenience init(_ tokens: Array<Token>) {
public convenience init(_ tokens: [Token]) {
self.init(tokens, nil)
}
@ -67,8 +67,7 @@ public class ListTokenSource: TokenSource {
/// the next _org.antlr.v4.runtime.Token_ (or the previous token if the end of the input has
/// been reached).
///
public init(_ tokens: Array<Token>, _ sourceName: String?) {
public init(_ tokens: [Token], _ sourceName: String?) {
self.tokens = tokens
self.sourceName = sourceName
}
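A hedged usage sketch: ListTokenSource lets an already-materialized token list (filtered, synthesized, or replayed) be fed back into a parser via a CommonTokenStream. `myTokens` and MyParser are assumed names.
// Hedged sketch: `myTokens: [Token]` and MyParser are illustrative.
let source = ListTokenSource(myTokens, "in-memory tokens")
let stream = CommonTokenStream(source)
let parser = try MyParser(stream)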
@ -76,28 +75,24 @@ public class ListTokenSource: TokenSource {
public func getCharPositionInLine() -> Int {
if i < tokens.count {
return tokens[i].getCharPositionInLine()
} else {
if let eofToken = eofToken {
}
else if let eofToken = eofToken {
return eofToken.getCharPositionInLine()
} else {
if tokens.count > 0 {
}
else if tokens.count > 0 {
// have to calculate the result from the line/column of the previous
// token, along with the text of the token.
let lastToken: Token = tokens[tokens.count - 1]
let lastToken = tokens[tokens.count - 1]
if let tokenText = lastToken.getText() {
let lastNewLine: Int = tokenText.lastIndexOf("\n")
let lastNewLine = tokenText.lastIndexOf("\n")
if lastNewLine >= 0 {
return tokenText.length - lastNewLine - 1
}
}
var position = lastToken.getCharPositionInLine()
position += lastToken.getStopIndex()
position -= lastToken.getStartIndex()
position += 1
return position
}
}
return (lastToken.getCharPositionInLine() +
lastToken.getStopIndex() -
lastToken.getStartIndex() + 1)
}
// only reach this if tokens is empty, meaning EOF occurs at the first
@ -108,22 +103,23 @@ public class ListTokenSource: TokenSource {
public func nextToken() -> Token {
if i >= tokens.count {
if eofToken == nil {
var start: Int = -1
var start = -1
if tokens.count > 0 {
let previousStop: Int = tokens[tokens.count - 1].getStopIndex()
let previousStop = tokens[tokens.count - 1].getStopIndex()
if previousStop != -1 {
start = previousStop + 1
}
}
let stop: Int = max(-1, start - 1)
eofToken = _factory.create((self, getInputStream()!), CommonToken.EOF, "EOF", CommonToken.DEFAULT_CHANNEL, start, stop, getLine(), getCharPositionInLine())
let stop = max(-1, start - 1)
let source = TokenSourceAndStream(self, getInputStream())
eofToken = _factory.create(source, CommonToken.EOF, "EOF", CommonToken.DEFAULT_CHANNEL, start, stop, getLine(), getCharPositionInLine())
}
return eofToken!
}
let t: Token = tokens[i]
let t = tokens[i]
if i == tokens.count - 1 && t.getType() == CommonToken.EOF {
eofToken = t
}
@ -142,8 +138,8 @@ public class ListTokenSource: TokenSource {
if tokens.count > 0 {
// have to calculate the result from the line/column of the previous
// token, along with the text of the token.
let lastToken: Token = tokens[tokens.count - 1]
var line: Int = lastToken.getLine()
let lastToken = tokens[tokens.count - 1]
var line = lastToken.getLine()
if let tokenText = lastToken.getText() {
let length = tokenText.length
@ -168,23 +164,21 @@ public class ListTokenSource: TokenSource {
public func getInputStream() -> CharStream? {
if i < tokens.count {
return tokens[i].getInputStream()
} else {
if let eofToken = eofToken{
}
else if let eofToken = eofToken {
return eofToken.getInputStream()
} else {
if tokens.count > 0 {
}
else if tokens.count > 0 {
return tokens[tokens.count - 1].getInputStream()
}
}
}
// no input stream information is available
return nil
}
public func getSourceName() -> String {
if sourceName != nil {
return sourceName!
if let sourceName = sourceName {
return sourceName
}
if let inputStream = getInputStream() {

View File

@ -10,7 +10,7 @@
/// in the various paths when the error occurred. Reported by reportNoViableAlternative()
///
public class NoViableAltException: RecognitionException<ParserATNSimulator> {
public class NoViableAltException: RecognitionException {
/// Which configurations did we try at input.index() that couldn't match input.LT(1)?
private final var deadEndConfigs: ATNConfigSet?
@ -22,29 +22,31 @@ public class NoViableAltException: RecognitionException<ParserATNSimulator> {
///
private final var startToken: Token
public convenience init(_ recognizer: Parser?) throws {
public convenience init(_ recognizer: Parser) {
// LL(1) error
let token = try! recognizer.getCurrentToken()
self.init(recognizer,
recognizer!.getInputStream()!,
try recognizer!.getCurrentToken(),
try recognizer!.getCurrentToken(),
recognizer.getInputStream()!,
token,
token,
nil,
recognizer!._ctx)
recognizer._ctx)
}
public init(_ recognizer: Parser?,
_ input: IntStream,
_ startToken: Token,
_ offendingToken: Token,
_ offendingToken: Token?,
_ deadEndConfigs: ATNConfigSet?,
_ ctx: ParserRuleContext?) {
self.deadEndConfigs = deadEndConfigs
self.startToken = startToken
// as? Recognizer<AnyObject, ATNSimulator>
super.init(recognizer, input, ctx)
self.setOffendingToken(offendingToken)
if let offendingToken = offendingToken {
setOffendingToken(offendingToken)
}
}

View File

@ -12,7 +12,7 @@ import Foundation
/// This is all the parsing support code essentially; most of it is error recovery stuff.
///
open class Parser: Recognizer<ParserATNSimulator> {
public static let EOF: Int = -1
public static let EOF = -1
public static var ConsoleError = true
public class TraceListener: ParseTreeListener {
@ -27,16 +27,13 @@ open class Parser: Recognizer<ParserATNSimulator> {
print("enter \(ruleName), LT(1)=\(lt1)")
}
public func visitTerminal(_ node: TerminalNode) {
print("consume \(String(describing: node.getSymbol())) rule \(host.getRuleNames()[host._ctx!.getRuleIndex()])")
}
public func visitErrorNode(_ node: ErrorNode) {
}
public func exitEveryRule(_ ctx: ParserRuleContext) throws {
let ruleName = host.getRuleNames()[ctx.getRuleIndex()]
let lt1 = try host._input.LT(1)!.getText()!
@ -45,23 +42,17 @@ open class Parser: Recognizer<ParserATNSimulator> {
}
public class TrimToSizeListener: ParseTreeListener {
public static let INSTANCE: TrimToSizeListener = TrimToSizeListener()
public static let INSTANCE = TrimToSizeListener()
public func enterEveryRule(_ ctx: ParserRuleContext) {
}
public func visitTerminal(_ node: TerminalNode) {
}
public func visitErrorNode(_ node: ErrorNode) {
}
public func exitEveryRule(_ ctx: ParserRuleContext) {
// TODO: Print exit info.
}
@ -193,7 +184,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
@discardableResult
public func match(_ ttype: Int) throws -> Token {
var t: Token = try getCurrentToken()
var t = try getCurrentToken()
if t.getType() == ttype {
_errHandler.reportMatch(self)
try consume()
@ -228,7 +219,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
@discardableResult
public func matchWildcard() throws -> Token {
var t: Token = try getCurrentToken()
var t = try getCurrentToken()
if t.getType() > 0 {
_errHandler.reportMatch(self)
try consume()
@ -297,18 +288,11 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// using the default _org.antlr.v4.runtime.Parser.TrimToSizeListener_ during the parse process.
///
public func getTrimParseTree() -> Bool {
return !getParseListeners().filter({ $0 === TrimToSizeListener.INSTANCE }).isEmpty
}
public func getParseListeners() -> Array<ParseTreeListener> {
let listeners: Array<ParseTreeListener>? = _parseListeners
if listeners == nil {
return Array<ParseTreeListener>()
}
return listeners!
public func getParseListeners() -> [ParseTreeListener] {
return _parseListeners ?? [ParseTreeListener]()
}
///
@ -338,10 +322,10 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
public func addParseListener(_ listener: ParseTreeListener) {
if _parseListeners == nil {
_parseListeners = Array<ParseTreeListener>()
_parseListeners = [ParseTreeListener]()
}
self._parseListeners!.append(listener)
_parseListeners!.append(listener)
}
///
@ -399,9 +383,9 @@ open class Parser: Recognizer<ParserATNSimulator> {
public func triggerExitRuleEvent() throws {
// reverse order walk of listeners
if let _parseListeners = _parseListeners, let _ctx = _ctx {
var i: Int = _parseListeners.count - 1
var i = _parseListeners.count - 1
while i >= 0 {
let listener: ParseTreeListener = _parseListeners[i]
let listener = _parseListeners[i]
_ctx.exitRule(listener)
try listener.exitEveryRule(_ctx)
i -= 1
@ -421,14 +405,12 @@ open class Parser: Recognizer<ParserATNSimulator> {
override
open func getTokenFactory() -> TokenFactory {
//<AnyObject>
return _input.getTokenSource().getTokenFactory()
}
/// Tell our token source and error strategy about a new way to create tokens.
override
open func setTokenFactory(_ factory: TokenFactory) {
//<AnyObject>
_input.getTokenSource().setTokenFactory(factory)
}
@ -439,15 +421,13 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// - Throws: _ANTLRError.unsupportedOperation_ if the current parser does not
/// implement the _#getSerializedATN()_ method.
///
public func getATNWithBypassAlts() -> ATN {
let serializedAtn: String = getSerializedATN()
let serializedAtn = getSerializedATN()
var result: ATN? = bypassAltsAtnCache[serializedAtn]
bypassAltsAtnCacheMutex.synchronized {
[unowned self] in
var result = bypassAltsAtnCache[serializedAtn]
bypassAltsAtnCacheMutex.synchronized { [unowned self] in
if result == nil {
let deserializationOptions: ATNDeserializationOptions = ATNDeserializationOptions()
let deserializationOptions = ATNDeserializationOptions()
try! deserializationOptions.setGenerateRuleBypassTransitions(true)
result = try! ATNDeserializer(deserializationOptions).deserialize(Array(serializedAtn.characters))
self.bypassAltsAtnCache[serializedAtn] = result!
@ -469,14 +449,12 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
public func compileParseTreePattern(_ pattern: String, _ patternRuleIndex: Int) throws -> ParseTreePattern {
if let tokenStream = getTokenStream() {
let tokenSource: TokenSource = tokenStream.getTokenSource()
if tokenSource is Lexer {
let lexer: Lexer = tokenSource as! Lexer
let tokenSource = tokenStream.getTokenSource()
if let lexer = tokenSource as? Lexer {
return try compileParseTreePattern(pattern, patternRuleIndex, lexer)
}
}
throw ANTLRError.unsupportedOperation(msg: "Parser can't discover a lexer to use")
}
///
@ -485,7 +463,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
public func compileParseTreePattern(_ pattern: String, _ patternRuleIndex: Int,
_ lexer: Lexer) throws -> ParseTreePattern {
let m: ParseTreePatternMatcher = ParseTreePatternMatcher(lexer, self)
let m = ParseTreePatternMatcher(lexer, self)
return try m.compile(pattern, patternRuleIndex)
}
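A hedged usage sketch for compileParseTreePattern: the pattern string mixes literal tokens with tagged placeholders and is compiled against a specific rule index. The pattern text and MyParser.RULE_stat are illustrative only.
// Hedged sketch: the pattern and MyParser.RULE_stat are illustrative.
let pattern = try parser.compileParseTreePattern("<ID> = <expr> ;", MyParser.RULE_stat)
// `pattern` can then be matched against parse subtrees.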
@ -528,19 +506,21 @@ open class Parser: Recognizer<ParserATNSimulator> {
return try _input.LT(1)!
}
public final func notifyErrorListeners(_ msg: String) throws {
try notifyErrorListeners(getCurrentToken(), msg, nil)
public final func notifyErrorListeners(_ msg: String) {
let token = try? getCurrentToken()
notifyErrorListeners(token, msg, nil)
}
public func notifyErrorListeners(_ offendingToken: Token, _ msg: String,
_ e: AnyObject?) {
public func notifyErrorListeners(_ offendingToken: Token?, _ msg: String, _ e: AnyObject?) {
_syntaxErrors += 1
var line: Int = -1
var charPositionInLine: Int = -1
var line = -1
var charPositionInLine = -1
if let offendingToken = offendingToken {
line = offendingToken.getLine()
charPositionInLine = offendingToken.getCharPositionInLine()
}
let listener: ANTLRErrorListener = getErrorListenerDispatch()
let listener = getErrorListenerDispatch()
listener.syntaxError(self, offendingToken, line, charPositionInLine, msg, e)
}
@ -567,27 +547,27 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
@discardableResult
public func consume() throws -> Token {
let o: Token = try getCurrentToken()
let o = try getCurrentToken()
if o.getType() != Parser.EOF {
try getInputStream()!.consume()
}
guard let _ctx = _ctx else {
return o
}
let hasListener: Bool = _parseListeners != nil && !_parseListeners!.isEmpty
let hasListener = _parseListeners != nil && !_parseListeners!.isEmpty
if _buildParseTrees || hasListener {
if _errHandler.inErrorRecoveryMode(self) {
let node: ErrorNode = _ctx.addErrorNode(createErrorNode(parent: _ctx, t: o))
let node = _ctx.addErrorNode(createErrorNode(parent: _ctx, t: o))
if let _parseListeners = _parseListeners {
for listener: ParseTreeListener in _parseListeners {
for listener in _parseListeners {
listener.visitErrorNode(node)
}
}
} else {
let node: TerminalNode = _ctx.addChild(createTerminalNode(parent: _ctx, t: o))
let node = _ctx.addChild(createTerminalNode(parent: _ctx, t: o))
if let _parseListeners = _parseListeners {
for listener: ParseTreeListener in _parseListeners {
for listener in _parseListeners {
listener.visitTerminal(node)
}
}
@ -703,7 +683,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// Make the current context the child of the incoming localctx.
///
public func pushNewRecursionContext(_ localctx: ParserRuleContext, _ state: Int, _ ruleIndex: Int) throws {
let previous: ParserRuleContext = _ctx!
let previous = _ctx!
previous.parent = localctx
previous.invokingState = state
previous.stop = try _input.LT(-1)
@ -722,7 +702,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
public func unrollRecursionContexts(_ _parentctx: ParserRuleContext?) throws {
_precedenceStack.pop()
_ctx!.stop = try _input.LT(-1)
let retctx: ParserRuleContext = _ctx! // save current ctx (return value)
let retctx = _ctx! // save current ctx (return value)
// unroll so _ctx is as it was before call to recursive method
if _parseListeners != nil {
@ -744,7 +724,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
}
public func getInvokingContext(_ ruleIndex: Int) -> ParserRuleContext? {
var p: ParserRuleContext? = _ctx
var p = _ctx
while let pWrap = p {
if pWrap.getRuleIndex() == ruleIndex {
return pWrap
@ -857,7 +837,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
// parser.getInterpreter()!.setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
//
// // get ambig trees
// var alt : Int = ambiguityInfo.ambigAlts.nextSetBit(0);
// var alt : Int = ambiguityInfo.ambigAlts.firstSetBit();
// while alt>=0 {
// // re-parse entire input for all ambiguous alternatives
// // (don't have to do first as it's been parsed, but do again for simplicity
@ -895,12 +875,11 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// - Returns: `true` if `symbol` can follow the current state in
/// the ATN, otherwise `false`.
///
public func isExpectedToken(_ symbol: Int) throws -> Bool {
// return getInterpreter().atn.nextTokens(_ctx);
let atn: ATN = getInterpreter().atn
public func isExpectedToken(_ symbol: Int) -> Bool {
let atn = getInterpreter().atn
var ctx: ParserRuleContext? = _ctx
let s: ATNState = atn.states[getState()]!
var following: IntervalSet = try atn.nextTokens(s)
let s = atn.states[getState()]!
var following = atn.nextTokens(s)
if following.contains(symbol) {
return true
}
@ -910,9 +889,9 @@ open class Parser: Recognizer<ParserATNSimulator> {
}
while let ctxWrap = ctx, ctxWrap.invokingState >= 0 && following.contains(CommonToken.EPSILON) {
let invokingState: ATNState = atn.states[ctxWrap.invokingState]!
let rt: RuleTransition = invokingState.transition(0) as! RuleTransition
following = try atn.nextTokens(rt.followState)
let invokingState = atn.states[ctxWrap.invokingState]!
let rt = invokingState.transition(0) as! RuleTransition
following = atn.nextTokens(rt.followState)
if following.contains(symbol) {
return true
}
@ -939,19 +918,15 @@ open class Parser: Recognizer<ParserATNSimulator> {
}
public func getExpectedTokensWithinCurrentRule() throws -> IntervalSet {
let atn: ATN = getInterpreter().atn
let s: ATNState = atn.states[getState()]!
return try atn.nextTokens(s)
public func getExpectedTokensWithinCurrentRule() -> IntervalSet {
let atn = getInterpreter().atn
let s = atn.states[getState()]!
return atn.nextTokens(s)
}
/// Get a rule's index (i.e., `RULE_ruleName` field) or -1 if not found.
public func getRuleIndex(_ ruleName: String) -> Int {
let ruleIndex: Int? = getRuleIndexMap()[ruleName]
if ruleIndex != nil {
return ruleIndex!
}
return -1
return getRuleIndexMap()[ruleName] ?? -1
}
public func getRuleContext() -> ParserRuleContext? {
@ -965,17 +940,17 @@ open class Parser: Recognizer<ParserATNSimulator> {
///
/// This is very useful for error messages.
///
public func getRuleInvocationStack() -> Array<String> {
public func getRuleInvocationStack() -> [String] {
return getRuleInvocationStack(_ctx)
}
public func getRuleInvocationStack(_ p: RuleContext?) -> Array<String> {
public func getRuleInvocationStack(_ p: RuleContext?) -> [String] {
var p = p
var ruleNames: [String] = getRuleNames()
var stack: Array<String> = Array<String>()
var ruleNames = getRuleNames()
var stack = [String]()
while let pWrap = p {
// compute what follows who invoked us
let ruleIndex: Int = pWrap.getRuleIndex()
let ruleIndex = pWrap.getRuleIndex()
if ruleIndex < 0 {
stack.append("n/a")
} else {
@ -987,16 +962,14 @@ open class Parser: Recognizer<ParserATNSimulator> {
}
/// For debugging and other purposes.
public func getDFAStrings() -> Array<String> {
var s: Array<String> = Array<String>()
public func getDFAStrings() -> [String] {
var s = [String]()
guard let _interp = _interp else {
return s
}
decisionToDFAMutex.synchronized {
[unowned self] in
decisionToDFAMutex.synchronized { [unowned self] in
for d in 0..<_interp.decisionToDFA.count {
let dfa: DFA = _interp.decisionToDFA[d]
let dfa = _interp.decisionToDFA[d]
s.append(dfa.toString(self.getVocabulary()))
}
@ -1009,12 +982,10 @@ open class Parser: Recognizer<ParserATNSimulator> {
guard let _interp = _interp else {
return
}
decisionToDFAMutex.synchronized {
[unowned self] in
var seenOne: Bool = false
decisionToDFAMutex.synchronized { [unowned self] in
var seenOne = false
for d in 0..<_interp.decisionToDFA.count {
let dfa: DFA = _interp.decisionToDFA[d]
for dfa in _interp.decisionToDFA {
if !dfa.states.isEmpty {
if seenOne {
print("")
@ -1034,9 +1005,9 @@ open class Parser: Recognizer<ParserATNSimulator> {
override
open func getParseInfo() -> ParseInfo? {
let interp: ParserATNSimulator? = getInterpreter()
if interp is ProfilingATNSimulator {
return ParseInfo(interp as! ProfilingATNSimulator)
let interp = getInterpreter()
if let interp = interp as? ProfilingATNSimulator {
return ParseInfo(interp)
}
return nil
}
@ -1045,16 +1016,15 @@ open class Parser: Recognizer<ParserATNSimulator> {
/// - Since: 4.3
///
public func setProfile(_ profile: Bool) {
let interp: ParserATNSimulator = getInterpreter()
let saveMode: PredictionMode = interp.getPredictionMode()
let interp = getInterpreter()
let saveMode = interp.getPredictionMode()
if profile {
if !(interp is ProfilingATNSimulator) {
setInterpreter(ProfilingATNSimulator(self))
}
} else {
if interp is ProfilingATNSimulator {
let sim: ParserATNSimulator =
ParserATNSimulator(self, getATN(), interp.decisionToDFA, interp.getSharedContextCache()!)
let sim = ParserATNSimulator(self, getATN(), interp.decisionToDFA, interp.getSharedContextCache()!)
setInterpreter(sim)
}
}
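A hedged sketch of the profiling hooks above: setProfile(true) swaps in a ProfilingATNSimulator before parsing, and getParseInfo() exposes the collected decision statistics afterwards. startRule() stands in for whatever generated entry rule the parser provides.
// Hedged sketch: startRule() is an assumed generated entry rule.
parser.setProfile(true)
_ = try parser.startRule()
if let info = parser.getParseInfo() {
    // decision statistics collected by ProfilingATNSimulator are available here
    _ = info
}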

View File

@ -32,10 +32,6 @@ public class ParserInterpreter: Parser {
internal final var sharedContextCache: PredictionContextCache =
PredictionContextCache()
///
/// /@Deprecated
///
internal final var tokenNames: [String]
internal final var ruleNames: [String]
private final var vocabulary: Vocabulary
@ -64,7 +60,6 @@ public class ParserInterpreter: Parser {
self.grammarFileName = old.grammarFileName
self.statesNeedingLeftRecursionContext = old.statesNeedingLeftRecursionContext
self.decisionToDFA = old.decisionToDFA
self.tokenNames = old.tokenNames
self.ruleNames = old.ruleNames
self.vocabulary = old.vocabulary
try super.init(old.getTokenStream()!)
@ -73,26 +68,11 @@ public class ParserInterpreter: Parser {
sharedContextCache))
}
///
/// Use _#ParserInterpreter(String, org.antlr.v4.runtime.Vocabulary, java.util.Collection, org.antlr.v4.runtime.atn.ATN, org.antlr.v4.runtime.TokenStream)_ instead.
///
//@Deprecated
public convenience init(_ grammarFileName: String, _ tokenNames: Array<String?>?,
_ ruleNames: Array<String>, _ atn: ATN, _ input: TokenStream) throws {
try self.init(grammarFileName, Vocabulary.fromTokenNames(tokenNames), ruleNames, atn, input)
}
public init(_ grammarFileName: String, _ vocabulary: Vocabulary,
_ ruleNames: Array<String>, _ atn: ATN, _ input: TokenStream) throws {
self.grammarFileName = grammarFileName
self.atn = atn
self.tokenNames = [String]()// new String[atn.maxTokenType];
let length = tokenNames.count
for i in 0..<length {
tokenNames[i] = vocabulary.getDisplayName(i)
}
self.ruleNames = ruleNames
self.vocabulary = vocabulary
self.decisionToDFA = [DFA]() //new DFA[atn.getNumberOfDecisions()];
@ -123,14 +103,6 @@ public class ParserInterpreter: Parser {
return atn
}
// override
///
/// /@Deprecated
///
public func getTokenNames() -> [String] {
return tokenNames
}
override
public func getVocabulary() -> Vocabulary {
return vocabulary
@ -148,9 +120,9 @@ public class ParserInterpreter: Parser {
/// Begin parsing at startRuleIndex
public func parse(_ startRuleIndex: Int) throws -> ParserRuleContext {
let startRuleStartState: RuleStartState = atn.ruleToStartState[startRuleIndex]
let startRuleStartState = atn.ruleToStartState[startRuleIndex]
let rootContext: InterpreterRuleContext = InterpreterRuleContext(nil, ATNState.INVALID_STATE_NUMBER, startRuleIndex)
let rootContext = InterpreterRuleContext(nil, ATNState.INVALID_STATE_NUMBER, startRuleIndex)
if startRuleStartState.isPrecedenceRule {
try enterRecursionRule(rootContext, startRuleStartState.stateNumber, startRuleIndex, 0)
} else {
@ -158,7 +130,7 @@ public class ParserInterpreter: Parser {
}
while true {
let p: ATNState = getATNState()!
let p = getATNState()!
switch p.getStateType() {
case ATNState.RULE_STOP:
// pop; return from rule
@ -208,7 +180,7 @@ public class ParserInterpreter: Parser {
var altNum: Int
if p.getNumberOfTransitions() > 1 {
try getErrorHandler().sync(self)
let decision: Int = (p as! DecisionState).decision
let decision = (p as! DecisionState).decision
if decision == overrideDecision && _input.index() == overrideDecisionInputIndex {
altNum = overrideDecisionAlt
} else {
@ -218,7 +190,7 @@ public class ParserInterpreter: Parser {
altNum = 1
}
let transition: Transition = p.transition(altNum - 1)
let transition = p.transition(altNum - 1)
switch transition.getSerializationType() {
case Transition.EPSILON:
if try statesNeedingLeftRecursionContext.get(p.stateNumber) &&
@ -252,9 +224,9 @@ public class ParserInterpreter: Parser {
break
case Transition.RULE:
let ruleStartState: RuleStartState = transition.target as! RuleStartState
let ruleIndex: Int = ruleStartState.ruleIndex!
let ctx: InterpreterRuleContext = InterpreterRuleContext(_ctx, p.stateNumber, ruleIndex)
let ruleStartState = transition.target as! RuleStartState
let ruleIndex = ruleStartState.ruleIndex!
let ctx = InterpreterRuleContext(_ctx, p.stateNumber, ruleIndex)
if ruleStartState.isPrecedenceRule {
try enterRecursionRule(ctx, ruleStartState.stateNumber, ruleIndex, (transition as! RuleTransition).precedence)
} else {
@ -263,25 +235,20 @@ public class ParserInterpreter: Parser {
break
case Transition.PREDICATE:
let predicateTransition: PredicateTransition = transition as! PredicateTransition
let predicateTransition = transition as! PredicateTransition
if try !sempred(_ctx!, predicateTransition.ruleIndex, predicateTransition.predIndex) {
throw try ANTLRException.recognition(e: FailedPredicateException(self))
throw ANTLRException.recognition(e: FailedPredicateException(self))
}
break
case Transition.ACTION:
let actionTransition: ActionTransition = transition as! ActionTransition
let actionTransition = transition as! ActionTransition
try action(_ctx, actionTransition.ruleIndex, actionTransition.actionIndex)
break
case Transition.PRECEDENCE:
if !precpred(_ctx!, (transition as! PrecedencePredicateTransition).precedence) {
throw try ANTLRException.recognition(e: FailedPredicateException(self, "precpred(_ctx,\((transition as! PrecedencePredicateTransition).precedence))"))
throw ANTLRException.recognition(e: FailedPredicateException(self, "precpred(_ctx,\((transition as! PrecedencePredicateTransition).precedence))"))
}
break
@ -294,16 +261,16 @@ public class ParserInterpreter: Parser {
}
internal func visitRuleStopState(_ p: ATNState) throws {
let ruleStartState: RuleStartState = atn.ruleToStartState[p.ruleIndex!]
let ruleStartState = atn.ruleToStartState[p.ruleIndex!]
if ruleStartState.isPrecedenceRule {
let parentContext: (ParserRuleContext?, Int) = _parentContextStack.pop()
try unrollRecursionContexts(parentContext.0!)
setState(parentContext.1)
let (parentContext, parentState) = _parentContextStack.pop()
try unrollRecursionContexts(parentContext!)
setState(parentState)
} else {
try exitRule()
}
let ruleTransition: RuleTransition = atn.states[getState()]!.transition(0) as! RuleTransition
let ruleTransition = atn.states[getState()]!.transition(0) as! RuleTransition
setState(ruleTransition.followState.stateNumber)
}

View File

@ -13,10 +13,9 @@
///
public class ProxyErrorListener: ANTLRErrorListener {
private final var delegates: Array<ANTLRErrorListener>
public init(_ delegates: Array<ANTLRErrorListener>) {
private final var delegates: [ANTLRErrorListener]
public init(_ delegates: [ANTLRErrorListener]) {
self.delegates = delegates
}
@ -27,7 +26,7 @@ public class ProxyErrorListener: ANTLRErrorListener {
_ msg: String,
_ e: AnyObject?)
{
for listener: ANTLRErrorListener in delegates {
for listener in delegates {
listener.syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e)
}
}
@ -39,9 +38,9 @@ public class ProxyErrorListener: ANTLRErrorListener {
_ stopIndex: Int,
_ exact: Bool,
_ ambigAlts: BitSet,
_ configs: ATNConfigSet) throws {
for listener: ANTLRErrorListener in delegates {
try listener.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
_ configs: ATNConfigSet) {
for listener in delegates {
listener.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
}
}
@ -51,9 +50,9 @@ public class ProxyErrorListener: ANTLRErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ conflictingAlts: BitSet?,
_ configs: ATNConfigSet) throws {
for listener: ANTLRErrorListener in delegates {
try listener.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs)
_ configs: ATNConfigSet) {
for listener in delegates {
listener.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs)
}
}
@ -63,9 +62,9 @@ public class ProxyErrorListener: ANTLRErrorListener {
_ startIndex: Int,
_ stopIndex: Int,
_ prediction: Int,
_ configs: ATNConfigSet) throws {
for listener: ANTLRErrorListener in delegates {
try listener.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs)
_ configs: ATNConfigSet) {
for listener in delegates {
listener.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs)
}
}
}

View File

@ -11,16 +11,15 @@
/// and what kind of problem occurred.
///
public class RecognitionException<T:ATNSimulator> {
public class RecognitionException {
///
/// The _org.antlr.v4.runtime.Recognizer_ where this exception originated.
///
private final var recognizer: Recognizer<T>?
//Recognizer<AnyObject,ATNSimulator>? ;
private final var recognizer: RecognizerProtocol?
private final var ctx: RuleContext?
private final weak var ctx: RuleContext?
private final var input: IntStream
private final var input: IntStream?
///
/// The current _org.antlr.v4.runtime.Token_ when an error occurred. Since not all streams
@ -29,28 +28,18 @@ public class RecognitionException<T:ATNSimulator> {
///
private var offendingToken: Token!
private var offendingState: Int = -1
private var offendingState = -1
public var message: String?
public init(_ recognizer: Recognizer<T>?,
_ input: IntStream,
_ ctx: ParserRuleContext?) {
self.recognizer = recognizer
self.input = input
self.ctx = ctx
if let recognizer = recognizer {
self.offendingState = recognizer.getState()
}
}
public init(_ message: String,
_ recognizer: Recognizer<T>?,
public init(_ recognizer: RecognizerProtocol?,
_ input: IntStream,
_ ctx: ParserRuleContext?) {
self.message = message
_ ctx: ParserRuleContext? = nil,
_ message: String? = nil) {
self.recognizer = recognizer
self.input = input
self.ctx = ctx
self.message = message
if let recognizer = recognizer {
self.offendingState = recognizer.getState()
}
@ -87,7 +76,6 @@ public class RecognitionException<T:ATNSimulator> {
if let recognizer = recognizer {
return try? recognizer.getATN().getExpectedTokens(offendingState, ctx!)
}
return nil
}
@ -113,10 +101,13 @@ public class RecognitionException<T:ATNSimulator> {
/// where this exception was thrown, or `null` if the stream is not
/// available.
///
public func getInputStream() -> IntStream {
public func getInputStream() -> IntStream? {
return input
}
public func clearInputStream() {
input = nil
}
public func getOffendingToken() -> Token {
return offendingToken
@ -134,7 +125,11 @@ public class RecognitionException<T:ATNSimulator> {
/// - Returns: The recognizer where this exception occurred, or `null` if
/// the recognizer is not available.
///
public func getRecognizer() -> Recognizer<T>? {
public func getRecognizer() -> RecognizerProtocol? {
return recognizer
}
public func clearRecognizer() {
self.recognizer = nil
}
}
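A hedged sketch of how callers typically encounter this type: recognition errors surface wrapped in ANTLRException.recognition (as thrown elsewhere in this change), and the exception carries the offending token and rule context. startRule() is an assumed generated entry rule.
// Hedged sketch: startRule() is an assumed generated entry rule.
do {
    _ = try parser.startRule()
}
catch ANTLRException.recognition(let e) {
    print("recognition error at '\(e.getOffendingToken().getText() ?? "")'")
}
catch {
    print("other error: \(error)")
}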

View File

@ -5,60 +5,53 @@
import Foundation
open class Recognizer<ATNInterpreter:ATNSimulator> {
//public static let EOF: Int = -1
public protocol RecognizerProtocol {
func getATN() -> ATN
func getGrammarFileName() -> String
func getParseInfo() -> ParseInfo?
func getRuleNames() -> [String]
func getSerializedATN() -> String
func getState() -> Int
func getTokenType(_ tokenName: String) -> Int
func getVocabulary() -> Vocabulary
}
open class Recognizer<ATNInterpreter: ATNSimulator>: RecognizerProtocol {
//TODO: WeakKeyDictionary NSMapTable Dictionary MapTable<Vocabulary,HashMap<String, Int>>
private let tokenTypeMapCache = HashMap<Vocabulary,Dictionary<String, Int>>()
private let tokenTypeMapCache = HashMap<Vocabulary, [String : Int]>()
private let ruleIndexMapCache = HashMap<ArrayWrapper<String>,Dictionary<String, Int>>()
private var _listeners: Array<ANTLRErrorListener> = [ConsoleErrorListener.INSTANCE]
private let ruleIndexMapCache = HashMap<ArrayWrapper<String>, [String : Int]>()
private var _listeners: [ANTLRErrorListener] = [ConsoleErrorListener.INSTANCE]
public var _interp: ATNInterpreter!
private var _stateNumber: Int = -1
private var _stateNumber = -1
///
/// mutex for tokenTypeMapCache updates
///
private var tokenTypeMapCacheMutex = Mutex()
private let tokenTypeMapCacheMutex = Mutex()
///
/// mutex for ruleIndexMapCache updates
///
private var ruleIndexMapCacheMutex = Mutex()
/// Used to print out token names like ID during debugging and
/// error reporting. The generated parsers implement a method
/// that overrides this to point to their String[] tokenNames.
///
/// Use _#getVocabulary()_ instead.
///
///
/// /@Deprecated
///
open func getTokenNames() -> [String?]? {
RuntimeException(#function + " must be overridden")
return []
}
private let ruleIndexMapCacheMutex = Mutex()
open func getRuleNames() -> [String] {
RuntimeException(#function + " must be overridden")
return []
fatalError(#function + " must be overridden")
}
///
/// Get the vocabulary used by the recognizer.
///
/// - Returns: A _org.antlr.v4.runtime.Vocabulary_ instance providing information about the
/// vocabulary used by the grammar.
///
open func getVocabulary() -> Vocabulary {
return Vocabulary.fromTokenNames(getTokenNames())
fatalError(#function + " must be overridden")
}
///
@ -66,35 +59,29 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
///
/// Used for XPath and tree pattern compilation.
///
public func getTokenTypeMap() -> Dictionary<String, Int> {
let vocabulary: Vocabulary = getVocabulary()
var result: Dictionary<String, Int>? = self.tokenTypeMapCache[vocabulary]
tokenTypeMapCacheMutex.synchronized {
[unowned self] in
public func getTokenTypeMap() -> [String : Int] {
let vocabulary = getVocabulary()
var result = tokenTypeMapCache[vocabulary]
tokenTypeMapCacheMutex.synchronized { [unowned self] in
if result == nil {
result = Dictionary<String, Int>()
result = [String : Int]()
let length = self.getATN().maxTokenType
for i in 0...length {
let literalName: String? = vocabulary.getLiteralName(i)
if literalName != nil {
result![literalName!] = i
if let literalName = vocabulary.getLiteralName(i) {
result![literalName] = i
}
let symbolicName: String? = vocabulary.getSymbolicName(i)
if symbolicName != nil {
result![symbolicName!] = i
if let symbolicName = vocabulary.getSymbolicName(i) {
result![symbolicName] = i
}
}
result!["EOF"] = CommonToken.EOF
//TODO Result Collections.unmodifiableMap
self.tokenTypeMapCache[vocabulary] = result!
}
}
return result!
}
///
@ -102,26 +89,20 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
///
/// Used for XPath and tree pattern compilation.
///
public func getRuleIndexMap() -> Dictionary<String, Int> {
let ruleNames: [String] = getRuleNames()
public func getRuleIndexMap() -> [String : Int] {
let ruleNames = getRuleNames()
let result: Dictionary<String, Int>? = self.ruleIndexMapCache[ArrayWrapper<String>(ruleNames)]
ruleIndexMapCacheMutex.synchronized {
[unowned self] in
let result = ruleIndexMapCache[ArrayWrapper<String>(ruleNames)]
ruleIndexMapCacheMutex.synchronized { [unowned self] in
if result == nil {
self.ruleIndexMapCache[ArrayWrapper<String>(ruleNames)] = Utils.toMap(ruleNames)
}
}
return result!
}
public func getTokenType(_ tokenName: String) -> Int {
let ttype: Int? = getTokenTypeMap()[tokenName]
if ttype != nil {
return ttype!
}
return CommonToken.INVALID_TYPE
return getTokenTypeMap()[tokenName] ?? CommonToken.INVALID_TYPE
}
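A hedged sketch of the token-type lookup used for XPath and tree-pattern work; the token name "ID" is illustrative and depends on the grammar.
// Hedged sketch: the "ID" token name is illustrative.
let idType = parser.getTokenType("ID")
if idType != CommonToken.INVALID_TYPE {
    print("ID maps to token type \(idType)")
}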
///
@ -132,16 +113,14 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
/// created the interpreter from it.
///
open func getSerializedATN() -> String {
RuntimeException("there is no serialized ATN")
fatalError()
fatalError("there is no serialized ATN")
}
/// For debugging and other purposes, might want the grammar name.
/// Have ANTLR generate an implementation for this method.
///
open func getGrammarFileName() -> String {
RuntimeException(#function + " must be overridden")
return ""
fatalError(#function + " must be overridden")
}
///
@ -150,8 +129,7 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
/// - Returns: The _org.antlr.v4.runtime.atn.ATN_ used by the recognizer for prediction.
///
open func getATN() -> ATN {
RuntimeException(#function + " must be overridden")
fatalError()
fatalError(#function + " must be overridden")
}
///
@ -185,51 +163,14 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
///
/// What is the error header, normally line/character position information?
///
open func getErrorHeader(_ e: AnyObject) -> String {
let line: Int = (e as! RecognitionException).getOffendingToken().getLine()
let charPositionInLine: Int = (e as! RecognitionException).getOffendingToken().getCharPositionInLine()
return "line " + String(line) + ":" + String(charPositionInLine)
}
/// How should a token be displayed in an error message? The default
/// is to display just the text, but during development you might
/// want to have a lot of information spit out. Override in that case
/// to use t.toString() (which, for CommonToken, dumps everything about
/// the token). This is better than forcing you to override a method in
/// your token objects because you don't have to go modify your lexer
/// so that it creates a new Java type.
///
/// This method is not called by the ANTLR 4 Runtime. Specific
/// implementations of _org.antlr.v4.runtime.ANTLRErrorStrategy_ may provide a similar
/// feature when necessary. For example, see
/// _org.antlr.v4.runtime.DefaultErrorStrategy#getTokenErrorDisplay_.
///
///
/// /@Deprecated
///
open func getTokenErrorDisplay(_ t: Token?) -> String {
guard let t = t else {
return "<no token>"
}
var s: String
if let text = t.getText() {
s = text
} else {
if t.getType() == CommonToken.EOF {
s = "<EOF>"
} else {
s = "<\(t.getType())>"
}
}
s = s.replacingOccurrences(of: "\n", with: "\\n")
s = s.replacingOccurrences(of: "\r", with: "\\r")
s = s.replacingOccurrences(of: "\t", with: "\\t")
return "\(s)"
open func getErrorHeader(_ e: RecognitionException) -> String {
let offending = e.getOffendingToken()
let line = offending.getLine()
let charPositionInLine = offending.getCharPositionInLine()
return "line \(line):\(charPositionInLine)"
}
open func addErrorListener(_ listener: ANTLRErrorListener) {
_listeners.append(listener)
}
@ -237,16 +178,13 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
_listeners = _listeners.filter() {
$0 !== listener
}
// _listeners.removeObject(listener);
}
open func removeErrorListeners() {
_listeners.removeAll()
}
open func getErrorListeners() -> Array<ANTLRErrorListener> {
open func getErrorListeners() -> [ANTLRErrorListener] {
return _listeners
}
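A hedged sketch of listener management with the methods above: the default console listener can be removed and replaced with a custom implementation. MyErrorListener, conforming to ANTLRErrorListener, is an assumed type.
// Hedged sketch: MyErrorListener is an assumed ANTLRErrorListener implementation.
parser.removeErrorListeners()              // drops the default ConsoleErrorListener.INSTANCE
parser.addErrorListener(MyErrorListener())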
@ -260,7 +198,7 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
return true
}
open func precpred(_ localctx: RuleContext?, _ precedence: Int) throws -> Bool {
open func precpred(_ localctx: RuleContext?, _ precedence: Int) -> Bool {
return true
}
@ -285,22 +223,18 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
}
open func getInputStream() -> IntStream? {
RuntimeException(#function + "Must be overridden")
fatalError()
fatalError(#function + " must be overridden")
}
open func setInputStream(_ input: IntStream) throws {
RuntimeException(#function + "Must be overridden")
fatalError(#function + " must be overridden")
}
open func getTokenFactory() -> TokenFactory {
RuntimeException(#function + "Must be overridden")
fatalError()
fatalError(#function + " must be overridden")
}
open func setTokenFactory(_ input: TokenFactory) {
RuntimeException(#function + "Must be overridden")
fatalError(#function + " must be overridden")
}
}

View File

@ -56,17 +56,18 @@
///
open class RuleContext: RuleNode {
public static let EMPTY: ParserRuleContext = ParserRuleContext()
public static let EMPTY = ParserRuleContext()
/// What context invoked this rule?
public var parent: RuleContext?
public weak var parent: RuleContext?
/// What state invoked the rule associated with this context?
/// The "return address" is the followState of invokingState
/// If parent is null, this should be -1 this context object represents
/// the start rule.
///
public var invokingState: Int = -1
public var invokingState = -1
override
public init() {
super.init()
@ -79,7 +80,7 @@ open class RuleContext: RuleNode {
}
open func depth() -> Int {
var n: Int = 0
var n = 0
var p: RuleContext? = self
while let pWrap = p {
p = pWrap.parent
@ -131,7 +132,7 @@ open class RuleContext: RuleNode {
return ""
}
let builder: StringBuilder = StringBuilder()
let builder = StringBuilder()
for i in 0..<length {
builder.append((getChild(i) as! ParseTree).getText())
}
@ -159,54 +160,10 @@ open class RuleContext: RuleNode {
return visitor.visitChildren(self)
}
// /// Call this method to view a parse tree in a dialog box visually.
// public func inspect(parser : Parser) -> Future<JDialog> {
// var ruleNames : Array<String> = parser != nil ? Arrays.asList(parser.getRuleNames()) : null;
// return inspect(ruleNames);
// }
//
// public func inspect(ruleNames : Array<String>) -> Future<JDialog> {
// var viewer : TreeViewer = TreeViewer(ruleNames, self);
// return viewer.open();
// }
//
// /// Save this tree in a postscript file
// public func save(parser : Parser, _ fileName : String)
// throws; IOException, PrintException
// {
// var ruleNames : Array<String> = parser != nil ? Arrays.asList(parser.getRuleNames()) : null;
// save(ruleNames, fileName);
// }
//
// /// Save this tree in a postscript file using a particular font name and size
// public func save(parser : Parser, _ fileName : String,
// _ fontName : String, _ fontSize : Int)
// throws; IOException
// {
// var ruleNames : Array<String> = parser != nil ? Arrays.asList(parser.getRuleNames()) : null;
// save(ruleNames, fileName, fontName, fontSize);
// }
//
// /// Save this tree in a postscript file
// public func save(ruleNames : Array<String>, _ fileName : String)
// throws; IOException, PrintException
// {
// Trees.writePS(self, ruleNames, fileName);
// }
//
// /// Save this tree in a postscript file using a particular font name and size
// public func save(ruleNames : Array<String>, _ fileName : String,
// _ fontName : String, _ fontSize : Int)
// throws; IOException
// {
// Trees.writePS(self, ruleNames, fileName, fontName, fontSize);
// }
/// Print out a whole tree, not just a node, in LISP format
/// (root child1 .. childN). Print just a node if this is a leaf.
/// We have to know the recognizer so we can get rule names.
///
open override func toStringTree(_ recog: Parser) -> String {
return Trees.toStringTree(self, recog)
}
@ -214,19 +171,16 @@ open class RuleContext: RuleNode {
/// Print out a whole tree, not just a node, in LISP format
/// (root child1 .. childN). Print just a node if this is a leaf.
///
public func toStringTree(_ ruleNames: Array<String>?) -> String {
public func toStringTree(_ ruleNames: [String]?) -> String {
return Trees.toStringTree(self, ruleNames)
}
open override func toStringTree() -> String {
let info: Array<String>? = nil
return toStringTree(info)
return toStringTree(nil)
}
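// Illustrative sketch, not part of this change: given an existing parse result
// `tree` (a ParserRuleContext) and its `parser`, either call below prints the
// subtree in LISP form, e.g. (expr (term 3) + (term 4)).
//
//   print(tree.toStringTree(parser))
//   print(tree.toStringTree(parser.getRuleNames()))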
open override var description: String {
let p1: Array<String>? = nil
let p2: RuleContext? = nil
return toString(p1, p2)
return toString(nil, nil)
}
open override var debugDescription: String {
@ -237,31 +191,31 @@ open class RuleContext: RuleNode {
return toString(recog, ParserRuleContext.EMPTY)
}
public final func toString(_ ruleNames: Array<String>) -> String {
public final func toString(_ ruleNames: [String]) -> String {
return toString(ruleNames, nil)
}
// recog null unless ParserRuleContext, in which case we use subclass toString(...)
open func toString<T>(_ recog: Recognizer<T>?, _ stop: RuleContext) -> String {
let ruleNames: [String]? = recog != nil ? recog!.getRuleNames() : nil
let ruleNamesList: Array<String>? = ruleNames ?? nil
return toString(ruleNamesList, stop)
let ruleNames = recog?.getRuleNames()
return toString(ruleNames, stop)
}
open func toString(_ ruleNames: Array<String>?, _ stop: RuleContext?) -> String {
let buf: StringBuilder = StringBuilder()
open func toString(_ ruleNames: [String]?, _ stop: RuleContext?) -> String {
let buf = StringBuilder()
var p: RuleContext? = self
buf.append("[")
while let pWrap = p, pWrap !== stop {
if ruleNames == nil {
if let ruleNames = ruleNames {
let ruleIndex = pWrap.getRuleIndex()
let ruleIndexInRange = (ruleIndex >= 0 && ruleIndex < ruleNames.count)
let ruleName = (ruleIndexInRange ? ruleNames[ruleIndex] : String(ruleIndex))
buf.append(ruleName)
}
else {
if !pWrap.isEmpty() {
buf.append(pWrap.invokingState)
}
} else {
let ruleIndex: Int = pWrap.getRuleIndex()
let ruleIndexInRange: Bool = ruleIndex >= 0 && ruleIndex < ruleNames!.count
let ruleName: String = ruleIndexInRange ? ruleNames![ruleIndex] : String(ruleIndex)
buf.append(ruleName)
}
if pWrap.parent != nil && (ruleNames != nil || !pWrap.parent!.isEmpty()) {

View File

@ -98,5 +98,7 @@ public protocol Token: class, CustomStringConvertible {
///
func getInputStream() -> CharStream?
func getTokenSourceAndStream() -> TokenSourceAndStream
var visited: Bool { get set }
}

View File

@ -8,7 +8,6 @@
/// the error handling strategy (to create missing tokens). Notifying the parser
/// of a new factory means that it notifies its token source and error strategy.
///
public protocol TokenFactory {
//typealias Symbol
@ -16,10 +15,33 @@ public protocol TokenFactory {
/// error handling strategy. If text != nil, then the start and stop positions
/// are wiped to -1 and the text override is set in the CommonToken.
///
func create(_ source: (TokenSource?, CharStream?), _ type: Int, _ text: String?,
func create(_ source: TokenSourceAndStream, _ type: Int, _ text: String?,
_ channel: Int, _ start: Int, _ stop: Int,
_ line: Int, _ charPositionInLine: Int) -> Token
/// Generically useful
func create(_ type: Int, _ text: String) -> Token
}
/**
Holds the references to the TokenSource and CharStream used to create a Token.
They are kept together to reduce the memory footprint by having one instance of
TokenSourceAndStream shared across many tokens. The references here are weak
to avoid retain cycles.
*/
public class TokenSourceAndStream {
///
/// An empty TokenSourceAndStream which is used as the default value of
/// _#source_ for tokens that do not have a source.
///
public static let EMPTY = TokenSourceAndStream()
public weak var tokenSource: TokenSource?
public weak var stream: CharStream?
public init(_ tokenSource: TokenSource? = nil, _ stream: CharStream? = nil) {
self.tokenSource = tokenSource
self.stream = stream
}
}
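// Illustrative sketch, not part of this change: one TokenSourceAndStream can be
// shared by every token created from a given lexer/stream pair, and because both
// properties are weak it does not keep the lexer or its CharStream alive.
// `lexer` and `stream` are assumed to already exist.
//
//   let shared = TokenSourceAndStream(lexer, stream)
//   let tokenSource = shared.tokenSource          // weak; nil once the lexer is gone
//   let orphanSource = TokenSourceAndStream.EMPTY // default for tokens without a source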

View File

@ -0,0 +1,389 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
import Foundation
/** Do not buffer up the entire char stream. It does keep a small buffer
* for efficiency and also buffers while a mark exists (set by the
* lookahead prediction in parser). "Unbuffered" here refers to the fact
* that it doesn't buffer all data, not that it loads characters on demand.
*
* Before 4.7, this class used the default environment encoding to convert
* bytes to UTF-16, and held the UTF-16 bytes in the buffer as chars.
*
* As of 4.7, the class uses UTF-8 by default, and the buffer holds Unicode
* code points as ints.
*/
open class UnbufferedCharStream: CharStream {
private let bufferSize: Int
/**
* A moving window buffer of the data being scanned. While there's a marker,
* we keep adding to the buffer. Otherwise, {@link #consume consume()} resets so
* we start filling at index 0 again.
*/
internal var data: [Int]
/**
* The number of characters currently in {@link #data data}.
*
* <p>This is not the buffer capacity, that's {@code data.length}.</p>
*/
internal var n = 0
/**
* 0..n-1 index into {@link #data data} of next character.
*
* <p>The {@code LA(1)} character is {@code data[p]}. If {@code p == n}, we are
* out of buffered characters.</p>
*/
internal var p = 0
/**
* Count up with {@link #mark mark()} and down with
* {@link #release release()}. When we {@code release()} the last mark,
* {@code numMarkers} reaches 0 and we reset the buffer. Copy
* {@code data[p]..data[n-1]} to {@code data[0]..data[(n-1)-p]}.
*/
internal var numMarkers = 0
/**
* This is the {@code LA(-1)} character for the current position.
*/
internal var lastChar = -1
/**
* When {@code numMarkers > 0}, this is the {@code LA(-1)} character for the
* first character in {@link #data data}. Otherwise, this is unspecified.
*/
internal var lastCharBufferStart = 0
/**
* Absolute character index. It's the index of the character about to be
* read via {@code LA(1)}. Goes from 0 to the number of characters in the
* entire stream, although the stream size is unknown before the end is
* reached.
*/
internal var currentCharIndex = 0
internal let input: InputStream
private var unicodeIterator: UnicodeScalarStreamIterator
/** The name or source of this char stream. */
public var name: String = ""
public init(_ input: InputStream, _ bufferSize: Int = 256) {
self.input = input
self.bufferSize = bufferSize
self.data = [Int](repeating: 0, count: bufferSize)
let si = UInt8StreamIterator(input)
self.unicodeIterator = UnicodeScalarStreamIterator(si)
}
public func consume() throws {
if try LA(1) == CommonToken.EOF {
throw ANTLRError.illegalState(msg: "cannot consume EOF")
}
// buf always has at least data[p==0] in this method due to ctor
lastChar = data[p] // track last char for LA(-1)
if p == n - 1 && numMarkers == 0 {
n = 0
p = -1 // p++ will leave this at 0
lastCharBufferStart = lastChar
}
p += 1
currentCharIndex += 1
sync(1)
}
/**
* Make sure we have 'need' elements from current position {@link #p p}.
* Last valid {@code p} index is {@code data.length-1}. {@code p+need-1} is
* the char index 'need' elements ahead. If we need 1 element,
* {@code (p+1-1)==p} must be less than {@code data.length}.
*/
internal func sync(_ want: Int) {
let need = (p + want - 1) - n + 1 // how many more elements we need?
if need > 0 {
fill(need)
}
}
/**
* Add {@code toAdd} characters to the buffer. Returns the number of characters
* actually added to the buffer. If the return value is less than {@code toAdd},
* then EOF was reached before {@code toAdd} characters could be added.
*/
@discardableResult internal func fill(_ toAdd: Int) -> Int {
for i in 0 ..< toAdd {
if n > 0 && data[n - 1] == CommonToken.EOF {
return i
}
guard let c = nextChar() else {
return i
}
add(c)
}
return n
}
/**
* Override to provide different source of characters than
* {@link #input input}.
*/
internal func nextChar() -> Int? {
if let next = unicodeIterator.next() {
return Int(next.value)
}
else if unicodeIterator.hasErrorOccurred {
return nil
}
else {
return nil
}
}
internal func add(_ c: Int) {
if n >= data.count {
data += [Int](repeating: 0, count: data.count)
}
data[n] = c
n += 1
}
public func LA(_ i: Int) throws -> Int {
if i == -1 {
return lastChar // special case
}
sync(i)
let index = p + i - 1
if index < 0 {
throw ANTLRError.indexOutOfBounds(msg: "")
}
if index >= n {
return CommonToken.EOF
}
return data[index]
}
/**
* Return a marker that we can release later.
*
* <p>The specific marker value used for this class allows for some level of
* protection against misuse where {@code seek()} is called on a mark or
* {@code release()} is called in the wrong order.</p>
*/
public func mark() -> Int {
if numMarkers == 0 {
lastCharBufferStart = lastChar
}
let mark = -numMarkers - 1
numMarkers += 1
return mark
}
/** Decrement number of markers, resetting buffer if we hit 0.
* @param marker
*/
public func release(_ marker: Int) throws {
let expectedMark = -numMarkers
if marker != expectedMark {
preconditionFailure("release() called with an invalid marker.")
}
numMarkers -= 1
if numMarkers == 0 && p > 0 {
// release buffer when we can, but don't do unnecessary work
// Copy data[p]..data[n-1] to data[0]..data[(n-1)-p], reset ptrs
// p is last valid char; move nothing if p==n as we have no valid char
if p == n {
if data.count != bufferSize {
data = [Int](repeating: 0, count: bufferSize)
}
n = 0
}
else {
data = Array(data[p ..< n])
n -= p
}
p = 0
lastCharBufferStart = lastChar
}
}
public func index() -> Int {
return currentCharIndex
}
/** Seek to absolute character index, which might not be in the current
* sliding window. Move {@code p} to {@code index-bufferStartIndex}.
*/
public func seek(_ index_: Int) throws {
var index = index_
if index == currentCharIndex {
return
}
if index > currentCharIndex {
sync(index - currentCharIndex)
index = min(index, getBufferStartIndex() + n - 1)
}
// index equal to bufferStartIndex should set p to 0
let i = index - getBufferStartIndex()
if i < 0 {
throw ANTLRError.illegalArgument(msg: "cannot seek to negative index \(index)")
}
else if i >= n {
let si = getBufferStartIndex()
let ei = si + n
let msg = "seek to index outside buffer: \(index) not in \(si)..\(ei)"
throw ANTLRError.unsupportedOperation(msg: msg)
}
p = i
currentCharIndex = index
if p == 0 {
lastChar = lastCharBufferStart
}
else {
lastChar = data[p - 1]
}
}
public func size() -> Int {
preconditionFailure("Unbuffered stream cannot know its size")
}
public func getSourceName() -> String {
return name
}
public func getText(_ interval: Interval) throws -> String {
if interval.a < 0 || interval.b < interval.a - 1 {
throw ANTLRError.illegalArgument(msg: "invalid interval")
}
let bufferStartIndex = getBufferStartIndex()
if n > 0 &&
data[n - 1] == CommonToken.EOF &&
interval.a + interval.length() > bufferStartIndex + n {
throw ANTLRError.illegalArgument(msg: "the interval extends past the end of the stream")
}
if interval.a < bufferStartIndex || interval.b >= bufferStartIndex + n {
let msg = "interval \(interval) outside buffer: \(bufferStartIndex)...\(bufferStartIndex + n - 1)"
throw ANTLRError.unsupportedOperation(msg: msg)
}
if interval.b < interval.a {
// The EOF token.
return ""
}
// convert from absolute to local index
let i = interval.a - bufferStartIndex
let j = interval.b - bufferStartIndex
// Convert from Int codepoints to a String.
let codepoints = data[i ... j].map { Character(Unicode.Scalar($0)!) }
return String(codepoints)
}
internal func getBufferStartIndex() -> Int {
return currentCharIndex - p
}
}
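// Illustrative sketch, not part of this change: typical use of the unbuffered
// stream, assuming a Foundation InputStream named `input` is already open.
// While a mark is outstanding the window keeps growing; release() lets the
// buffer shrink back to its initial size.
//
//   let chars = UnbufferedCharStream(input)   // 256-element window by default
//   let m = chars.mark()
//   let c = try chars.LA(1)                   // next code point, or CommonToken.EOF
//   if c != CommonToken.EOF { try chars.consume() }
//   try chars.release(m)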
fileprivate struct UInt8StreamIterator: IteratorProtocol {
private static let bufferSize = 1024
private let stream: InputStream
private var buffer = [UInt8](repeating: 0, count: UInt8StreamIterator.bufferSize)
private var buffGen: IndexingIterator<ArraySlice<UInt8>>
var hasErrorOccurred = false
init(_ stream: InputStream) {
self.stream = stream
self.buffGen = buffer[0..<0].makeIterator()
}
mutating func next() -> UInt8? {
if let result = buffGen.next() {
return result
}
if hasErrorOccurred {
return nil
}
switch stream.streamStatus {
case .notOpen, .writing, .closed:
preconditionFailure()
case .atEnd:
return nil
case .error:
hasErrorOccurred = true
return nil
case .opening, .open, .reading:
break
}
let count = stream.read(&buffer, maxLength: buffer.count)
if count < 0 {
hasErrorOccurred = true
return nil
}
else if count == 0 {
return nil
}
buffGen = buffer.prefix(count).makeIterator()
return buffGen.next()
}
}
fileprivate struct UnicodeScalarStreamIterator: IteratorProtocol {
private var streamIterator: UInt8StreamIterator
private var codec = Unicode.UTF8()
var hasErrorOccurred = false
init(_ streamIterator: UInt8StreamIterator) {
self.streamIterator = streamIterator
}
mutating func next() -> Unicode.Scalar? {
if streamIterator.hasErrorOccurred {
hasErrorOccurred = true
return nil
}
switch codec.decode(&streamIterator) {
case .scalarValue(let scalar):
return scalar
case .emptyInput:
return nil
case .error:
hasErrorOccurred = true
return nil
}
}
}

View File

@ -4,7 +4,7 @@
*/
public class UnbufferedTokenStream<T>: TokenStream {
public class UnbufferedTokenStream: TokenStream {
internal var tokenSource: TokenSource
///
@ -274,10 +274,7 @@ public class UnbufferedTokenStream<T>: TokenStream {
public func size() -> Int {
RuntimeException("Unbuffered stream cannot know its size")
fatalError()
fatalError("Unbuffered stream cannot know its size")
}

View File

@ -73,9 +73,9 @@ public class ATN {
/// the rule surrounding `s`. In other words, the set will be
/// restricted to tokens reachable staying within `s`'s rule.
///
public func nextTokens(_ s: ATNState, _ ctx: RuleContext?)throws -> IntervalSet {
let anal: LL1Analyzer = LL1Analyzer(self)
let next: IntervalSet = try anal.LOOK(s, ctx)
public func nextTokens(_ s: ATNState, _ ctx: RuleContext?) -> IntervalSet {
let anal = LL1Analyzer(self)
let next = anal.LOOK(s, ctx)
return next
}
@ -84,14 +84,14 @@ public class ATN {
/// staying in the same rule. _org.antlr.v4.runtime.Token#EPSILON_ is in the set if we reach the end of
/// the rule.
///
public func nextTokens(_ s: ATNState) throws -> IntervalSet {
public func nextTokens(_ s: ATNState) -> IntervalSet {
if let nextTokenWithinRule = s.nextTokenWithinRule
{
return nextTokenWithinRule
}
let intervalSet = try nextTokens(s, nil)
let intervalSet = nextTokens(s, nil)
s.nextTokenWithinRule = intervalSet
try intervalSet.setReadonly(true)
try! intervalSet.setReadonly(true)
return intervalSet
}
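// Illustrative sketch, not part of this change (`atn`, `s` and `ctx` are an ATN,
// an ATNState and a RuleContext assumed to exist): without a context the
// lookahead stops at the end of s's rule; with one it follows the call stack.
//
//   let withinRule = atn.nextTokens(s)        // cached, read-only IntervalSet
//   let withContext = atn.nextTokens(s, ctx)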
@ -151,27 +151,27 @@ public class ATN {
}
var ctx: RuleContext? = context
let s: ATNState = states[stateNumber]!
var following: IntervalSet = try nextTokens(s)
let s = states[stateNumber]!
var following = nextTokens(s)
if !following.contains(CommonToken.EPSILON) {
return following
}
let expected: IntervalSet = try IntervalSet()
try expected.addAll(following)
try expected.remove(CommonToken.EPSILON)
let expected = IntervalSet()
try! expected.addAll(following)
try! expected.remove(CommonToken.EPSILON)
while let ctxWrap = ctx, ctxWrap.invokingState >= 0 && following.contains(CommonToken.EPSILON) {
let invokingState: ATNState = states[ctxWrap.invokingState]!
let rt: RuleTransition = invokingState.transition(0) as! RuleTransition
following = try nextTokens(rt.followState)
try expected.addAll(following)
try expected.remove(CommonToken.EPSILON)
let invokingState = states[ctxWrap.invokingState]!
let rt = invokingState.transition(0) as! RuleTransition
following = nextTokens(rt.followState)
try! expected.addAll(following)
try! expected.remove(CommonToken.EPSILON)
ctx = ctxWrap.parent
}
if following.contains(CommonToken.EPSILON) {
try expected.add(CommonToken.EOF)
try! expected.add(CommonToken.EOF)
}
return expected

View File

@ -150,13 +150,12 @@ public class ATNConfig: Hashable, CustomStringConvertible {
///
public var hashValue: Int {
var hashCode: Int = MurmurHash.initialize(7)
var hashCode = MurmurHash.initialize(7)
hashCode = MurmurHash.update(hashCode, state.stateNumber)
hashCode = MurmurHash.update(hashCode, alt)
hashCode = MurmurHash.update(hashCode, context)
hashCode = MurmurHash.update(hashCode, semanticContext)
hashCode = MurmurHash.finish(hashCode, 4)
return hashCode
return MurmurHash.finish(hashCode, 4)
}

View File

@ -27,7 +27,7 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
/// fields; in particular, conflictingAlts is set after
/// we've made this readonly.
///
internal final var readonly: Bool = false
internal final var readonly = false
///
/// All configs but hashed by (s, i, _, pi) not including context. Wiped out
@ -38,11 +38,11 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
///
/// Track the elements as they are added to the set; supports get(i)
///
public final var configs: Array<ATNConfig> = Array<ATNConfig>()
public final var configs = [ATNConfig]()
// TODO: these fields make me pretty uncomfortable but nice to pack up info together, saves recomputation
// TODO: can we track conflicts as they are added to save scanning configs later?
public final var uniqueAlt: Int = 0
public final var uniqueAlt = 0
//TODO no default
///
/// Currently this is only used when we detect SLL conflict; this does
@ -54,9 +54,9 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
// Used in parser and lexer. In lexer, it indicates we hit a pred
// while computing a closure operation. Don't make a DFA state from this.
public final var hasSemanticContext: Bool = false
public final var hasSemanticContext = false
//TODO no default
public final var dipsIntoOuterContext: Bool = false
public final var dipsIntoOuterContext = false
//TODO no default
///
@ -66,7 +66,7 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
///
public final var fullCtx: Bool
private var cachedHashCode: Int = -1
private var cachedHashCode = -1
public init(_ fullCtx: Bool) {
configLookup = LookupDictionary()
@ -76,9 +76,9 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
self.init(true)
}
public convenience init(_ old: ATNConfigSet) throws {
public convenience init(_ old: ATNConfigSet) {
self.init(old.fullCtx)
try addAll(old)
try! addAll(old)
self.uniqueAlt = old.uniqueAlt
self.conflictingAlts = old.conflictingAlts
self.hasSemanticContext = old.hasSemanticContext
@ -108,7 +108,6 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?) throws -> Bool {
if readonly {
throw ANTLRError.illegalState(msg: "This set is readonly")
}
if config.semanticContext != SemanticContext.NONE {
@ -125,10 +124,9 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
return true
}
// a previous (s,i,pi,_), merge with it and save result
let rootIsWildcard: Bool = !fullCtx
let rootIsWildcard = !fullCtx
let merged: PredictionContext =
PredictionContext.merge(existing.context!, config.context!, rootIsWildcard, &mergeCache)
let merged = PredictionContext.merge(existing.context!, config.context!, rootIsWildcard, &mergeCache)
// no need to check for existing.context, config.context in cache
// since only way to create new graphs is "call rule" and here. We
@ -154,16 +152,14 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
///
/// Return a List holding list of configs
///
public final func elements() -> Array<ATNConfig> {
public final func elements() -> [ATNConfig] {
return configs
}
public final func getStates() -> Set<ATNState> {
let length = configs.count
var states: Set<ATNState> = Set<ATNState>(minimumCapacity: length)
for i in 0..<length {
states.insert(configs[i].state)
var states = Set<ATNState>(minimumCapacity: configs.count)
for config in configs {
states.insert(config.state)
}
return states
}
@ -176,21 +172,19 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
///
/// - since: 4.3
///
public final func getAlts() throws -> BitSet {
let alts: BitSet = BitSet()
let length = configs.count
for i in 0..<length {
try alts.set(configs[i].alt)
public final func getAlts() -> BitSet {
let alts = BitSet()
for config in configs {
try! alts.set(config.alt)
}
return alts
}
public final func getPredicates() -> Array<SemanticContext> {
var preds: Array<SemanticContext> = Array<SemanticContext>()
let length = configs.count
for i in 0..<length {
if configs[i].semanticContext != SemanticContext.NONE {
preds.append(configs[i].semanticContext)
public final func getPredicates() -> [SemanticContext] {
var preds = [SemanticContext]()
for config in configs {
if config.semanticContext != SemanticContext.NONE {
preds.append(config.semanticContext)
}
}
return preds
@ -203,21 +197,19 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
public final func optimizeConfigs(_ interpreter: ATNSimulator) throws {
if readonly {
throw ANTLRError.illegalState(msg: "This set is readonly")
}
if configLookup.isEmpty {
return
}
let length = configs.count
for i in 0..<length {
configs[i].context = interpreter.getCachedContext(configs[i].context!)
for config in configs {
config.context = interpreter.getCachedContext(config.context!)
}
}
@discardableResult
public final func addAll(_ coll: ATNConfigSet) throws -> Bool {
for c: ATNConfig in coll.configs {
for c in coll.configs {
try add(c)
}
return false
@ -266,7 +258,6 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
public final func clear() throws {
if readonly {
throw ANTLRError.illegalState(msg: "This set is readonly")
}
configs.removeAll()
cachedHashCode = -1
@ -284,7 +275,7 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
}
public var description: String {
let buf: StringBuilder = StringBuilder()
let buf = StringBuilder()
buf.append(elements().map({ $0.description }))
if hasSemanticContext {
buf.append(",hasSemanticContext=")
@ -313,18 +304,15 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
/// return configLookup.toArray(a);
///
private final func configHash(_ stateNumber: Int,_ context: PredictionContext?) -> Int{
var hashCode: Int = MurmurHash.initialize(7)
var hashCode = MurmurHash.initialize(7)
hashCode = MurmurHash.update(hashCode, stateNumber)
hashCode = MurmurHash.update(hashCode, context)
hashCode = MurmurHash.finish(hashCode, 2)
return hashCode
return MurmurHash.finish(hashCode, 2)
}
public final func getConflictingAltSubsets() throws -> Array<BitSet> {
public final func getConflictingAltSubsets() -> [BitSet] {
let length = configs.count
let configToAlts: HashMap<Int, BitSet> = HashMap<Int, BitSet>(count: length)
let configToAlts = HashMap<Int, BitSet>(count: length)
for i in 0..<length {
let hash = configHash(configs[i].state.stateNumber, configs[i].context)
@ -336,15 +324,15 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
configToAlts[hash] = alts
}
try alts.set(configs[i].alt)
try! alts.set(configs[i].alt)
}
return configToAlts.values
}
public final func getStateToAltMap() throws -> HashMap<ATNState, BitSet> {
public final func getStateToAltMap() -> HashMap<ATNState, BitSet> {
let length = configs.count
let m: HashMap<ATNState, BitSet> = HashMap<ATNState, BitSet>(count: length) //minimumCapacity: length)
let m = HashMap<ATNState, BitSet>(count: length)
for i in 0..<length {
var alts: BitSet
@ -355,42 +343,37 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
m[configs[i].state] = alts
}
try alts.set(configs[i].alt)
try! alts.set(configs[i].alt)
}
return m
}
//for DFAState
public final func getAltSet() -> Set<Int>? {
var alts: Set<Int> = Set<Int>()
let length = configs.count
for i in 0..<length {
alts.insert(configs[i].alt)
}
if alts.isEmpty {
if configs.isEmpty {
return nil
}
var alts = Set<Int>()
for config in configs {
alts.insert(config.alt)
}
return alts
}
//for DiagnosticErrorListener
public final func getAltBitSet() throws -> BitSet {
let result: BitSet = BitSet()
let length = configs.count
for i in 0..<length {
try result.set(configs[i].alt)
public final func getAltBitSet() -> BitSet {
let result = BitSet()
for config in configs {
try! result.set(config.alt)
}
return result
}
//LexerATNSimulator
public final var firstConfigWithRuleStopState: ATNConfig? {
let length = configs.count
for i in 0..<length {
if configs[i].state is RuleStopState {
return configs[i]
for config in configs {
if config.state is RuleStopState {
return config
}
}
@ -400,110 +383,104 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
//ParserATNSimulator
public final func getUniqueAlt() -> Int {
var alt: Int = ATN.INVALID_ALT_NUMBER
let length = configs.count
for i in 0..<length {
var alt = ATN.INVALID_ALT_NUMBER
for config in configs {
if alt == ATN.INVALID_ALT_NUMBER {
alt = configs[i].alt // found first alt
} else {
if configs[i].alt != alt {
alt = config.alt // found first alt
} else if config.alt != alt {
return ATN.INVALID_ALT_NUMBER
}
}
}
return alt
}
public final func removeAllConfigsNotInRuleStopState(_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?,_ lookToEndOfRule: Bool,_ atn: ATN) throws -> ATNConfigSet {
public final func removeAllConfigsNotInRuleStopState(_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?,_ lookToEndOfRule: Bool,_ atn: ATN) -> ATNConfigSet {
if PredictionMode.allConfigsInRuleStopStates(self) {
return self
}
let result: ATNConfigSet = ATNConfigSet(fullCtx)
let length = configs.count
for i in 0..<length {
if configs[i].state is RuleStopState {
try result.add(configs[i],&mergeCache)
let result = ATNConfigSet(fullCtx)
for config in configs {
if config.state is RuleStopState {
try! result.add(config, &mergeCache)
continue
}
if lookToEndOfRule && configs[i].state.onlyHasEpsilonTransitions() {
let nextTokens: IntervalSet = try atn.nextTokens(configs[i].state)
if lookToEndOfRule && config.state.onlyHasEpsilonTransitions() {
let nextTokens = atn.nextTokens(config.state)
if nextTokens.contains(CommonToken.EPSILON) {
let endOfRuleState: ATNState = atn.ruleToStopState[configs[i].state.ruleIndex!]
try result.add(ATNConfig(configs[i], endOfRuleState), &mergeCache)
let endOfRuleState = atn.ruleToStopState[config.state.ruleIndex!]
try! result.add(ATNConfig(config, endOfRuleState), &mergeCache)
}
}
}
return result
}
public final func applyPrecedenceFilter(_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?,_ parser: Parser,_ _outerContext: ParserRuleContext!) throws -> ATNConfigSet {
let configSet: ATNConfigSet = ATNConfigSet(fullCtx)
let length = configs.count
let statesFromAlt1: HashMap<Int, PredictionContext> = HashMap<Int, PredictionContext>(count: length)
for i in 0..<length {
let configSet = ATNConfigSet(fullCtx)
let statesFromAlt1 = HashMap<Int, PredictionContext>(count: configs.count)
for config in configs {
// handle alt 1 first
if configs[i].alt != 1 {
if config.alt != 1 {
continue
}
let updatedContext: SemanticContext? = try configs[i].semanticContext.evalPrecedence(parser, _outerContext)
let updatedContext = try config.semanticContext.evalPrecedence(parser, _outerContext)
if updatedContext == nil {
// the configuration was eliminated
continue
}
statesFromAlt1[configs[i].state.stateNumber] = configs[i].context
if updatedContext != configs[i].semanticContext {
try configSet.add(ATNConfig(configs[i], updatedContext!), &mergeCache)
statesFromAlt1[config.state.stateNumber] = config.context
if updatedContext != config.semanticContext {
try! configSet.add(ATNConfig(config, updatedContext!), &mergeCache)
} else {
try configSet.add(configs[i],&mergeCache)
try! configSet.add(config, &mergeCache)
}
}
for i in 0..<length {
if configs[i].alt == 1 {
for config in configs {
if config.alt == 1 {
// already handled
continue
}
if !configs[i].isPrecedenceFilterSuppressed() {
if !config.isPrecedenceFilterSuppressed() {
///
/// In the future, this elimination step could be updated to also
/// filter the prediction context for alternatives predicting alt>1
/// (basically a graph subtraction algorithm).
///
let context: PredictionContext? = statesFromAlt1[configs[i].state.stateNumber]
if context != nil && context == configs[i].context {
let context = statesFromAlt1[config.state.stateNumber]
if context != nil && context == config.context {
// eliminated
continue
}
}
try configSet.add(configs[i], &mergeCache)
try! configSet.add(config, &mergeCache)
}
return configSet
}
internal func getPredsForAmbigAlts(_ ambigAlts: BitSet,
_ nalts: Int) throws -> [SemanticContext?]? {
var altToPred: [SemanticContext?]? = [SemanticContext?](repeating: nil, count: nalts + 1) //new SemanticContext[nalts + 1];
let length = configs.count
for i in 0..<length {
if try ambigAlts.get(configs[i].alt) {
altToPred![configs[i].alt] = SemanticContext.or(altToPred![configs[i].alt], configs[i].semanticContext)
internal func getPredsForAmbigAlts(_ ambigAlts: BitSet, _ nalts: Int) -> [SemanticContext?]? {
var altToPred = [SemanticContext?](repeating: nil, count: nalts + 1)
for config in configs {
if try! ambigAlts.get(config.alt) {
altToPred[config.alt] = SemanticContext.or(altToPred[config.alt], config.semanticContext)
}
}
var nPredAlts: Int = 0
var nPredAlts = 0
for i in 1...nalts {
if altToPred![i] == nil {
altToPred![i] = SemanticContext.NONE
} else {
if altToPred![i] != SemanticContext.NONE {
nPredAlts += 1
if altToPred[i] == nil {
altToPred[i] = SemanticContext.NONE
}
else if altToPred[i] != SemanticContext.NONE {
nPredAlts += 1
}
}
@ -513,21 +490,16 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
// }
// nonambig alts are null in altToPred
if nPredAlts == 0 {
altToPred = nil
return (nPredAlts == 0 ? nil : altToPred)
}
return altToPred
}
public final func getAltThatFinishedDecisionEntryRule() throws -> Int {
let alts: IntervalSet = try IntervalSet()
let length = configs.count
for i in 0..<length {
if configs[i].getOuterContextDepth() > 0 ||
(configs[i].state is RuleStopState &&
configs[i].context!.hasEmptyPath()) {
try alts.add(configs[i].alt)
public final func getAltThatFinishedDecisionEntryRule() -> Int {
let alts = IntervalSet()
for config in configs {
if config.getOuterContextDepth() > 0 ||
(config.state is RuleStopState &&
config.context!.hasEmptyPath()) {
try! alts.add(config.alt)
}
}
if alts.size() == 0 {
@ -548,39 +520,36 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
///
public final func splitAccordingToSemanticValidity(
_ outerContext: ParserRuleContext,
_ evalSemanticContext:( SemanticContext,ParserRuleContext,Int,Bool) throws -> Bool) throws -> (ATNConfigSet, ATNConfigSet) {
let succeeded: ATNConfigSet = ATNConfigSet(fullCtx)
let failed: ATNConfigSet = ATNConfigSet(fullCtx)
let length = configs.count
for i in 0..<length {
if configs[i].semanticContext != SemanticContext.NONE {
let predicateEvaluationResult: Bool = try evalSemanticContext(configs[i].semanticContext, outerContext, configs[i].alt,fullCtx)
_ evalSemanticContext: (SemanticContext, ParserRuleContext, Int, Bool) throws -> Bool) rethrows -> (ATNConfigSet, ATNConfigSet) {
let succeeded = ATNConfigSet(fullCtx)
let failed = ATNConfigSet(fullCtx)
for config in configs {
if config.semanticContext != SemanticContext.NONE {
let predicateEvaluationResult = try evalSemanticContext(config.semanticContext, outerContext, config.alt,fullCtx)
if predicateEvaluationResult {
try succeeded.add(configs[i])
try! succeeded.add(config)
} else {
try failed.add(configs[i])
try! failed.add(config)
}
} else {
try succeeded.add(configs[i])
try! succeeded.add(config)
}
}
return (succeeded, failed)
}
//public enum PredictionMode
public final func dupConfigsWithoutSemanticPredicates() throws -> ATNConfigSet {
let dup: ATNConfigSet = ATNConfigSet()
let length = configs.count
for i in 0..<length {
let c = ATNConfig(configs[i], SemanticContext.NONE)
try dup.add(c)
public final func dupConfigsWithoutSemanticPredicates() -> ATNConfigSet {
let dup = ATNConfigSet()
for config in configs {
let c = ATNConfig(config, SemanticContext.NONE)
try! dup.add(c)
}
return dup
}
public final var hasConfigInRuleStopState: Bool {
let length = configs.count
for i in 0..<length {
if configs[i].state is RuleStopState {
for config in configs {
if config.state is RuleStopState {
return true
}
}
@ -589,9 +558,8 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
}
public final var allConfigsInRuleStopStates: Bool {
let length = configs.count
for i in 0..<length {
if !(configs[i].state is RuleStopState) {
for config in configs {
if !(config.state is RuleStopState) {
return false
}
}

View File

@ -117,26 +117,24 @@ public class ATNDeserializer {
}
var p: Int = 0
let version: Int = data[p].unicodeValue //toInt(data[p++]);
let version = data[p].unicodeValue
p += 1
if version != ATNDeserializer.SERIALIZED_VERSION {
let reason: String = "Could not deserialize ATN with version \(version) (expected \(ATNDeserializer.SERIALIZED_VERSION))."
let reason = "Could not deserialize ATN with version \(version) (expected \(ATNDeserializer.SERIALIZED_VERSION))."
throw ANTLRError.unsupportedOperation(msg: reason)
}
let uuid: UUID = toUUID(data, p)
p += 8
if !ATNDeserializer.SUPPORTED_UUIDS.contains(uuid) {
let reason: String = "Could not deserialize ATN with UUID \(uuid) (expected \(ATNDeserializer.SERIALIZED_UUID) or a legacy UUID)."
let reason = "Could not deserialize ATN with UUID \(uuid) (expected \(ATNDeserializer.SERIALIZED_UUID) or a legacy UUID)."
throw ANTLRError.unsupportedOperation(msg: reason)
}
let supportsPrecedencePredicates: Bool = isFeatureSupported(ATNDeserializer.ADDED_PRECEDENCE_TRANSITIONS, uuid)
let supportsLexerActions: Bool = isFeatureSupported(ATNDeserializer.ADDED_LEXER_ACTIONS, uuid)
let supportsPrecedencePredicates = isFeatureSupported(ATNDeserializer.ADDED_PRECEDENCE_TRANSITIONS, uuid)
let supportsLexerActions = isFeatureSupported(ATNDeserializer.ADDED_LEXER_ACTIONS, uuid)
let grammarType: ATNType = ATNType(rawValue: toInt(data[p]))!
let grammarType = ATNType(rawValue: toInt(data[p]))!
p += 1
let maxTokenType: Int = toInt(data[p])
p += 1
@ -145,12 +143,12 @@ public class ATNDeserializer {
//
// STATES
//
var loopBackStateNumbers: Array<(LoopEndState, Int)> = Array<(LoopEndState, Int)>()
var endStateNumbers: Array<(BlockStartState, Int)> = Array<(BlockStartState, Int)>()
let nstates: Int = toInt(data[p])
var loopBackStateNumbers = [(LoopEndState, Int)]()
var endStateNumbers = [(BlockStartState, Int)]()
let nstates = toInt(data[p])
p += 1
for _ in 0..<nstates {
let stype: Int = toInt(data[p])
let stype = toInt(data[p])
p += 1
// ignore bad type of states
if stype == ATNState.INVALID_TYPE {
@ -158,26 +156,24 @@ public class ATNDeserializer {
continue
}
var ruleIndex: Int = toInt(data[p])
var ruleIndex = toInt(data[p])
p += 1
if ruleIndex == Int.max {
// Character.MAX_VALUE
ruleIndex = -1
}
let s: ATNState = try stateFactory(stype, ruleIndex)!
let s = try stateFactory(stype, ruleIndex)!
if stype == ATNState.LOOP_END {
// special case
let loopBackStateNumber: Int = toInt(data[p])
let loopBackStateNumber = toInt(data[p])
p += 1
loopBackStateNumbers.append((s as! LoopEndState, loopBackStateNumber))
} else {
if let s = s as? BlockStartState {
let endStateNumber: Int = toInt(data[p])
} else if let s = s as? BlockStartState {
let endStateNumber = toInt(data[p])
p += 1
endStateNumbers.append((s, endStateNumber))
}
}
atn.addState(s)
}
@ -272,12 +268,12 @@ public class ATNDeserializer {
var sets: Array<IntervalSet> = Array<IntervalSet>()
// First, deserialize sets with 16-bit arguments <= U+FFFF.
try readSets(data, &p, &sets, readUnicodeInt)
readSets(data, &p, &sets, readUnicodeInt)
// Next, if the ATN was serialized with the Unicode SMP feature,
// deserialize sets with 32-bit arguments <= U+10FFFF.
if isFeatureSupported(ATNDeserializer.ADDED_UNICODE_SMP, uuid) {
try readSets(data, &p, &sets, readUnicodeInt32)
readSets(data, &p, &sets, readUnicodeInt32)
}
//
@ -548,23 +544,23 @@ public class ATNDeserializer {
return result
}
private func readSets(_ data: [Character], _ p: inout Int, _ sets: inout Array<IntervalSet>, _ readUnicode: ([Character], inout Int) -> Int) throws {
let nsets: Int = toInt(data[p])
private func readSets(_ data: [Character], _ p: inout Int, _ sets: inout Array<IntervalSet>, _ readUnicode: ([Character], inout Int) -> Int) {
let nsets = toInt(data[p])
p += 1
for _ in 0..<nsets {
let nintervals: Int = toInt(data[p])
let nintervals = toInt(data[p])
p += 1
let set: IntervalSet = try IntervalSet()
let set = IntervalSet()
sets.append(set)
let containsEof: Bool = toInt(data[p]) != 0
let containsEof = (toInt(data[p]) != 0)
p += 1
if containsEof {
try set.add(-1)
try! set.add(-1)
}
for _ in 0..<nintervals {
try set.add(readUnicode(data, &p), readUnicode(data, &p))
try! set.add(readUnicode(data, &p), readUnicode(data, &p))
}
}
}
@ -724,27 +720,27 @@ public class ATNDeserializer {
//
// SETS
//
var sets: Array<IntervalSet> = Array<IntervalSet>()
let nsets: Int = dict["nsets"] as! Int
var sets = [IntervalSet]()
let nsets = dict["nsets"] as! Int
let intervalSet = dict["IntervalSet"] as! [Dictionary<String, Any>]
for i in 0..<nsets {
let setBuilder = intervalSet[i]
let nintervals: Int = setBuilder["size"] as! Int
let nintervals = setBuilder["size"] as! Int
let set: IntervalSet = try IntervalSet()
let set = IntervalSet()
sets.append(set)
let containsEof: Bool = (setBuilder["containsEof"] as! Int) != 0
let containsEof = (setBuilder["containsEof"] as! Int) != 0
if containsEof {
try set.add(-1)
try! set.add(-1)
}
let intervalsBuilder = setBuilder["Intervals"] as! [Dictionary<String, Any>]
let intervalsBuilder = setBuilder["Intervals"] as! [[String : Any]]
for j in 0..<nintervals {
let vals = intervalsBuilder[j]
try set.add((vals["a"] as! Int), (vals["b"] as! Int))
try! set.add((vals["a"] as! Int), (vals["b"] as! Int))
}
}
@ -759,15 +755,15 @@ public class ATNDeserializer {
for transitionsBuilder in allTransitions {
for transition in transitionsBuilder {
let src: Int = transition["src"] as! Int
let trg: Int = transition["trg"] as! Int
let ttype: Int = transition["edgeType"] as! Int
let arg1: Int = transition["arg1"] as! Int
let arg2: Int = transition["arg2"] as! Int
let arg3: Int = transition["arg3"] as! Int
let trans: Transition = try edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
let src = transition["src"] as! Int
let trg = transition["trg"] as! Int
let ttype = transition["edgeType"] as! Int
let arg1 = transition["arg1"] as! Int
let arg2 = transition["arg2"] as! Int
let arg3 = transition["arg3"] as! Int
let trans = try edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
let srcState: ATNState = atn.states[src]!
let srcState = atn.states[src]!
srcState.addTransition(trans)
}
@ -775,13 +771,13 @@ public class ATNDeserializer {
// edges for rule stop states can be derived, so they aren't serialized
for state: ATNState? in atn.states {
for state in atn.states {
if let state = state {
let length = state.getNumberOfTransitions()
for i in 0..<length {
let t: Transition = state.transition(i)
let t = state.transition(i)
if let ruleTransition = t as? RuleTransition {
var outermostPrecedenceReturn: Int = -1
var outermostPrecedenceReturn = -1
if let targetRuleIndex = ruleTransition.target.ruleIndex {
if atn.ruleToStartState[targetRuleIndex].isPrecedenceRule {
if ruleTransition.precedence == 0 {
@ -789,7 +785,7 @@ public class ATNDeserializer {
}
}
let returnTransition: EpsilonTransition = EpsilonTransition(ruleTransition.followState, outermostPrecedenceReturn)
let returnTransition = EpsilonTransition(ruleTransition.followState, outermostPrecedenceReturn)
atn.ruleToStopState[targetRuleIndex].addTransition(returnTransition)
}
}
@ -797,39 +793,36 @@ public class ATNDeserializer {
}
}
for state: ATNState? in atn.states {
for state in atn.states {
if let state = state as? BlockStartState {
// we need to know the end state to set its start state
if let stateEndState = state.endState {
// block end states can only be associated to a single block start state
if stateEndState.startState != nil {
throw ANTLRError.illegalState(msg: "state.endState.startState != nil")
}
stateEndState.startState = state
}
else {
throw ANTLRError.illegalState(msg: "state.endState == nil")
}
}
if let loopbackState = state as? PlusLoopbackState {
let length = loopbackState.getNumberOfTransitions()
for i in 0..<length {
let target: ATNState = loopbackState.transition(i).target
if target is PlusBlockStartState {
(target as! PlusBlockStartState).loopBackState = loopbackState
let target = loopbackState.transition(i).target
if let startState = target as? PlusBlockStartState {
startState.loopBackState = loopbackState
}
}
} else {
if let loopbackState = state as? StarLoopbackState {
let length = loopbackState.getNumberOfTransitions()
for i in 0..<length {
let target: ATNState = loopbackState.transition(i).target
if target is StarLoopEntryState {
(target as! StarLoopEntryState).loopBackState = loopbackState
let target = loopbackState.transition(i).target
if let entryState = target as? StarLoopEntryState {
entryState.loopBackState = loopbackState
}
}
}
@ -840,13 +833,12 @@ public class ATNDeserializer {
//
// DECISIONS
//
let ndecisions: [Int] = dict["decisionToState"] as! [Int]
let ndecisions = dict["decisionToState"] as! [Int]
let length = ndecisions.count
for i in 0..<length {
let s: Int = ndecisions[i]
let decState: DecisionState = atn.states[s] as! DecisionState
let s = ndecisions[i]
let decState = atn.states[s] as! DecisionState
atn.appendDecisionToState(decState)
//atn.decisionToState.append(decState)
decState.decision = i
}
@ -854,40 +846,34 @@ public class ATNDeserializer {
// LEXER ACTIONS
//
if atn.grammarType == ATNType.lexer {
let lexerActionsBuilder = dict["lexerActions"] as! [Dictionary<String, Any>]
let lexerActionsBuilder = dict["lexerActions"] as! [[String : Any]]
if supportsLexerActions {
atn.lexerActions = [LexerAction](repeating: LexerAction(), count: lexerActionsBuilder.count) //[toInt(data[p++])];
atn.lexerActions = [LexerAction](repeating: LexerAction(), count: lexerActionsBuilder.count)
let length = atn.lexerActions.count
for i in 0..<length {
let actionTypeValue = lexerActionsBuilder[i]["actionType"] as! Int
let actionType: LexerActionType = LexerActionType(rawValue: actionTypeValue)! //LexerActionType.values()[toInt(data[p++])];
let data1: Int = lexerActionsBuilder[i]["a"] as! Int
let data2: Int = lexerActionsBuilder[i]["b"] as! Int
let lexerAction: LexerAction = lexerActionFactory(actionType, data1, data2)
let actionType = LexerActionType(rawValue: actionTypeValue)!
let data1 = lexerActionsBuilder[i]["a"] as! Int
let data2 = lexerActionsBuilder[i]["b"] as! Int
let lexerAction = lexerActionFactory(actionType, data1, data2)
atn.lexerActions[i] = lexerAction
}
} else {
// for compatibility with older serialized ATNs, convert the old
// serialized action index for action transitions to the new
// form, which is the index of a LexerCustomAction
var legacyLexerActions: Array<LexerAction> = Array<LexerAction>()
for state: ATNState? in atn.states {
var legacyLexerActions = [LexerAction]()
for state in atn.states {
if let state = state {
let length = state.getNumberOfTransitions()
for i in 0..<length {
let transition: Transition = state.transition(i)
if !(transition is ActionTransition) {
guard let transition = state.transition(i) as? ActionTransition else {
continue
}
let ruleIndex: Int = (transition as! ActionTransition).ruleIndex
let actionIndex: Int = (transition as! ActionTransition).actionIndex
let lexerAction: LexerCustomAction = LexerCustomAction(ruleIndex, actionIndex)
let ruleIndex = transition.ruleIndex
let actionIndex = transition.actionIndex
let lexerAction = LexerCustomAction(ruleIndex, actionIndex)
state.setTransition(i, ActionTransition(transition.target, ruleIndex, legacyLexerActions.count, false))
legacyLexerActions.append(lexerAction)
}
@ -912,11 +898,11 @@ public class ATNDeserializer {
}
for i in 0..<length {
let bypassStart: BasicBlockStartState = BasicBlockStartState()
let bypassStart = BasicBlockStartState()
bypassStart.ruleIndex = i
atn.addState(bypassStart)
let bypassStop: BlockEndState = BlockEndState()
let bypassStop = BlockEndState()
bypassStop.ruleIndex = i
atn.addState(bypassStop)
@ -930,7 +916,7 @@ public class ATNDeserializer {
if atn.ruleToStartState[i].isPrecedenceRule {
// wrap from the beginning of the rule to the StarLoopEntryState
endState = nil
for state: ATNState? in atn.states {
for state in atn.states {
if let state = state {
if state.ruleIndex != i {
continue
@ -940,7 +926,7 @@ public class ATNDeserializer {
continue
}
let maybeLoopEndState: ATNState = state.transition(state.getNumberOfTransitions() - 1).target
let maybeLoopEndState = state.transition(state.getNumberOfTransitions() - 1).target
if !(maybeLoopEndState is LoopEndState) {
continue
}
@ -963,9 +949,9 @@ public class ATNDeserializer {
}
// all non-excluded transitions that currently target end state need to target blockEnd instead
for state: ATNState? in atn.states {
for state in atn.states {
if let state = state {
for transition: Transition in state.transitions {
for transition in state.transitions {
if transition === excludeTransition! {
continue
}
@ -979,7 +965,7 @@ public class ATNDeserializer {
// all transitions leaving the rule start state need to leave blockStart instead
while atn.ruleToStartState[i].getNumberOfTransitions() > 0 {
let transition: Transition = atn.ruleToStartState[i].removeTransition(atn.ruleToStartState[i].getNumberOfTransitions() - 1)
let transition = atn.ruleToStartState[i].removeTransition(atn.ruleToStartState[i].getNumberOfTransitions() - 1)
bypassStart.addTransition(transition)
}
@ -987,7 +973,7 @@ public class ATNDeserializer {
atn.ruleToStartState[i].addTransition(EpsilonTransition(bypassStart))
bypassStop.addTransition(EpsilonTransition(endState!))
let matchState: ATNState = BasicState()
let matchState = BasicState()
atn.addState(matchState)
matchState.addTransition(AtomTransition(bypassStop, atn.ruleToTokenType[i]))
bypassStart.addTransition(EpsilonTransition(matchState))
@ -1011,7 +997,7 @@ public class ATNDeserializer {
/// - parameter atn: The ATN.
///
internal func markPrecedenceDecisions(_ atn: ATN) {
for state: ATNState? in atn.states {
for state in atn.states {
if let state = state as? StarLoopEntryState {
///
@ -1021,7 +1007,7 @@ public class ATNDeserializer {
///
if let stateRuleIndex = state.ruleIndex {
if atn.ruleToStartState[stateRuleIndex].isPrecedenceRule {
let maybeLoopEndState: ATNState = state.transition(state.getNumberOfTransitions() - 1).target
let maybeLoopEndState = state.transition(state.getNumberOfTransitions() - 1).target
if maybeLoopEndState is LoopEndState {
if maybeLoopEndState.epsilonOnlyTransitions && maybeLoopEndState.transition(0).target is RuleStopState {
state.precedenceRuleDecision = true
@ -1035,7 +1021,7 @@ public class ATNDeserializer {
internal func verifyATN(_ atn: ATN) throws {
// verify assumptions
for state: ATNState? in atn.states {
for state in atn.states {
guard let state = state else {
continue
}
@ -1084,8 +1070,7 @@ public class ATNDeserializer {
try checkCondition((state as! BlockEndState).startState != nil)
}
if state is DecisionState {
let decisionState: DecisionState = state as! DecisionState
if let decisionState = state as? DecisionState {
try checkCondition(decisionState.getNumberOfTransitions() <= 1 || decisionState.decision >= 0)
} else {
try checkCondition(state.getNumberOfTransitions() <= 1 || state is RuleStopState)
@ -1109,7 +1094,7 @@ public class ATNDeserializer {
_ type: Int, _ src: Int, _ trg: Int,
_ arg1: Int, _ arg2: Int, _ arg3: Int,
_ sets: Array<IntervalSet>) throws -> Transition {
let target: ATNState = atn.states[trg]!
let target = atn.states[trg]!
switch type {
case Transition.EPSILON: return EpsilonTransition(target)
case Transition.RANGE:
@ -1119,10 +1104,10 @@ public class ATNDeserializer {
return RangeTransition(target, arg1, arg2)
}
case Transition.RULE:
let rt: RuleTransition = RuleTransition(atn.states[arg1] as! RuleStartState, arg2, arg3, target)
let rt = RuleTransition(atn.states[arg1] as! RuleStartState, arg2, arg3, target)
return rt
case Transition.PREDICATE:
let pt: PredicateTransition = PredicateTransition(target, arg1, arg2, arg3 != 0)
let pt = PredicateTransition(target, arg1, arg2, arg3 != 0)
return pt
case Transition.PRECEDENCE:
return PrecedencePredicateTransition(target, arg1)
@ -1133,17 +1118,14 @@ public class ATNDeserializer {
return AtomTransition(target, arg1)
}
case Transition.ACTION:
let a: ActionTransition = ActionTransition(target, arg1, arg2, arg3 != 0)
return a
return ActionTransition(target, arg1, arg2, arg3 != 0)
case Transition.SET: return SetTransition(target, sets[arg1])
case Transition.NOT_SET: return NotSetTransition(target, sets[arg1])
case Transition.WILDCARD: return WildcardTransition(target)
default:
throw ANTLRError.illegalState(msg: "The specified transition type is not valid.")
}
}
internal func stateFactory(_ type: Int, _ ruleIndex: Int) throws -> ATNState? {
@ -1197,12 +1179,6 @@ public class ATNDeserializer {
case .type:
return LexerTypeAction(data1)
//default:
}
// let message : String = "The specified lexer action type \(type) is not valid."
// RuntimeException(message)
}
}
}

View File

@ -8,23 +8,6 @@
import Foundation
open class ATNSimulator {
///
/// - Use _org.antlr.v4.runtime.atn.ATNDeserializer#SERIALIZED_VERSION_ instead.
///
public static let SERIALIZED_VERSION: Int = {
return ATNDeserializer.SERIALIZED_VERSION
}()
///
/// This is the current serialized UUID.
/// - Use _org.antlr.v4.runtime.atn.ATNDeserializer#checkCondition(boolean)_ instead.
///
public static let SERIALIZED_UUID: UUID = {
return (ATNDeserializer.SERIALIZED_UUID as UUID)
}()
///
/// Must distinguish between missing edge and edge we know leads nowhere
///
@ -34,7 +17,7 @@ open class ATNSimulator {
return error
}()
public var atn: ATN
public let atn: ATN
///
/// The context cache maps all PredictionContext objects that are equals()
@ -67,7 +50,7 @@ open class ATNSimulator {
}
open func reset() {
RuntimeException(" must overriden ")
fatalError(#function + " must be overridden")
}
///
@ -96,55 +79,11 @@ open class ATNSimulator {
//TODO: synced (sharedContextCache!)
//synced (sharedContextCache!) {
let visited: HashMap<PredictionContext, PredictionContext> =
HashMap<PredictionContext, PredictionContext>()
let visited = HashMap<PredictionContext, PredictionContext>()
return PredictionContext.getCachedContext(context,
sharedContextCache!,
visited)
//}
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#deserialize_ instead.
///
public static func deserialize(_ data: [Character]) throws -> ATN {
return try ATNDeserializer().deserialize(data)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#checkCondition(boolean)_ instead.
///
public static func checkCondition(_ condition: Bool) throws {
try ATNDeserializer().checkCondition(condition)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#checkCondition(boolean, String)_ instead.
///
public static func checkCondition(_ condition: Bool, _ message: String) throws {
try ATNDeserializer().checkCondition(condition, message)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#toInt_ instead.
///
public func toInt(_ c: Character) -> Int {
return toInt(c)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#toInt32_ instead.
///
public func toInt32(_ data: [Character], _ offset: Int) -> Int {
return toInt32(data, offset)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#toLong_ instead.
///
public func toLong(_ data: [Character], _ offset: Int) -> Int64 {
return toLong(data, offset)
}
public static func edgeFactory(_ atn: ATN,
@ -153,12 +92,4 @@ open class ATNSimulator {
_ sets: Array<IntervalSet>) throws -> Transition {
return try ATNDeserializer().edgeFactory(atn, type, src, trg, arg1, arg2, arg3, sets)
}
///
/// - note: Use _org.antlr.v4.runtime.atn.ATNDeserializer#stateFactory_ instead.
///
public static func stateFactory(_ type: Int, _ ruleIndex: Int) throws -> ATNState {
return try ATNDeserializer().stateFactory(type, ruleIndex)!
}
}

View File

@ -183,8 +183,7 @@ public class ATNState: Hashable, CustomStringConvertible {
}
public func getStateType() -> Int {
RuntimeException(#function + " must be overridden")
return 0
fatalError(#function + " must be overridden")
}
public final func onlyHasEpsilonTransitions() -> Bool {

View File

@ -27,8 +27,8 @@ public final class AtomTransition: Transition, CustomStringConvertible {
}
override
public func labelIntervalSet() throws -> IntervalSet? {
return try IntervalSet.of(label)
public func labelIntervalSet() -> IntervalSet? {
return IntervalSet(label)
}
override

View File

@ -28,18 +28,18 @@ public class LL1Analyzer {
/// - parameter s: the ATN state
/// - returns: the expected symbols for each outgoing transition of `s`.
///
public func getDecisionLookahead(_ s: ATNState?) throws -> [IntervalSet?]? {
public func getDecisionLookahead(_ s: ATNState?) -> [IntervalSet?]? {
guard let s = s else {
return nil
}
let length = s.getNumberOfTransitions()
var look: [IntervalSet?] = [IntervalSet?](repeating: nil, count: length)
var look = [IntervalSet?](repeating: nil, count: length)
for alt in 0..<length {
look[alt] = try IntervalSet()
var lookBusy: Set<ATNConfig> = Set<ATNConfig>()
let seeThruPreds: Bool = false // fail to get lookahead upon pred
try _LOOK(s.transition(alt).target, nil, PredictionContext.EMPTY,
look[alt] = IntervalSet()
var lookBusy = Set<ATNConfig>()
let seeThruPreds = false // fail to get lookahead upon pred
_LOOK(s.transition(alt).target, nil, PredictionContext.EMPTY,
look[alt]!, &lookBusy, BitSet(), seeThruPreds, false)
// Wipe out lookahead for this alternative if we found nothing
// or we had a predicate when we !seeThruPreds
@ -66,8 +66,8 @@ public class LL1Analyzer {
/// - returns: The set of tokens that can follow `s` in the ATN in the
/// specified `ctx`.
///
public func LOOK(_ s: ATNState, _ ctx: RuleContext?) throws -> IntervalSet {
return try LOOK(s, nil, ctx)
public func LOOK(_ s: ATNState, _ ctx: RuleContext?) -> IntervalSet {
return LOOK(s, nil, ctx)
}
///
@ -89,13 +89,12 @@ public class LL1Analyzer {
/// specified `ctx`.
///
public func LOOK(_ s: ATNState, _ stopState: ATNState?, _ ctx: RuleContext?) throws -> IntervalSet {
let r: IntervalSet = try IntervalSet()
let seeThruPreds: Bool = true // ignore preds; get all lookahead
let lookContext: PredictionContext? = ctx != nil ? PredictionContext.fromRuleContext(s.atn!, ctx) : nil
public func LOOK(_ s: ATNState, _ stopState: ATNState?, _ ctx: RuleContext?) -> IntervalSet {
let r = IntervalSet()
let seeThruPreds = true // ignore preds; get all lookahead
let lookContext = ctx != nil ? PredictionContext.fromRuleContext(s.atn!, ctx) : nil
var config = Set<ATNConfig>()
try _LOOK(s, stopState, lookContext,
r, &config, BitSet(), seeThruPreds, true)
_LOOK(s, stopState, lookContext, r, &config, BitSet(), seeThruPreds, true)
return r
}
@ -135,13 +134,10 @@ public class LL1Analyzer {
_ look: IntervalSet,
_ lookBusy: inout Set<ATNConfig>,
_ calledRuleStack: BitSet,
_ seeThruPreds: Bool, _ addEOF: Bool) throws {
_ seeThruPreds: Bool,
_ addEOF: Bool) {
// print ("_LOOK(\(s.stateNumber), ctx=\(ctx)");
//TODO var c : ATNConfig = ATNConfig(s, 0, ctx);
if s.description == "273" {
var s = 0
}
var c: ATNConfig = ATNConfig(s, 0, ctx)
let c = ATNConfig(s, 0, ctx)
if lookBusy.contains(c) {
return
} else {
@ -150,12 +146,12 @@ public class LL1Analyzer {
if s == stopState {
guard let ctx = ctx else {
try look.add(CommonToken.EPSILON)
try! look.add(CommonToken.EPSILON)
return
}
if ctx.isEmpty() && addEOF {
try look.add(CommonToken.EOF)
try! look.add(CommonToken.EOF)
return
}
@ -163,75 +159,64 @@ public class LL1Analyzer {
if s is RuleStopState {
guard let ctx = ctx else {
try look.add(CommonToken.EPSILON)
try! look.add(CommonToken.EPSILON)
return
}
if ctx.isEmpty() && addEOF {
try look.add(CommonToken.EOF)
try! look.add(CommonToken.EOF)
return
}
if ctx != PredictionContext.EMPTY {
// run thru all possible stack tops in ctx
let length = ctx.size()
for i in 0..<length {
var returnState: ATNState = atn.states[(ctx.getReturnState(i))]!
var removed: Bool = try calledRuleStack.get(returnState.ruleIndex!)
try calledRuleStack.clear(returnState.ruleIndex!)
try self._LOOK(returnState, stopState, ctx.getParent(i), look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
defer {
let returnState = atn.states[(ctx.getReturnState(i))]!
let removed = try! calledRuleStack.get(returnState.ruleIndex!)
try! calledRuleStack.clear(returnState.ruleIndex!)
_LOOK(returnState, stopState, ctx.getParent(i), look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
if removed {
try! calledRuleStack.set(returnState.ruleIndex!)
}
}
}
return
}
}
var n: Int = s.getNumberOfTransitions()
let n = s.getNumberOfTransitions()
for i in 0..<n {
var t: Transition = s.transition(i)
if type(of: t) == RuleTransition.self {
if try calledRuleStack.get((t as! RuleTransition).target.ruleIndex!) {
let t = s.transition(i)
if let rt = t as? RuleTransition {
if try! calledRuleStack.get(rt.target.ruleIndex!) {
continue
}
var newContext: PredictionContext =
SingletonPredictionContext.create(ctx, (t as! RuleTransition).followState.stateNumber)
try calledRuleStack.set((t as! RuleTransition).target.ruleIndex!)
try _LOOK(t.target, stopState, newContext, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
defer {
try! calledRuleStack.clear((t as! RuleTransition).target.ruleIndex!)
let newContext = SingletonPredictionContext.create(ctx, rt.followState.stateNumber)
try! calledRuleStack.set(rt.target.ruleIndex!)
_LOOK(t.target, stopState, newContext, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
try! calledRuleStack.clear(rt.target.ruleIndex!)
}
} else {
if t is AbstractPredicateTransition {
else if t is AbstractPredicateTransition {
if seeThruPreds {
try _LOOK(t.target, stopState, ctx, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
_LOOK(t.target, stopState, ctx, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
try look.add(HIT_PRED)
try! look.add(HIT_PRED)
}
} else {
if t.isEpsilon() {
try _LOOK(t.target, stopState, ctx, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
if type(of: t) == WildcardTransition.self {
try look.addAll(IntervalSet.of(CommonToken.MIN_USER_TOKEN_TYPE, atn.maxTokenType))
} else {
var set: IntervalSet? = try t.labelIntervalSet()
}
else if t.isEpsilon() {
_LOOK(t.target, stopState, ctx, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
}
else if t is WildcardTransition {
try! look.addAll(IntervalSet.of(CommonToken.MIN_USER_TOKEN_TYPE, atn.maxTokenType))
}
else {
var set = t.labelIntervalSet()
if set != nil {
if t is NotSetTransition {
set = try set!.complement(IntervalSet.of(CommonToken.MIN_USER_TOKEN_TYPE, atn.maxTokenType)) as? IntervalSet
}
try look.addAll(set)
}
}
set = set!.complement(IntervalSet.of(CommonToken.MIN_USER_TOKEN_TYPE, atn.maxTokenType)) as? IntervalSet
}
try! look.addAll(set)
}
}
}
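// Illustration only, not part of this commit: with the throwing variants removed,
// lookahead queries no longer need `try`. A hypothetical call site, assuming
// LL1Analyzer keeps its single-argument init(_ atn: ATN) initializer:
func followSet(_ atn: ATN, _ s: ATNState, _ ctx: RuleContext?) -> IntervalSet {
    let analyzer = LL1Analyzer(atn)
    // Full lookahead set for `s` in the given rule invocation context.
    return analyzer.LOOK(s, ctx)
}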

@ -77,15 +77,14 @@ public class LexerATNConfig: ATNConfig {
}*/
public var hashValue: Int {
var hashCode: Int = MurmurHash.initialize(7)
var hashCode = MurmurHash.initialize(7)
hashCode = MurmurHash.update(hashCode, state.stateNumber)
hashCode = MurmurHash.update(hashCode, alt)
hashCode = MurmurHash.update(hashCode, context)
hashCode = MurmurHash.update(hashCode, semanticContext)
hashCode = MurmurHash.update(hashCode, passedThroughNonGreedyDecision ? 1 : 0)
hashCode = MurmurHash.update(hashCode, lexerActionExecutor)
hashCode = MurmurHash.finish(hashCode, 6)
return hashCode
return MurmurHash.finish(hashCode, 6)
}
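// Illustration only, not part of this commit: the hashValue implementations touched
// in the surrounding classes all follow the same MurmurHash initialize/update/finish
// idiom. A hypothetical two-field example (field names are made up):
struct ExampleHashKey {
    let actionType: Int
    let channel: Int

    var hashValue: Int {
        var hash = MurmurHash.initialize()
        hash = MurmurHash.update(hash, actionType)
        hash = MurmurHash.update(hash, channel)
        return MurmurHash.finish(hash, 2)   // 2 == number of update calls folded in
    }
}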

@ -11,11 +11,11 @@
///
open class LexerATNSimulator: ATNSimulator {
public static let debug: Bool = false
public let dfa_debug: Bool = false
public static let debug = false
public let dfa_debug = false
public static let MIN_DFA_EDGE: Int = 0
public static let MAX_DFA_EDGE: Int = 127
public static let MIN_DFA_EDGE = 0
public static let MAX_DFA_EDGE = 127
// forces unicode to stay in ATN
///
@ -50,7 +50,7 @@ open class LexerATNSimulator: ATNSimulator {
}
internal let recog: Lexer?
internal weak var recog: Lexer?
///
/// The current token's starting index into the character stream.
@ -58,21 +58,21 @@ open class LexerATNSimulator: ATNSimulator {
/// DFA did not have a previous accept state. In this case, we use the
/// ATN-generated exception object.
///
internal var startIndex: Int = -1
internal var startIndex = -1
///
/// line number 1..n within the input
///
public var line: Int = 1
public var line = 1
///
/// The index of the character relative to the beginning of the line 0..n-1
///
public var charPositionInLine: Int = 0
public var charPositionInLine = 0
public final var decisionToDFA: [DFA]
internal var mode: Int = Lexer.DEFAULT_MODE
internal var mode = Lexer.DEFAULT_MODE
///
/// mutex for DFAState change
@ -88,9 +88,9 @@ open class LexerATNSimulator: ATNSimulator {
/// Used during DFA/ATN exec to record the most recent accept configuration info
///
internal final var prevAccept: SimState = SimState()
internal final var prevAccept = SimState()
public static var match_calls: Int = 0
public static var match_calls = 0
public convenience init(_ atn: ATN, _ decisionToDFA: [DFA],
_ sharedContextCache: PredictionContextCache) {
@ -116,11 +116,11 @@ open class LexerATNSimulator: ATNSimulator {
open func match(_ input: CharStream, _ mode: Int) throws -> Int {
LexerATNSimulator.match_calls += 1
self.mode = mode
var mark: Int = input.mark()
var mark = input.mark()
do {
self.startIndex = input.index()
self.prevAccept.reset()
var dfa: DFA = decisionToDFA[mode]
var dfa = decisionToDFA[mode]
defer {
try! input.release(mark)
}
@ -146,31 +146,30 @@ open class LexerATNSimulator: ATNSimulator {
override
open func clearDFA() {
for d in 0..<decisionToDFA.count {
decisionToDFA[d] = DFA(atn.getDecisionState(d)!, d)
}
}
internal func matchATN(_ input: CharStream) throws -> Int {
let startState: ATNState = atn.modeToStartState[mode]
let startState = atn.modeToStartState[mode]
if LexerATNSimulator.debug {
print("matchATN mode \(mode) start: \(startState)\n")
}
let old_mode: Int = mode
let old_mode = mode
let s0_closure: ATNConfigSet = try computeStartState(input, startState)
let suppressEdge: Bool = s0_closure.hasSemanticContext
let s0_closure = try computeStartState(input, startState)
let suppressEdge = s0_closure.hasSemanticContext
s0_closure.hasSemanticContext = false
let next: DFAState = addDFAState(s0_closure)
let next = addDFAState(s0_closure)
if !suppressEdge {
decisionToDFA[mode].s0 = next
}
let predict: Int = try execATN(input, next)
let predict = try execATN(input, next)
if LexerATNSimulator.debug {
print("DFA after matchATN: \(decisionToDFA[old_mode].toLexerString())")
@ -190,14 +189,13 @@ open class LexerATNSimulator: ATNSimulator {
captureSimState(prevAccept, input, ds0)
}
var t: Int = try input.LA(1)
var t = try input.LA(1)
var s: DFAState = ds0 // s is current/from DFA state
var s = ds0 // s is current/from DFA state
while true {
// while more work
if LexerATNSimulator.debug {
print("execATN loop starting closure: \(s.configs)\n")
}
@ -268,7 +266,7 @@ open class LexerATNSimulator: ATNSimulator {
return nil
}
let target: DFAState? = s.edges[t - LexerATNSimulator.MIN_DFA_EDGE]
let target = s.edges[t - LexerATNSimulator.MIN_DFA_EDGE]
if LexerATNSimulator.debug && target != nil {
print("reuse state \(s.stateNumber) edge to \(target!.stateNumber)")
}
@ -290,7 +288,7 @@ open class LexerATNSimulator: ATNSimulator {
///
internal func computeTargetState(_ input: CharStream, _ s: DFAState, _ t: Int) throws -> DFAState {
let reach: ATNConfigSet = OrderedATNConfigSet()
let reach = OrderedATNConfigSet()
// if we don't find an existing DFA state
// Fill reach starting from closure, following t transitions
@ -316,7 +314,7 @@ open class LexerATNSimulator: ATNSimulator {
internal func failOrAccept(_ prevAccept: SimState, _ input: CharStream,
_ reach: ATNConfigSet, _ t: Int) throws -> Int {
if let dfaState = prevAccept.dfaState {
let lexerActionExecutor: LexerActionExecutor? = dfaState.lexerActionExecutor
let lexerActionExecutor = dfaState.lexerActionExecutor
try accept(input, lexerActionExecutor, startIndex,
prevAccept.index, prevAccept.line, prevAccept.charPos)
return dfaState.prediction
@ -326,7 +324,6 @@ open class LexerATNSimulator: ATNSimulator {
return CommonToken.EOF
}
throw ANTLRException.recognition(e: LexerNoViableAltException(recog, input, startIndex, reach))
}
}
@ -338,12 +335,12 @@ open class LexerATNSimulator: ATNSimulator {
internal func getReachableConfigSet(_ input: CharStream, _ closureConfig: ATNConfigSet, _ reach: ATNConfigSet, _ t: Int) throws {
// this is used to skip processing for configs which have a lower priority
// than a config that already reached an accept state for the same rule
var skipAlt: Int = ATN.INVALID_ALT_NUMBER
for c: ATNConfig in closureConfig.configs {
var skipAlt = ATN.INVALID_ALT_NUMBER
for c in closureConfig.configs {
guard let c = c as? LexerATNConfig else {
continue
}
let currentAltReachedAcceptState: Bool = c.alt == skipAlt
let currentAltReachedAcceptState = (c.alt == skipAlt)
if currentAltReachedAcceptState && c.hasPassedThroughNonGreedyDecision() {
continue
}
@ -353,17 +350,17 @@ open class LexerATNSimulator: ATNSimulator {
}
let n: Int = c.state.getNumberOfTransitions()
let n = c.state.getNumberOfTransitions()
for ti in 0..<n {
// for each transition
let trans: Transition = c.state.transition(ti)
let trans = c.state.transition(ti)
if let target = getReachableTarget(trans, t) {
var lexerActionExecutor: LexerActionExecutor? = c.getLexerActionExecutor()
var lexerActionExecutor = c.getLexerActionExecutor()
if lexerActionExecutor != nil {
lexerActionExecutor = lexerActionExecutor!.fixOffsetBeforeMatch(input.index() - startIndex)
}
let treatEofAsEpsilon: Bool = t == BufferedTokenStream.EOF
let treatEofAsEpsilon = (t == BufferedTokenStream.EOF)
if try closure(input,
LexerATNConfig(c, target, lexerActionExecutor),
reach,
@ -384,7 +381,6 @@ open class LexerATNSimulator: ATNSimulator {
_ startIndex: Int, _ index: Int, _ line: Int, _ charPos: Int) throws {
if LexerATNSimulator.debug {
print("ACTION \(String(describing: lexerActionExecutor))\n")
}
// seek to after last char in token
@ -409,12 +405,12 @@ open class LexerATNSimulator: ATNSimulator {
final func computeStartState(_ input: CharStream,
_ p: ATNState) throws -> ATNConfigSet {
let initialContext: PredictionContext = PredictionContext.EMPTY
let configs: ATNConfigSet = OrderedATNConfigSet()
let initialContext = PredictionContext.EMPTY
let configs = OrderedATNConfigSet()
let length = p.getNumberOfTransitions()
for i in 0..<length {
let target: ATNState = p.transition(i).target
let c: LexerATNConfig = LexerATNConfig(target, i + 1, initialContext)
let target = p.transition(i).target
let c = LexerATNConfig(target, i + 1, initialContext)
try closure(input, c, configs, false, false, false)
}
return configs
@ -441,10 +437,8 @@ open class LexerATNSimulator: ATNSimulator {
if LexerATNSimulator.debug {
if recog != nil {
print("closure at \(recog!.getRuleNames()[config.state.ruleIndex!]) rule stop \(config)\n")
} else {
print("closure at rule stop \(config)\n")
}
}
@ -462,9 +456,9 @@ open class LexerATNSimulator: ATNSimulator {
let length = configContext.size()
for i in 0..<length {
if configContext.getReturnState(i) != PredictionContext.EMPTY_RETURN_STATE {
let newContext: PredictionContext = configContext.getParent(i)! // "pop" return state
let returnState: ATNState? = atn.states[configContext.getReturnState(i)]
let c: LexerATNConfig = LexerATNConfig(config, returnState!, newContext)
let newContext = configContext.getParent(i)! // "pop" return state
let returnState = atn.states[configContext.getReturnState(i)]
let c = LexerATNConfig(config, returnState!, newContext)
currentAltReachedAcceptState = try closure(input, c, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
}
}
@ -480,13 +474,12 @@ open class LexerATNSimulator: ATNSimulator {
}
}
let p: ATNState = config.state
let p = config.state
let length = p.getNumberOfTransitions()
for i in 0..<length {
let t: Transition = p.transition(i)
let c: LexerATNConfig? = try getEpsilonTarget(input, config, t, configs, speculative, treatEofAsEpsilon)
if c != nil {
currentAltReachedAcceptState = try closure(input, c!, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
let t = p.transition(i)
if let c = try getEpsilonTarget(input, config, t, configs, speculative, treatEofAsEpsilon) {
currentAltReachedAcceptState = try closure(input, c, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
}
}
@ -504,9 +497,8 @@ open class LexerATNSimulator: ATNSimulator {
var c: LexerATNConfig? = nil
switch t.getSerializationType() {
case Transition.RULE:
let ruleTransition: RuleTransition = t as! RuleTransition
let newContext: PredictionContext =
SingletonPredictionContext.create(config.context, ruleTransition.followState.stateNumber)
let ruleTransition = t as! RuleTransition
let newContext = SingletonPredictionContext.create(config.context, ruleTransition.followState.stateNumber)
c = LexerATNConfig(config, t.target, newContext)
break
@ -534,7 +526,7 @@ open class LexerATNSimulator: ATNSimulator {
/// states reached by traversing predicates. Since this is when we
/// test them, we cannot cache the DFA state target of ID.
///
let pt: PredicateTransition = t as! PredicateTransition
let pt = t as! PredicateTransition
if LexerATNSimulator.debug {
print("EVAL rule \(pt.ruleIndex):\(pt.predIndex)")
}
@ -558,7 +550,7 @@ open class LexerATNSimulator: ATNSimulator {
// getEpsilonTarget to return two configurations, so
// additional modifications are needed before we can support
// the split operation.
let lexerActionExecutor: LexerActionExecutor = LexerActionExecutor.append(config.getLexerActionExecutor(), atn.lexerActions[(t as! ActionTransition).actionIndex])
let lexerActionExecutor = LexerActionExecutor.append(config.getLexerActionExecutor(), atn.lexerActions[(t as! ActionTransition).actionIndex])
c = LexerATNConfig(config, t.target, lexerActionExecutor)
break
} else {
@ -619,10 +611,10 @@ open class LexerATNSimulator: ATNSimulator {
return try recog.sempred(nil, ruleIndex, predIndex)
}
var savedCharPositionInLine: Int = charPositionInLine
var savedLine: Int = line
var index: Int = input.index()
var marker: Int = input.mark()
var savedCharPositionInLine = charPositionInLine
var savedLine = line
var index = input.index()
var marker = input.mark()
do {
try consume(input)
defer
@ -663,11 +655,9 @@ open class LexerATNSimulator: ATNSimulator {
/// If that gets us to a previously created (but dangling) DFA
/// state, we can continue in pure DFA mode from there.
///
let suppressEdge: Bool = q.hasSemanticContext
let suppressEdge = q.hasSemanticContext
q.hasSemanticContext = false
let to: DFAState = addDFAState(q)
let to = addDFAState(q)
if suppressEdge {
return to
@ -690,8 +680,7 @@ open class LexerATNSimulator: ATNSimulator {
dfaStateMutex.synchronized {
if p.edges == nil {
// make room for tokens 1..n and -1 masquerading as index 0
//TODO ARRAY COUNT
p.edges = [DFAState?](repeating: nil, count: LexerATNSimulator.MAX_DFA_EDGE - LexerATNSimulator.MIN_DFA_EDGE + 1) //new DFAState[MAX_DFA_EDGE-MIN_DFA_EDGE+1];
p.edges = [DFAState?](repeating: nil, count: LexerATNSimulator.MAX_DFA_EDGE - LexerATNSimulator.MIN_DFA_EDGE + 1)
}
p.edges[t - LexerATNSimulator.MIN_DFA_EDGE] = q // connect
}
@ -711,8 +700,8 @@ open class LexerATNSimulator: ATNSimulator {
///
assert(!configs.hasSemanticContext, "Expected: !configs.hasSemanticContext")
let proposed: DFAState = DFAState(configs)
let firstConfigWithRuleStopState: ATNConfig? = configs.firstConfigWithRuleStopState
let proposed = DFAState(configs)
let firstConfigWithRuleStopState = configs.firstConfigWithRuleStopState
if firstConfigWithRuleStopState != nil {
proposed.isAcceptState = true
@ -720,14 +709,14 @@ open class LexerATNSimulator: ATNSimulator {
proposed.prediction = atn.ruleToTokenType[firstConfigWithRuleStopState!.state.ruleIndex!]
}
let dfa: DFA = decisionToDFA[mode]
let dfa = decisionToDFA[mode]
return dfaStatesMutex.synchronized {
if let existing = dfa.states[proposed] {
return existing!
}
let newState: DFAState = proposed
let newState = proposed
newState.stateNumber = dfa.states.count
configs.setReadonly(true)
newState.configs = configs
@ -747,7 +736,7 @@ open class LexerATNSimulator: ATNSimulator {
public func getText(_ input: CharStream) -> String {
// index is first lookahead char, don't include.
return input.getText(Interval.of(startIndex, input.index() - 1))
return try! input.getText(Interval.of(startIndex, input.index() - 1))
}
public func getLine() -> Int {
@ -767,7 +756,7 @@ open class LexerATNSimulator: ATNSimulator {
}
public func consume(_ input: CharStream) throws {
let curChar: Int = try input.LA(1)
let curChar = try input.LA(1)
if String(Character(integerLiteral: curChar)) == "\n" {
line += 1
charPositionInLine = 0
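// Illustration only, not part of this commit: the lexer caches DFA edges only for
// input symbols in MIN_DFA_EDGE...MAX_DFA_EDGE; anything outside that window is
// re-computed through the ATN. A hypothetical helper showing the same index math
// used by getExistingTargetState/addDFAEdge above:
func cachedEdgeIndex(forSymbol t: Int) -> Int? {
    guard t >= LexerATNSimulator.MIN_DFA_EDGE, t <= LexerATNSimulator.MAX_DFA_EDGE else {
        return nil   // outside the cached range; take the slower ATN path
    }
    return t - LexerATNSimulator.MIN_DFA_EDGE
}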

@ -22,8 +22,7 @@ public class LexerAction: Hashable {
/// - returns: The serialization type of the lexer action.
///
public func getActionType() -> LexerActionType {
RuntimeException(" must overriden ")
fatalError()
fatalError(#function + " must be overridden")
}
@ -42,8 +41,7 @@ public class LexerAction: Hashable {
/// otherwise, `false`.
///
public func isPositionDependent() -> Bool {
RuntimeException(" must overriden ")
fatalError()
fatalError(#function + " must be overridden")
}
///
@ -55,12 +53,11 @@ public class LexerAction: Hashable {
/// - parameter lexer: The lexer instance.
///
public func execute(_ lexer: Lexer) throws {
RuntimeException(" must overriden ")
fatalError(#function + " must be overridden")
}
public var hashValue: Int {
RuntimeException(" must overriden ")
fatalError()
fatalError(#function + " must be overridden")
}
}

@ -36,7 +36,7 @@ public class LexerActionExecutor: Hashable {
public init(_ lexerActions: [LexerAction]) {
self.lexerActions = lexerActions
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
for lexerAction: LexerAction in lexerActions {
hash = MurmurHash.update(hash, lexerAction)
}

@ -65,7 +65,7 @@ public final class LexerChannelAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
hash = MurmurHash.update(hash, channel)
return MurmurHash.finish(hash, 2)

@ -94,7 +94,7 @@ public final class LexerCustomAction: LexerAction {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
hash = MurmurHash.update(hash, ruleIndex)
hash = MurmurHash.update(hash, actionIndex)

@ -97,7 +97,7 @@ public final class LexerIndexedCustomAction: LexerAction {
public override var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, offset)
hash = MurmurHash.update(hash, action)
return MurmurHash.finish(hash, 2)

@ -64,7 +64,7 @@ public final class LexerModeAction: LexerAction, CustomStringConvertible {
}
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
hash = MurmurHash.update(hash, mode)
return MurmurHash.finish(hash, 2)

@ -58,7 +58,7 @@ public final class LexerMoreAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
return MurmurHash.finish(hash, 1)

@ -59,7 +59,7 @@ public final class LexerPopModeAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
return MurmurHash.finish(hash, 1)

@ -66,7 +66,7 @@ public final class LexerPushModeAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
hash = MurmurHash.update(hash, mode)
return MurmurHash.finish(hash, 2)

@ -58,7 +58,7 @@ public final class LexerSkipAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
return MurmurHash.finish(hash, 1)
}

@ -64,7 +64,7 @@ public class LexerTypeAction: LexerAction, CustomStringConvertible {
override
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, getActionType().rawValue)
hash = MurmurHash.update(hash, type)
return MurmurHash.finish(hash, 2)

@ -236,10 +236,11 @@
import Foundation
open class ParserATNSimulator: ATNSimulator {
public let debug: Bool = false
public let debug_list_atn_decisions: Bool = false
public let dfa_debug: Bool = false
public let retry_debug: Bool = false
public let debug = false
public let debug_list_atn_decisions = false
public let dfa_debug = false
public let retry_debug = false
///
/// Just in case this optimization is bad, add an ENV variable to turn it off
///
@ -249,7 +250,8 @@ open class ParserATNSimulator: ATNSimulator {
}
return false
}()
internal final var parser: Parser
internal final unowned let parser: Parser
public final var decisionToDFA: [DFA]
@ -257,7 +259,7 @@ open class ParserATNSimulator: ATNSimulator {
/// SLL, LL, or LL + exact ambig detection?
///
private var mode: PredictionMode = PredictionMode.LL
private var mode = PredictionMode.LL
///
/// Each prediction operation uses a cache for merge of prediction contexts.
@ -272,7 +274,7 @@ open class ParserATNSimulator: ATNSimulator {
// LAME globals to avoid parameters!!!!! I need these down deep in predTransition
internal var _input: TokenStream!
internal var _startIndex: Int = 0
internal var _startIndex = 0
internal var _outerContext: ParserRuleContext!
internal var _dfa: DFA?
@ -310,7 +312,6 @@ open class ParserATNSimulator: ATNSimulator {
override
open func clearDFA() {
//for var d: Int = 0; d < decisionToDFA.count; d++ {
for d in 0..<decisionToDFA.count {
decisionToDFA[d] = DFA(atn.getDecisionState(d)!, d)
}
@ -331,11 +332,11 @@ open class ParserATNSimulator: ATNSimulator {
_input = input
_startIndex = input.index()
_outerContext = outerContext
var dfa: DFA = decisionToDFA[decision]
let dfa = decisionToDFA[decision]
_dfa = dfa
var m: Int = input.mark()
var index: Int = _startIndex
let m = input.mark()
let index = _startIndex
// Now we are certain to have a specific decision's DFA
// But, do we still need an initial state?
@ -363,10 +364,8 @@ open class ParserATNSimulator: ATNSimulator {
print(debugInfo)
}
var fullCtx: Bool = false
var s0_closure: ATNConfigSet = try computeStartState(dfa.atnStartState,
ParserRuleContext.EMPTY,
fullCtx)
let fullCtx = false
var s0_closure = try computeStartState(dfa.atnStartState, ParserRuleContext.EMPTY, fullCtx)
if dfa.isPrecedenceDfa() {
///
@ -379,24 +378,22 @@ open class ParserATNSimulator: ATNSimulator {
//added by janyou 20160224
// dfa.s0!.configs = s0_closure // not used for prediction but useful to know start configs anyway
s0_closure = try applyPrecedenceFilter(s0_closure)
s0 = try addDFAState(dfa, DFAState(s0_closure))
s0 = addDFAState(dfa, DFAState(s0_closure))
try dfa.setPrecedenceStartState(parser.getPrecedence(), s0!)
} else {
s0 = try addDFAState(dfa, DFAState(s0_closure))
s0 = addDFAState(dfa, DFAState(s0_closure))
dfa.s0 = s0
}
}
var alt: Int = try execATN(dfa, s0!, input, index, outerContext!)
let alt = try execATN(dfa, s0!, input, index, outerContext!)
if debug {
print("DFA after predictATN: \(dfa.toString(parser.getVocabulary()))")
}
defer {
mergeCache = nil // wack cache after each prediction
_dfa = nil
try! input.seek(index)
try! input.release(m)
}
return alt
}
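// Illustration only, not part of this commit: prediction brackets the token stream
// with mark/seek/release (see the defer block above) so the stream position is
// restored however prediction exits. A hypothetical standalone sketch of that idiom:
func withMarkedStream<T>(_ input: TokenStream, _ body: () throws -> T) rethrows -> T {
    let marker = input.mark()
    let index = input.index()
    defer {
        try! input.seek(index)
        try! input.release(marker)
    }
    return try body()
}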
@ -440,13 +437,13 @@ open class ParserATNSimulator: ATNSimulator {
try print("execATN decision \(dfa.decision) exec LA(1)==\(getLookaheadName(input)) line \(input.LT(1)!.getLine()):\(input.LT(1)!.getCharPositionInLine())")
}
var previousD: DFAState = s0
var previousD = s0
if debug {
print("s0 = \(s0)")
}
var t: Int = try input.LA(1)
var t = try input.LA(1)
while true {
// while more work
@ -467,9 +464,9 @@ open class ParserATNSimulator: ATNSimulator {
// ATN states in SLL implies LL will also get nowhere.
// If conflict in states that dip out, choose min since we
// will get error no matter what.
let e: NoViableAltException = try noViableAlt(input, outerContext, previousD.configs, startIndex)
let e = noViableAlt(input, outerContext, previousD.configs, startIndex)
try input.seek(startIndex)
let alt: Int = try getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configs, outerContext)
let alt = try getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configs, outerContext)
if alt != ATN.INVALID_ALT_NUMBER {
return alt
}
@ -480,12 +477,12 @@ open class ParserATNSimulator: ATNSimulator {
if D.requiresFullContext && (mode != PredictionMode.SLL) {
// IF PREDS, MIGHT RESOLVE TO SINGLE ALT => SLL (or syntax error)
var conflictingAlts: BitSet = D.configs.conflictingAlts!
var conflictingAlts = D.configs.conflictingAlts!
if D.predicates != nil {
if debug {
print("DFA state has preds in DFA sim LL failover")
}
let conflictIndex: Int = input.index()
let conflictIndex = input.index()
if conflictIndex != startIndex {
try input.seek(startIndex)
}
@ -495,7 +492,7 @@ open class ParserATNSimulator: ATNSimulator {
if debug {
print("Full LL avoided")
}
return try conflictingAlts.nextSetBit(0)
return conflictingAlts.firstSetBit()
}
if conflictIndex != startIndex {
@ -508,12 +505,10 @@ open class ParserATNSimulator: ATNSimulator {
if dfa_debug {
print("ctx sensitive state \(outerContext) in \(D)")
}
let fullCtx: Bool = true
let s0_closure: ATNConfigSet =
try computeStartState(dfa.atnStartState, outerContext,
fullCtx)
try reportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.index())
let alt: Int = try execATNWithFullContext(dfa, D, s0_closure,
let fullCtx = true
let s0_closure = try computeStartState(dfa.atnStartState, outerContext, fullCtx)
reportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.index())
let alt = try execATNWithFullContext(dfa, D, s0_closure,
input, startIndex,
outerContext)
return alt
@ -524,22 +519,22 @@ open class ParserATNSimulator: ATNSimulator {
return D.prediction
}
let stopIndex: Int = input.index()
let stopIndex = input.index()
try input.seek(startIndex)
let alts: BitSet = try evalSemanticContext(D.predicates!, outerContext, true)
let alts = try evalSemanticContext(D.predicates!, outerContext, true)
switch alts.cardinality() {
case 0:
throw try ANTLRException.recognition(e: noViableAlt(input, outerContext, D.configs, startIndex))
throw ANTLRException.recognition(e: noViableAlt(input, outerContext, D.configs, startIndex))
case 1:
return try alts.nextSetBit(0)
return alts.firstSetBit()
default:
// report ambiguity after predicate evaluation to make sure the correct
// set of ambig alts is reported.
try reportAmbiguity(dfa, D, startIndex, stopIndex, false, alts, D.configs)
return try alts.nextSetBit(0)
reportAmbiguity(dfa, D, startIndex, stopIndex, false, alts, D.configs)
return alts.firstSetBit()
}
}
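// Illustration only, not part of this commit: firstSetBit() is the non-throwing
// replacement for the older `try nextSetBit(0)` calls used to pick the minimum
// viable alternative. A hypothetical usage sketch:
func demoFirstSetBit() {
    let alts = BitSet()
    try! alts.set(3)
    try! alts.set(1)
    print("minimum viable alternative: \(alts.firstSetBit())")   // prints 1
}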
@ -564,7 +559,7 @@ open class ParserATNSimulator: ATNSimulator {
/// already cached
///
func getExistingTargetState(_ previousD: DFAState, _ t: Int) -> DFAState? {
var edges: [DFAState?]? = previousD.edges
var edges = previousD.edges
if edges == nil || (t + 1) < 0 || (t + 1) >= (edges!.count) {
return nil
}
@ -586,9 +581,9 @@ open class ParserATNSimulator: ATNSimulator {
///
func computeTargetState(_ dfa: DFA, _ previousD: DFAState, _ t: Int) throws -> DFAState {
let reach: ATNConfigSet? = try computeReachSet(previousD.configs, t, false)
let reach = try computeReachSet(previousD.configs, t, false)
if reach == nil {
try addDFAEdge(dfa, previousD, t, ATNSimulator.ERROR)
addDFAEdge(dfa, previousD, t, ATNSimulator.ERROR)
return ATNSimulator.ERROR
}
@ -598,8 +593,8 @@ open class ParserATNSimulator: ATNSimulator {
let predictedAlt: Int = ParserATNSimulator.getUniqueAlt(reach!)
if debug {
let altSubSets: Array<BitSet> = try PredictionMode.getConflictingAltSubsets(reach!)
print("SLL altSubSets=\(altSubSets), configs=\(reach!), predict=\(predictedAlt), allSubsetsConflict=\(PredictionMode.allSubsetsConflict(altSubSets)), conflictingAlts=\(try! getConflictingAlts(reach!))")
let altSubSets = PredictionMode.getConflictingAltSubsets(reach!)
print("SLL altSubSets=\(altSubSets), configs=\(reach!), predict=\(predictedAlt), allSubsetsConflict=\(PredictionMode.allSubsetsConflict(altSubSets)), conflictingAlts=\(getConflictingAlts(reach!))")
}
if predictedAlt != ATN.INVALID_ALT_NUMBER {
@ -608,44 +603,43 @@ open class ParserATNSimulator: ATNSimulator {
D.configs.uniqueAlt = predictedAlt
D.prediction = predictedAlt
} else {
if try PredictionMode.hasSLLConflictTerminatingPrediction(mode, reach!) {
if PredictionMode.hasSLLConflictTerminatingPrediction(mode, reach!) {
// MORE THAN ONE VIABLE ALTERNATIVE
D.configs.conflictingAlts = try getConflictingAlts(reach!)
D.configs.conflictingAlts = getConflictingAlts(reach!)
D.requiresFullContext = true
// in SLL-only mode, we will stop at this state and return the minimum alt
D.isAcceptState = true
D.prediction = try D.configs.conflictingAlts!.nextSetBit(0)
D.prediction = D.configs.conflictingAlts!.firstSetBit()
}
}
if D.isAcceptState && D.configs.hasSemanticContext {
try predicateDFAState(D, atn.getDecisionState(dfa.decision)!)
predicateDFAState(D, atn.getDecisionState(dfa.decision)!)
if D.predicates != nil {
D.prediction = ATN.INVALID_ALT_NUMBER
}
}
// all adds to dfa are done after we've created full D state
D = try addDFAEdge(dfa, previousD, t, D)!
D = addDFAEdge(dfa, previousD, t, D)!
return D
}
final func predicateDFAState(_ dfaState: DFAState, _ decisionState: DecisionState) throws {
final func predicateDFAState(_ dfaState: DFAState, _ decisionState: DecisionState) {
// We need to test all predicates, even in DFA states that
// uniquely predict alternative.
let nalts: Int = decisionState.getNumberOfTransitions()
let nalts = decisionState.getNumberOfTransitions()
// Update DFA so reach becomes accept state with (predicate,alt)
// pairs if preds found for conflicting alts
let altsToCollectPredsFrom: BitSet = try getConflictingAltsOrUniqueAlt(dfaState.configs)
let altToPred: [SemanticContext?]? = try getPredsForAmbigAlts(altsToCollectPredsFrom, dfaState.configs, nalts)
if altToPred != nil {
dfaState.predicates = try getPredicatePredictions(altsToCollectPredsFrom, altToPred!)
let altsToCollectPredsFrom = getConflictingAltsOrUniqueAlt(dfaState.configs)
if let altToPred = getPredsForAmbigAlts(altsToCollectPredsFrom, dfaState.configs, nalts) {
dfaState.predicates = getPredicatePredictions(altsToCollectPredsFrom, altToPred)
dfaState.prediction = ATN.INVALID_ALT_NUMBER // make sure we use preds
} else {
// There are preds in configs but they might go away
// when OR'd together like {p}? || NONE == NONE. If neither
// alt has preds, resolve to min alt
dfaState.prediction = try altsToCollectPredsFrom.nextSetBit(0)
dfaState.prediction = altsToCollectPredsFrom.firstSetBit()
}
}
@ -658,13 +652,13 @@ open class ParserATNSimulator: ATNSimulator {
if debug || debug_list_atn_decisions {
print("execATNWithFullContext \(s0)")
}
let fullCtx: Bool = true
var foundExactAmbig: Bool = false
let fullCtx = true
var foundExactAmbig = false
var reach: ATNConfigSet? = nil
var previous: ATNConfigSet = s0
var previous = s0
try input.seek(startIndex)
var t: Int = try input.LA(1)
var predictedAlt: Int = 0
var t = try input.LA(1)
var predictedAlt = 0
while true {
// while more work
if let computeReach = try computeReachSet(previous, t, fullCtx) {
@ -679,9 +673,9 @@ open class ParserATNSimulator: ATNSimulator {
// ATN states in SLL implies LL will also get nowhere.
// If conflict in states that dip out, choose min since we
// will get error no matter what.
let e: NoViableAltException = try noViableAlt(input, outerContext, previous, startIndex)
let e = noViableAlt(input, outerContext, previous, startIndex)
try input.seek(startIndex)
let alt: Int = try getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext)
let alt = try getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext)
if alt != ATN.INVALID_ALT_NUMBER {
return alt
}
@ -689,9 +683,9 @@ open class ParserATNSimulator: ATNSimulator {
}
if let reach = reach {
let altSubSets: Array<BitSet> = try PredictionMode.getConflictingAltSubsets(reach)
let altSubSets = PredictionMode.getConflictingAltSubsets(reach)
if debug {
print("LL altSubSets=\(altSubSets), predict=\(try PredictionMode.getUniqueAlt(altSubSets)), resolvesToJustOneViableAlt=\(try PredictionMode.resolvesToJustOneViableAlt(altSubSets))")
print("LL altSubSets=\(altSubSets), predict=\(PredictionMode.getUniqueAlt(altSubSets)), resolvesToJustOneViableAlt=\(PredictionMode.resolvesToJustOneViableAlt(altSubSets))")
}
@ -702,7 +696,7 @@ open class ParserATNSimulator: ATNSimulator {
break
}
if mode != PredictionMode.LL_EXACT_AMBIG_DETECTION {
predictedAlt = try PredictionMode.resolvesToJustOneViableAlt(altSubSets)
predictedAlt = PredictionMode.resolvesToJustOneViableAlt(altSubSets)
if predictedAlt != ATN.INVALID_ALT_NUMBER {
break
}
@ -712,7 +706,7 @@ open class ParserATNSimulator: ATNSimulator {
if PredictionMode.allSubsetsConflict(altSubSets) &&
PredictionMode.allSubsetsEqual(altSubSets) {
foundExactAmbig = true
predictedAlt = try PredictionMode.getSingleViableAlt(altSubSets)
predictedAlt = PredictionMode.getSingleViableAlt(altSubSets)
break
}
// else there are multiple non-conflicting subsets or
@ -732,7 +726,7 @@ open class ParserATNSimulator: ATNSimulator {
// without conflict, then we know that it's a full LL decision
// not SLL.
if reach.uniqueAlt != ATN.INVALID_ALT_NUMBER {
try reportContextSensitivity(dfa, predictedAlt, reach, startIndex, input.index())
reportContextSensitivity(dfa, predictedAlt, reach, startIndex, input.index())
return predictedAlt
}
@ -763,7 +757,7 @@ open class ParserATNSimulator: ATNSimulator {
/// the fact that we should predict alternative 1. We just can't say for
/// sure that there is an ambiguity without looking further.
///
try reportAmbiguity(dfa, D, startIndex, input.index(), foundExactAmbig,
reportAmbiguity(dfa, D, startIndex, input.index(), foundExactAmbig,
reach.getAlts(), reach)
}
return predictedAlt
@ -780,7 +774,7 @@ open class ParserATNSimulator: ATNSimulator {
mergeCache = DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>()
}
let intermediate: ATNConfigSet = ATNConfigSet(fullCtx)
let intermediate = ATNConfigSet(fullCtx)
///
/// Configurations already in a rule stop state indicate reaching the end
@ -793,39 +787,33 @@ open class ParserATNSimulator: ATNSimulator {
/// ensure that the alternative matching the longest overall sequence is
/// chosen when multiple such configurations can match the input.
///
var skippedStopStates: Array<ATNConfig>? = nil
var skippedStopStates: [ATNConfig]? = nil
// First figure out where we can reach on input t
let length = closureConfigSet.configs.count
let configs = closureConfigSet.configs
for i in 0..<length {
//for c: ATNConfig in closureConfigSet.configs {
for config in configs {
if debug {
print("testing \(getTokenName(t)) at \(configs[i].description)")
print("testing \(getTokenName(t)) at \(config.description)")
}
if configs[i].state is RuleStopState {
assert(configs[i].context!.isEmpty(), "Expected: c.context.isEmpty()")
if config.state is RuleStopState {
assert(config.context!.isEmpty(), "Expected: c.context.isEmpty()")
if fullCtx || t == BufferedTokenStream.EOF {
if skippedStopStates == nil {
skippedStopStates = Array<ATNConfig>()
skippedStopStates = [ATNConfig]()
}
skippedStopStates?.append(configs[i])
skippedStopStates!.append(config)
}
continue
}
let n: Int = configs[i].state.getNumberOfTransitions()
let n = config.state.getNumberOfTransitions()
for ti in 0..<n {
// for each transition
let trans: Transition = configs[i].state.transition(ti)
let target: ATNState? = getReachableTarget(trans, t)
if target != nil {
try intermediate.add(ATNConfig(configs[i], target!), &mergeCache)
let trans = config.state.transition(ti)
if let target = getReachableTarget(trans, t) {
try! intermediate.add(ATNConfig(config, target), &mergeCache)
}
}
}
@ -866,14 +854,10 @@ open class ParserATNSimulator: ATNSimulator {
///
if reach == nil {
reach = ATNConfigSet(fullCtx)
var closureBusy: Set<ATNConfig> = Set<ATNConfig>()
let treatEofAsEpsilon: Bool = t == CommonToken.EOF
let configs = intermediate.configs
let length = configs.count
for i in 0..<length {
//for c: ATNConfig in intermediate.configs {
// print(__FUNCTION__)
try closure(configs[i], reach!, &closureBusy, false, fullCtx, treatEofAsEpsilon)
var closureBusy = Set<ATNConfig>()
let treatEofAsEpsilon = (t == CommonToken.EOF)
for config in intermediate.configs {
try closure(config, reach!, &closureBusy, false, fullCtx, treatEofAsEpsilon)
}
}
@ -896,7 +880,7 @@ open class ParserATNSimulator: ATNSimulator {
/// already guaranteed to meet this condition whether or not it's
/// required.
///
reach = try removeAllConfigsNotInRuleStopState(reach!, reach! === intermediate)
reach = removeAllConfigsNotInRuleStopState(reach!, reach! === intermediate)
}
///
@ -909,10 +893,10 @@ open class ParserATNSimulator: ATNSimulator {
/// multiple alternatives are viable.
///
if let reach = reach {
if skippedStopStates != nil && (!fullCtx || !PredictionMode.hasConfigInRuleStopState(reach)) {
assert(!skippedStopStates!.isEmpty, "Expected: !skippedStopStates.isEmpty()")
for c: ATNConfig in skippedStopStates! {
try reach.add(c, &mergeCache)
if let skippedStopStates = skippedStopStates, (!fullCtx || !PredictionMode.hasConfigInRuleStopState(reach)) {
assert(!skippedStopStates.isEmpty, "Expected: !skippedStopStates.isEmpty()")
for c in skippedStopStates {
try! reach.add(c, &mergeCache)
}
}
@ -943,29 +927,22 @@ open class ParserATNSimulator: ATNSimulator {
/// rule stop state, otherwise return a new configuration set containing only
/// the configurations from `configs` which are in a rule stop state
///
final func removeAllConfigsNotInRuleStopState(_ configs: ATNConfigSet, _ lookToEndOfRule: Bool) throws -> ATNConfigSet {
let result = try configs.removeAllConfigsNotInRuleStopState(&mergeCache,lookToEndOfRule,atn)
return result
final func removeAllConfigsNotInRuleStopState(_ configs: ATNConfigSet, _ lookToEndOfRule: Bool) -> ATNConfigSet {
return configs.removeAllConfigsNotInRuleStopState(&mergeCache,lookToEndOfRule,atn)
}
final func computeStartState(_ p: ATNState,
_ ctx: RuleContext,
_ fullCtx: Bool) throws -> ATNConfigSet {
let initialContext: PredictionContext = PredictionContext.fromRuleContext(atn, ctx)
let configs: ATNConfigSet = ATNConfigSet(fullCtx)
final func computeStartState(_ p: ATNState, _ ctx: RuleContext, _ fullCtx: Bool) throws -> ATNConfigSet {
let initialContext = PredictionContext.fromRuleContext(atn, ctx)
let configs = ATNConfigSet(fullCtx)
let length = p.getNumberOfTransitions()
for i in 0..<length {
let target: ATNState = p.transition(i).target
let c: ATNConfig = ATNConfig(target, i + 1, initialContext)
var closureBusy: Set<ATNConfig> = Set<ATNConfig>()
let target = p.transition(i).target
let c = ATNConfig(target, i + 1, initialContext)
var closureBusy = Set<ATNConfig>()
try closure(c, configs, &closureBusy, true, fullCtx, false)
}
return configs
}
@ -1128,9 +1105,7 @@ open class ParserATNSimulator: ATNSimulator {
/// calling _org.antlr.v4.runtime.Parser#getPrecedence_).
///
final internal func applyPrecedenceFilter(_ configs: ATNConfigSet) throws -> ATNConfigSet {
let configSet = try configs.applyPrecedenceFilter(&mergeCache,parser,_outerContext)
return configSet
return try configs.applyPrecedenceFilter(&mergeCache,parser,_outerContext)
}
final internal func getReachableTarget(_ trans: Transition, _ ttype: Int) -> ATNState? {
@ -1144,7 +1119,7 @@ open class ParserATNSimulator: ATNSimulator {
final internal func getPredsForAmbigAlts(_ ambigAlts: BitSet,
_ configs: ATNConfigSet,
_ nalts: Int) throws -> [SemanticContext?]? {
_ nalts: Int) -> [SemanticContext?]? {
// REACH=[1|1|[]|0:0, 1|2|[]|0:1]
///
/// altToPred starts as an array of all null contexts. The entry at index i
@ -1158,8 +1133,7 @@ open class ParserATNSimulator: ATNSimulator {
///
/// From this, it is clear that NONE||anything==NONE.
///
let altToPred: [SemanticContext?]? = try configs.getPredsForAmbigAlts(ambigAlts,nalts)
let altToPred = configs.getPredsForAmbigAlts(ambigAlts,nalts)
if debug {
print("getPredsForAmbigAlts result \(String(describing: altToPred))")
}
@ -1167,17 +1141,17 @@ open class ParserATNSimulator: ATNSimulator {
}
final internal func getPredicatePredictions(_ ambigAlts: BitSet?,
_ altToPred: [SemanticContext?]) throws -> [DFAState.PredPrediction]? {
var pairs: Array<DFAState.PredPrediction> = Array<DFAState.PredPrediction>()
var containsPredicate: Bool = false
_ altToPred: [SemanticContext?]) -> [DFAState.PredPrediction]? {
var pairs = [DFAState.PredPrediction]()
var containsPredicate = false
let length = altToPred.count
for i in 1..<length {
let pred: SemanticContext? = altToPred[i]
let pred = altToPred[i]
// unpredicated is indicated by SemanticContext.NONE
assert(pred != nil, "Expected: pred!=null")
if try ambigAlts != nil && ambigAlts!.get(i) {
if let ambigAlts = ambigAlts, try! ambigAlts.get(i) {
pairs.append(DFAState.PredPrediction(pred!, i))
}
if pred != SemanticContext.NONE {
@ -1234,18 +1208,15 @@ open class ParserATNSimulator: ATNSimulator {
///
final internal func getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(_ configs: ATNConfigSet,
_ outerContext: ParserRuleContext) throws -> Int {
let sets: (ATNConfigSet, ATNConfigSet) = try
splitAccordingToSemanticValidity(configs, outerContext)
let semValidConfigs: ATNConfigSet = sets.0
let semInvalidConfigs: ATNConfigSet = sets.1
var alt: Int = try getAltThatFinishedDecisionEntryRule(semValidConfigs)
let (semValidConfigs, semInvalidConfigs) = try splitAccordingToSemanticValidity(configs, outerContext)
var alt = getAltThatFinishedDecisionEntryRule(semValidConfigs)
if alt != ATN.INVALID_ALT_NUMBER {
// semantically/syntactically viable path exists
return alt
}
// Is there a syntactically valid path with a failed pred?
if semInvalidConfigs.size() > 0 {
alt = try getAltThatFinishedDecisionEntryRule(semInvalidConfigs)
alt = getAltThatFinishedDecisionEntryRule(semInvalidConfigs)
if alt != ATN.INVALID_ALT_NUMBER {
// syntactically viable path exists
return alt
@ -1254,9 +1225,9 @@ open class ParserATNSimulator: ATNSimulator {
return ATN.INVALID_ALT_NUMBER
}
final internal func getAltThatFinishedDecisionEntryRule(_ configs: ATNConfigSet) throws -> Int {
final internal func getAltThatFinishedDecisionEntryRule(_ configs: ATNConfigSet) -> Int {
return try configs.getAltThatFinishedDecisionEntryRule()
return configs.getAltThatFinishedDecisionEntryRule()
}
///
@ -1286,18 +1257,18 @@ open class ParserATNSimulator: ATNSimulator {
final internal func evalSemanticContext(_ predPredictions: [DFAState.PredPrediction],
_ outerContext: ParserRuleContext,
_ complete: Bool) throws -> BitSet {
let predictions: BitSet = BitSet()
for pair: DFAState.PredPrediction in predPredictions {
let predictions = BitSet()
for pair in predPredictions {
if pair.pred == SemanticContext.NONE {
try predictions.set(pair.alt)
try! predictions.set(pair.alt)
if !complete {
break
}
continue
}
let fullCtx: Bool = false // in dfa
let predicateEvaluationResult: Bool = try evalSemanticContext(pair.pred, outerContext, pair.alt, fullCtx)
let fullCtx = false // in dfa
let predicateEvaluationResult = try evalSemanticContext(pair.pred, outerContext, pair.alt, fullCtx)
if debug || dfa_debug {
print("eval pred \(pair)= \(predicateEvaluationResult)")
}
@ -1306,7 +1277,7 @@ open class ParserATNSimulator: ATNSimulator {
if debug || dfa_debug {
print("PREDICT \(pair.alt)")
}
try predictions.set(pair.alt)
try! predictions.set(pair.alt)
if !complete {
break
}
@ -1360,10 +1331,8 @@ open class ParserATNSimulator: ATNSimulator {
_ collectPredicates: Bool,
_ fullCtx: Bool,
_ treatEofAsEpsilon: Bool) throws {
let initialDepth: Int = 0
try closureCheckingStopState(config, configs, &closureBusy, collectPredicates,
fullCtx,
initialDepth, treatEofAsEpsilon)
let initialDepth = 0
try closureCheckingStopState(config, configs, &closureBusy, collectPredicates, fullCtx, initialDepth, treatEofAsEpsilon)
assert(!fullCtx || !configs.dipsIntoOuterContext, "Expected: !fullCtx||!configs.dipsIntoOuterContext")
}
@ -1389,7 +1358,7 @@ open class ParserATNSimulator: ATNSimulator {
for i in 0..<length {
if configContext.getReturnState(i) == PredictionContext.EMPTY_RETURN_STATE {
if fullCtx {
try configs.add(ATNConfig(config, config.state, PredictionContext.EMPTY), &mergeCache)
try! configs.add(ATNConfig(config, config.state, PredictionContext.EMPTY), &mergeCache)
continue
} else {
// we have no context info, just chase follow links (if greedy)
@ -1420,10 +1389,9 @@ open class ParserATNSimulator: ATNSimulator {
return
} else if fullCtx {
// reached end of start rule
try configs.add(config,&mergeCache)
try! configs.add(config, &mergeCache)
return
} else {
// print("FALLING off rule \(getRuleName(config.state.ruleIndex!))")
// else if we have no context info, just chase follow links (if greedy)
if debug {
print("FALLING off rule \(getRuleName(config.state.ruleIndex!))")
@ -1431,8 +1399,7 @@ open class ParserATNSimulator: ATNSimulator {
}
}
try closure_(config, configs, &closureBusy, collectPredicates,
fullCtx, depth, treatEofAsEpsilon)
try closure_(config, configs, &closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon)
}
///
@ -1447,10 +1414,10 @@ open class ParserATNSimulator: ATNSimulator {
_ treatEofAsEpsilon: Bool) throws {
// print(__FUNCTION__)
//long startTime = System.currentTimeMillis();
let p: ATNState = config.state
let p = config.state
// optimization
if !p.onlyHasEpsilonTransitions() {
try configs.add(config, &mergeCache)
try! configs.add(config, &mergeCache)
// make sure to not return here, because EOF transitions can act as
// both epsilon transitions and non-epsilon transitions.
// if ( debug ) print("added config "+configs);
@ -1461,11 +1428,9 @@ open class ParserATNSimulator: ATNSimulator {
canDropLoopEntryEdgeInLeftRecursiveRule(config) {
continue
}
let t: Transition = p.transition(i)
let continueCollecting: Bool =
!(t is ActionTransition) && collectPredicates
let c: ATNConfig? = try getEpsilonTarget(config, t, continueCollecting,
depth == 0, fullCtx, treatEofAsEpsilon)
let t = p.transition(i)
let continueCollecting = !(t is ActionTransition) && collectPredicates
let c = try getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEofAsEpsilon)
if let c = c {
if !t.isEpsilon() {
// avoid infinite recursion for EOF* and EOF+
@ -1476,7 +1441,7 @@ open class ParserATNSimulator: ATNSimulator {
}
}
var newDepth: Int = depth
var newDepth = depth
if config.state is RuleStopState {
assert(!fullCtx, "Expected: !fullCtx")
// target fell off end of rule; mark resulting c as having dipped into outer context
@ -1619,7 +1584,7 @@ open class ParserATNSimulator: ATNSimulator {
if ParserATNSimulator.TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT {
return false
}
let p: ATNState = config.state
let p = config.state
guard let configContext = config.context else {
return false
}
@ -1636,28 +1601,28 @@ open class ParserATNSimulator: ATNSimulator {
// Require all return states to return back to the same rule
// that p is in.
let numCtxs: Int = configContext.size()
let numCtxs = configContext.size()
for i in 0 ..< numCtxs { // for each stack context
let returnState: ATNState = atn.states[configContext.getReturnState(i)]!
let returnState = atn.states[configContext.getReturnState(i)]!
if returnState.ruleIndex != p.ruleIndex
{return false}
}
let decisionStartState: BlockStartState = (p.transition(0).target as! BlockStartState)
let blockEndStateNum: Int = decisionStartState.endState!.stateNumber
let blockEndState: BlockEndState = (atn.states[blockEndStateNum] as! BlockEndState)
let decisionStartState = (p.transition(0).target as! BlockStartState)
let blockEndStateNum = decisionStartState.endState!.stateNumber
let blockEndState = (atn.states[blockEndStateNum] as! BlockEndState)
// Verify that the top of each stack context leads to loop entry/exit
// state through epsilon edges and w/o leaving rule.
for i in 0 ..< numCtxs { // for each stack context
let returnStateNumber: Int = configContext.getReturnState(i)
let returnState: ATNState = atn.states[returnStateNumber]!
let returnStateNumber = configContext.getReturnState(i)
let returnState = atn.states[returnStateNumber]!
// all states must have single outgoing epsilon edge
if returnState.getNumberOfTransitions() != 1 || !returnState.transition(0).isEpsilon(){
return false
}
// Look for prefix op case like 'not expr', (' type ')' expr
let returnStateTarget: ATNState = returnState.transition(0).target
let returnStateTarget = returnState.transition(0).target
if returnState.getStateType() == ATNState.BLOCK_END &&
returnStateTarget == p {
continue
@ -1771,19 +1736,20 @@ open class ParserATNSimulator: ATNSimulator {
// during closure, which dramatically reduces the size of
// the config sets. It also obviates the need to test predicates
// later during conflict resolution.
let currentPosition: Int = _input.index()
let currentPosition = _input.index()
try _input.seek(_startIndex)
let predSucceeds: Bool = try evalSemanticContext(pt.getPredicate(), _outerContext, config.alt, fullCtx)
let predSucceeds = try evalSemanticContext(pt.getPredicate(), _outerContext, config.alt, fullCtx)
try _input.seek(currentPosition)
if predSucceeds {
c = ATNConfig(config, pt.target) // no pred context
}
} else {
let newSemCtx: SemanticContext =
SemanticContext.and(config.semanticContext, pt.getPredicate())
}
else {
let newSemCtx = SemanticContext.and(config.semanticContext, pt.getPredicate())
c = ATNConfig(config, pt.target, newSemCtx)
}
} else {
}
else {
c = ATNConfig(config, pt.target)
}
@ -1814,16 +1780,15 @@ open class ParserATNSimulator: ATNSimulator {
// during closure, which dramatically reduces the size of
// the config sets. It also obviates the need to test predicates
// later during conflict resolution.
let currentPosition: Int = _input.index()
let currentPosition = _input.index()
try _input.seek(_startIndex)
let predSucceeds: Bool = try evalSemanticContext(pt.getPredicate(), _outerContext, config.alt, fullCtx)
let predSucceeds = try evalSemanticContext(pt.getPredicate(), _outerContext, config.alt, fullCtx)
try _input.seek(currentPosition)
if predSucceeds {
c = ATNConfig(config, pt.target) // no pred context
}
} else {
let newSemCtx: SemanticContext =
SemanticContext.and(config.semanticContext, pt.getPredicate())
let newSemCtx = SemanticContext.and(config.semanticContext, pt.getPredicate())
c = ATNConfig(config, pt.target, newSemCtx)
}
} else {
@ -1842,9 +1807,8 @@ open class ParserATNSimulator: ATNSimulator {
print("CALL rule \(getRuleName(t.target.ruleIndex!)), ctx=\(String(describing: config.context))")
}
let returnState: ATNState = t.followState
let newContext: PredictionContext =
SingletonPredictionContext.create(config.context, returnState.stateNumber)
let returnState = t.followState
let newContext = SingletonPredictionContext.create(config.context, returnState.stateNumber)
return ATNConfig(config, t.target, newContext)
}
@ -1857,8 +1821,8 @@ open class ParserATNSimulator: ATNSimulator {
/// conflicting alternative subsets. If `configs` does not contain any
/// conflicting subsets, this method returns an empty _java.util.BitSet_.
///
final func getConflictingAlts(_ configs: ATNConfigSet) throws -> BitSet {
let altsets: Array<BitSet> = try PredictionMode.getConflictingAltSubsets(configs)
final func getConflictingAlts(_ configs: ATNConfigSet) -> BitSet {
let altsets = PredictionMode.getConflictingAltSubsets(configs)
return PredictionMode.getAlts(altsets)
}
@ -1898,11 +1862,11 @@ open class ParserATNSimulator: ATNSimulator {
/// ignore a set of conflicting alts when we have an alternative
/// that we still need to pursue.
///
final func getConflictingAltsOrUniqueAlt(_ configs: ATNConfigSet) throws -> BitSet {
final func getConflictingAltsOrUniqueAlt(_ configs: ATNConfigSet) -> BitSet {
var conflictingAlts: BitSet
if configs.uniqueAlt != ATN.INVALID_ALT_NUMBER {
conflictingAlts = BitSet()
try conflictingAlts.set(configs.uniqueAlt)
try! conflictingAlts.set(configs.uniqueAlt)
} else {
conflictingAlts = configs.conflictingAlts!
}
@ -1914,9 +1878,8 @@ open class ParserATNSimulator: ATNSimulator {
if t == CommonToken.EOF {
return "EOF"
}
//var vocabulary : Vocabulary = parser != nil ? parser.getVocabulary() : Vocabulary.EMPTY_VOCABULARY;
let vocabulary: Vocabulary = parser.getVocabulary()
let displayName: String = vocabulary.getDisplayName(t)
let vocabulary = parser.getVocabulary()
let displayName = vocabulary.getDisplayName(t)
if displayName == String(t) {
return displayName
}
@ -1935,19 +1898,16 @@ open class ParserATNSimulator: ATNSimulator {
///
public final func dumpDeadEndConfigs(_ nvae: NoViableAltException) {
errPrint("dead end configs: ")
for c: ATNConfig in nvae.getDeadEndConfigs()!.configs {
var trans: String = "no edges"
for c in nvae.getDeadEndConfigs()!.configs {
var trans = "no edges"
if c.state.getNumberOfTransitions() > 0 {
let t: Transition = c.state.transition(0)
if t is AtomTransition {
let at: AtomTransition = t as! AtomTransition
let t = c.state.transition(0)
if let at = t as? AtomTransition {
trans = "Atom " + getTokenName(at.label)
} else {
if t is SetTransition {
let st: SetTransition = t as! SetTransition
let not: Bool = st is NotSetTransition
trans = (not ? "~" : "") + "Set " + st.set.toString()
}
else if let st = t as? SetTransition {
let not = st is NotSetTransition
trans = (not ? "~" : "") + "Set " + st.set.toString()
}
}
errPrint("\(c.toString(parser, true)):\(trans)")
@ -1958,11 +1918,15 @@ open class ParserATNSimulator: ATNSimulator {
final func noViableAlt(_ input: TokenStream,
_ outerContext: ParserRuleContext,
_ configs: ATNConfigSet,
_ startIndex: Int) throws -> NoViableAltException {
return try NoViableAltException(parser, input,
input.get(startIndex),
input.LT(1)!,
configs, outerContext)
_ startIndex: Int) -> NoViableAltException {
let startToken = try! input.get(startIndex)
var offendingToken: Token? = nil
do {
offendingToken = try input.LT(1)
}
catch {
// lookahead failed; leave offendingToken nil and report with what we have
}
return NoViableAltException(parser, input, startToken, offendingToken, configs, outerContext)
}
internal static func getUniqueAlt(_ configs: ATNConfigSet) -> Int {
@ -1994,7 +1958,7 @@ open class ParserATNSimulator: ATNSimulator {
final func addDFAEdge(_ dfa: DFA,
_ from: DFAState?,
_ t: Int,
_ to: DFAState?) throws -> DFAState? {
_ to: DFAState?) -> DFAState? {
var to = to
if debug {
print("EDGE \(String(describing: from)) -> \(String(describing: to)) upon \(getTokenName(t))")
@ -2004,7 +1968,7 @@ open class ParserATNSimulator: ATNSimulator {
return nil
}
to = try addDFAState(dfa, to!) // used existing if possible not incoming
to = addDFAState(dfa, to!) // used existing if possible not incoming
if from == nil || t < -1 || t > atn.maxTokenType {
return to
}
@ -2021,7 +1985,6 @@ open class ParserATNSimulator: ATNSimulator {
}
if debug {
// print ("DFA=\n"+dfa.toString(parser != nil ? parser.getVocabulary() : Vocabulary.EMPTY_VOCABULARY));
print("DFA=\n" + dfa.toString(parser.getVocabulary()))
}
@ -2043,12 +2006,12 @@ open class ParserATNSimulator: ATNSimulator {
/// state if `D` is already in the DFA, or `D` itself if the
/// state was not already present.
///
final func addDFAState(_ dfa: DFA, _ D: DFAState) throws -> DFAState {
final func addDFAState(_ dfa: DFA, _ D: DFAState) -> DFAState {
if D == ATNSimulator.ERROR {
return D
}
return try dfaStatesMutex.synchronized {
return dfaStatesMutex.synchronized {
if let existing = dfa.states[D] {
return existing!
}
@ -2056,7 +2019,7 @@ open class ParserATNSimulator: ATNSimulator {
D.stateNumber = dfa.states.count
if !D.configs.isReadonly() {
try D.configs.optimizeConfigs(self)
try! D.configs.optimizeConfigs(self)
D.configs.setReadonly(true)
}
@ -2069,24 +2032,20 @@ open class ParserATNSimulator: ATNSimulator {
}
}
func reportAttemptingFullContext(_ dfa: DFA, _ conflictingAlts: BitSet?, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) throws {
func reportAttemptingFullContext(_ dfa: DFA, _ conflictingAlts: BitSet?, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) {
if debug || retry_debug {
let interval: Interval = Interval.of(startIndex, stopIndex)
try print("reportAttemptingFullContext decision=\(dfa.decision):\(configs), input=\(parser.getTokenStream()!.getText(interval))")
let input = getTextInInterval(startIndex, stopIndex)
print("reportAttemptingFullContext decision=\(dfa.decision):\(configs), input=\(input)")
}
// if ( parser=nil ) {
try parser.getErrorListenerDispatch().reportAttemptingFullContext(parser, dfa, startIndex, stopIndex, conflictingAlts, configs)
// }
parser.getErrorListenerDispatch().reportAttemptingFullContext(parser, dfa, startIndex, stopIndex, conflictingAlts, configs)
}
func reportContextSensitivity(_ dfa: DFA, _ prediction: Int, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) throws {
func reportContextSensitivity(_ dfa: DFA, _ prediction: Int, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) {
if debug || retry_debug {
let interval: Interval = Interval.of(startIndex, stopIndex)
try print("reportContextSensitivity decision=\(dfa.decision):\(configs), input=\(parser.getTokenStream()!.getText(interval))")
let input = getTextInInterval(startIndex, stopIndex)
print("reportContextSensitivity decision=\(dfa.decision):\(configs), input=\(input)")
}
//if ( parser=nil ) {
try parser.getErrorListenerDispatch().reportContextSensitivity(parser, dfa, startIndex, stopIndex, prediction, configs)
// }
parser.getErrorListenerDispatch().reportContextSensitivity(parser, dfa, startIndex, stopIndex, prediction, configs)
}
///
@ -2098,17 +2057,24 @@ open class ParserATNSimulator: ATNSimulator {
_ startIndex: Int, _ stopIndex: Int,
_ exact: Bool,
_ ambigAlts: BitSet,
_ configs: ATNConfigSet) throws
_ configs: ATNConfigSet)
{
if debug || retry_debug {
let interval: Interval = Interval.of(startIndex, stopIndex)
try print("reportAmbiguity \(ambigAlts):\(configs), input=\(parser.getTokenStream()!.getText(interval))")
let input = getTextInInterval(startIndex, stopIndex)
print("reportAmbiguity \(ambigAlts):\(configs), input=\(input)")
}
//TODO ( parser != nil ?
//if ( parser != nil ) {
try parser .getErrorListenerDispatch().reportAmbiguity(parser, dfa, startIndex, stopIndex,
parser.getErrorListenerDispatch().reportAmbiguity(parser, dfa, startIndex, stopIndex,
exact, ambigAlts, configs)
//}
}
private func getTextInInterval(_ startIndex: Int, _ stopIndex: Int) -> String {
let interval = Interval.of(startIndex, stopIndex)
do {
return try parser.getTokenStream()?.getText(interval) ?? "<unknown>"
}
catch {
return "<unknown>"
}
}
public final func setPredictionMode(_ mode: PredictionMode) {
@ -2120,9 +2086,6 @@ open class ParserATNSimulator: ATNSimulator {
return mode
}
///
/// - 4.3
///
public final func getParser() -> Parser {
return parser
}
View File
@ -12,18 +12,19 @@ public class PredictionContext: Hashable, CustomStringConvertible {
/// Represents `$` in local context prediction, which means wildcard.
/// `+x = *`.
///
public static let EMPTY: EmptyPredictionContext = EmptyPredictionContext()
public static let EMPTY = EmptyPredictionContext()
///
/// Represents `$` in an array in full context mode, when `$`
/// doesn't mean wildcard: `$ + x = [$,x]`. Here,
/// `$` = _#EMPTY_RETURN_STATE_.
///
public static let EMPTY_RETURN_STATE: Int = Int(Int32.max)
public static let EMPTY_RETURN_STATE = Int(Int32.max)
private static let INITIAL_HASH: Int = 1
private static let INITIAL_HASH = UInt32(1)
public static var globalNodeCount = 0
public static var globalNodeCount: Int = 0
public final let id: Int = {
let oldGlobalNodeCount = globalNodeCount
globalNodeCount += 1
@ -62,12 +63,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
/// Return _#EMPTY_ if `outerContext` is empty or null.
///
public static func fromRuleContext(_ atn: ATN, _ outerContext: RuleContext?) -> PredictionContext {
var _outerContext: RuleContext
if let outerContext = outerContext {
_outerContext = outerContext
}else {
_outerContext = RuleContext.EMPTY
}
let _outerContext = outerContext ?? RuleContext.EMPTY
// if we are in RuleContext of start rule, s, then PredictionContext
// is EMPTY. Nobody called us. (if we are empty, return empty)
@ -76,29 +72,25 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
// If we have a parent, convert it to a PredictionContext graph
var parent: PredictionContext = EMPTY
parent = PredictionContext.fromRuleContext(atn, _outerContext.parent)
let parent = PredictionContext.fromRuleContext(atn, _outerContext.parent)
let state: ATNState = atn.states[_outerContext.invokingState]!
let transition: RuleTransition = state.transition(0) as! RuleTransition
let state = atn.states[_outerContext.invokingState]!
let transition = state.transition(0) as! RuleTransition
return SingletonPredictionContext.create(parent, transition.followState.stateNumber)
}
public func size() -> Int {
RuntimeException(#function + " must be overridden")
return 0
fatalError(#function + " must be overridden")
}
public func getParent(_ index: Int) -> PredictionContext? {
RuntimeException(#function + " must be overridden")
return nil
fatalError(#function + " must be overridden")
}
public func getReturnState(_ index: Int) -> Int {
RuntimeException(#function + " must be overridden")
return 0
fatalError(#function + " must be overridden")
}
@ -118,21 +110,19 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
static func calculateEmptyHashCode() -> Int {
var hash: Int = MurmurHash.initialize(INITIAL_HASH)
hash = MurmurHash.finish(hash, 0)
return hash
let hash = MurmurHash.initialize(INITIAL_HASH)
return MurmurHash.finish(hash, 0)
}
static func calculateHashCode(_ parent: PredictionContext?, _ returnState: Int) -> Int {
var hash: Int = MurmurHash.initialize(INITIAL_HASH)
var hash = MurmurHash.initialize(INITIAL_HASH)
hash = MurmurHash.update(hash, parent)
hash = MurmurHash.update(hash, returnState)
hash = MurmurHash.finish(hash, 2)
return hash
return MurmurHash.finish(hash, 2)
}
static func calculateHashCode(_ parents: [PredictionContext?], _ returnStates: [Int]) -> Int {
var hash: Int = MurmurHash.initialize(INITIAL_HASH)
var hash = MurmurHash.initialize(INITIAL_HASH)
var length = parents.count
for i in 0..<length {
hash = MurmurHash.update(hash, parents[i])
@ -142,8 +132,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
hash = MurmurHash.update(hash, returnStates[i])
}
hash = MurmurHash.finish(hash, 2 * parents.count)
return hash
return MurmurHash.finish(hash, 2 * parents.count)
}
// dispatch
@ -163,29 +152,27 @@ public class PredictionContext: Hashable, CustomStringConvertible {
return a
}
if (a is SingletonPredictionContext && b is SingletonPredictionContext) {
return mergeSingletons(a as! SingletonPredictionContext,
b as! SingletonPredictionContext,
rootIsWildcard, &mergeCache)
if let spc_a = a as? SingletonPredictionContext, let spc_b = b as? SingletonPredictionContext {
return mergeSingletons(spc_a, spc_b, rootIsWildcard, &mergeCache)
}
// At least one of a or b is array
// If one is $ and rootIsWildcard, return $ as * wildcard
if (rootIsWildcard) {
if (a is EmptyPredictionContext) {
if rootIsWildcard {
if a is EmptyPredictionContext {
return a
}
if (b is EmptyPredictionContext) {
if b is EmptyPredictionContext {
return b
}
}
// convert singleton so both are arrays to normalize
if (a is SingletonPredictionContext) {
a = ArrayPredictionContext(a as! SingletonPredictionContext)
if let spc_a = a as? SingletonPredictionContext {
a = ArrayPredictionContext(spc_a)
}
if (b is SingletonPredictionContext) {
b = ArrayPredictionContext(b as! SingletonPredictionContext)
if let spc_b = b as? SingletonPredictionContext {
b = ArrayPredictionContext(spc_b)
}
return mergeArrays(a as! ArrayPredictionContext, b as! ArrayPredictionContext,
rootIsWildcard, &mergeCache)
@ -225,7 +212,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?) -> PredictionContext {
if let mergeCache = mergeCache {
var previous: PredictionContext? = mergeCache.get(a, b)
var previous = mergeCache.get(a, b)
if previous != nil {
return previous!
}
@ -243,45 +230,45 @@ public class PredictionContext: Hashable, CustomStringConvertible {
return rootMerge
}
if (a.returnState == b.returnState) {
if a.returnState == b.returnState {
// a == b
let parent: PredictionContext = merge(a.parent!, b.parent!, rootIsWildcard, &mergeCache);
let parent = merge(a.parent!, b.parent!, rootIsWildcard, &mergeCache)
// if parent is same as existing a or b parent or reduced to a parent, return it
if (parent === a.parent!) {
if parent === a.parent! {
return a
} // ax + bx = ax, if a=b
if (parent === b.parent!) {
if parent === b.parent! {
return b
} // ax + bx = bx, if a=b
// else: ax + ay = a'[x,y]
// merge parents x and y, giving array node with x,y then remainders
// of those graphs. dup a, a' points at merged array
// new joined parent so create new singleton pointing to it, a'
let a_: PredictionContext = SingletonPredictionContext.create(parent, a.returnState);
if (mergeCache != nil) {
let a_ = SingletonPredictionContext.create(parent, a.returnState);
if mergeCache != nil {
mergeCache!.put(a, b, a_)
}
return a_
} else {
// a != b payloads differ
// see if we can collapse parents due to $+x parents if local ctx
var singleParent: PredictionContext? = nil;
var singleParent: PredictionContext? = nil
//added by janyou
if a === b || (a.parent != nil && a.parent! == b.parent) {
// ax + bx = [a,b]x
singleParent = a.parent
}
if (singleParent != nil) {
if singleParent != nil {
// parents are same
// sort payloads and use same parent
var payloads: [Int] = [a.returnState, b.returnState];
if (a.returnState > b.returnState) {
var payloads = [a.returnState, b.returnState]
if a.returnState > b.returnState {
payloads[0] = b.returnState
payloads[1] = a.returnState
}
let parents: [PredictionContext?] = [singleParent, singleParent]
let a_: PredictionContext = ArrayPredictionContext(parents, payloads)
if (mergeCache != nil) {
let parents = [singleParent, singleParent]
let a_ = ArrayPredictionContext(parents, payloads)
if mergeCache != nil {
mergeCache!.put(a, b, a_)
}
return a_
@ -289,19 +276,19 @@ public class PredictionContext: Hashable, CustomStringConvertible {
// parents differ and can't merge them. Just pack together
// into array; can't merge.
// ax + by = [ax,by]
var payloads: [Int] = [a.returnState, b.returnState]
var parents: [PredictionContext?] = [a.parent, b.parent];
if (a.returnState > b.returnState) {
var payloads = [a.returnState, b.returnState]
var parents = [a.parent, b.parent]
if a.returnState > b.returnState {
// sort by payload
payloads[0] = b.returnState
payloads[1] = a.returnState
parents = [b.parent, a.parent]
}
if a is EmptyPredictionContext {
// print("parenet is null")
// print("parent is null")
}
let a_: PredictionContext = ArrayPredictionContext(parents, payloads);
if (mergeCache != nil) {
let a_ = ArrayPredictionContext(parents, payloads)
if mergeCache != nil {
mergeCache!.put(a, b, a_)
}
return a_
@ -349,31 +336,29 @@ public class PredictionContext: Hashable, CustomStringConvertible {
public static func mergeRoot(_ a: SingletonPredictionContext,
_ b: SingletonPredictionContext,
_ rootIsWildcard: Bool) -> PredictionContext? {
if (rootIsWildcard) {
if (a === PredictionContext.EMPTY) {
if rootIsWildcard {
if a === PredictionContext.EMPTY {
return PredictionContext.EMPTY
} // * + b = *
if (b === PredictionContext.EMPTY) {
if b === PredictionContext.EMPTY {
return PredictionContext.EMPTY
} // a + * = *
} else {
if (a === PredictionContext.EMPTY && b === PredictionContext.EMPTY) {
if a === PredictionContext.EMPTY && b === PredictionContext.EMPTY {
return PredictionContext.EMPTY
} // $ + $ = $
if (a === PredictionContext.EMPTY) {
if a === PredictionContext.EMPTY {
// $ + x = [$,x]
let payloads: [Int] = [b.returnState, EMPTY_RETURN_STATE]
let parents: [PredictionContext?] = [b.parent, nil]
let joined: PredictionContext =
ArrayPredictionContext(parents, payloads)
return joined;
let payloads = [b.returnState, EMPTY_RETURN_STATE]
let parents = [b.parent, nil]
let joined = ArrayPredictionContext(parents, payloads)
return joined
}
if (b === PredictionContext.EMPTY) {
if b === PredictionContext.EMPTY {
// x + $ = [$,x] ($ is always first if present)
let payloads: [Int] = [a.returnState, EMPTY_RETURN_STATE]
let parents: [PredictionContext?] = [a.parent, nil]
let joined: PredictionContext =
ArrayPredictionContext(parents, payloads)
let payloads = [a.returnState, EMPTY_RETURN_STATE]
let parents = [a.parent, nil]
let joined = ArrayPredictionContext(parents, payloads)
return joined
}
}
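// Worked sketch of the $/* merge rules above, using only symbols visible in this
// file (a standalone illustration, not part of the change; assumes the Antlr4
// module and that EmptyPredictionContext is a SingletonPredictionContext):
import Antlr4

let x = SingletonPredictionContext.create(PredictionContext.EMPTY, 7)
// * + x = * when the root is a wildcard (local-context / SLL prediction)
let wild = PredictionContext.mergeRoot(PredictionContext.EMPTY, x, true)
print(wild === PredictionContext.EMPTY)    // true
// $ + x = [$, x] in full-context mode
let full = PredictionContext.mergeRoot(PredictionContext.EMPTY, x, false)
print(full! is ArrayPredictionContext)     // true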
@ -405,30 +390,29 @@ public class PredictionContext: Hashable, CustomStringConvertible {
_ rootIsWildcard: Bool,
_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?) -> PredictionContext {
if (mergeCache != nil) {
var previous: PredictionContext? = mergeCache!.get(a, b)
if (previous != nil) {
if mergeCache != nil {
var previous = mergeCache!.get(a, b)
if previous != nil {
return previous!
}
previous = mergeCache!.get(b, a)
if (previous != nil) {
if previous != nil {
return previous!
}
}
// merge sorted payloads a + b => M
var i: Int = 0 // walks a
var j: Int = 0 // walks b
var k: Int = 0// walks target M array
var i = 0 // walks a
var j = 0 // walks b
var k = 0 // walks target M array
let aReturnStatesLength = a.returnStates.count
let bReturnStatesLength = b.returnStates.count
let mergedReturnStatesLength = aReturnStatesLength + bReturnStatesLength
var mergedReturnStates: [Int] = [Int](repeating: 0, count: mergedReturnStatesLength)
var mergedReturnStates = [Int](repeating: 0, count: mergedReturnStatesLength)
var mergedParents: [PredictionContext?] = [PredictionContext?](repeating: nil, count: mergedReturnStatesLength)
//new PredictionContext[a.returnStates.length + b.returnStates.length];
var mergedParents = [PredictionContext?](repeating: nil, count: mergedReturnStatesLength)
// walk and merge to yield mergedParents, mergedReturnStates
let aReturnStates = a.returnStates
let bReturnStates = b.returnStates
@ -436,35 +420,27 @@ public class PredictionContext: Hashable, CustomStringConvertible {
let bParents = b.parents
while i < aReturnStatesLength && j < bReturnStatesLength {
let a_parent: PredictionContext? = aParents[i]
let b_parent: PredictionContext? = bParents[j]
if (aReturnStates[i] == bReturnStates[j]) {
let a_parent = aParents[i]
let b_parent = bParents[j]
if aReturnStates[i] == bReturnStates[j] {
// same payload (stack tops are equal), must yield merged singleton
let payload: Int = aReturnStates[i]
let payload = aReturnStates[i]
// $+$ = $
var both$: Bool = (payload == EMPTY_RETURN_STATE)
both$ = both$ && a_parent == nil
both$ = both$ && b_parent == nil
// let both$: Bool = ((payload == EMPTY_RETURN_STATE) &&
// a_parent == nil && b_parent == nil)
var ax_ax: Bool = (a_parent != nil && b_parent != nil)
ax_ax = ax_ax && a_parent! == b_parent!
// let ax_ax: Bool = (a_parent != nil && b_parent != nil) && a_parent! == b_parent! // ax+ax -> ax
let both$ = ((payload == EMPTY_RETURN_STATE) && a_parent == nil && b_parent == nil)
let ax_ax = (a_parent != nil && b_parent != nil && a_parent! == b_parent!)
if (both$ || ax_ax) {
if both$ || ax_ax {
mergedParents[k] = a_parent // choose left
mergedReturnStates[k] = payload
} else {
// ax+ay -> a'[x,y]
let mergedParent: PredictionContext =
merge(a_parent!, b_parent!, rootIsWildcard, &mergeCache)
let mergedParent = merge(a_parent!, b_parent!, rootIsWildcard, &mergeCache)
mergedParents[k] = mergedParent
mergedReturnStates[k] = payload
}
i += 1 // hop over left one as usual
j += 1 // but also skip one in right side since we merge
} else if (aReturnStates[i] < bReturnStates[j]) {
} else if aReturnStates[i] < bReturnStates[j] {
// copy a[i] to M
mergedParents[k] = a_parent
mergedReturnStates[k] = aReturnStates[i]
@ -479,7 +455,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
// copy over any payloads remaining in either array
if (i < aReturnStatesLength) {
if i < aReturnStatesLength {
for p in i..<aReturnStatesLength {
mergedParents[k] = aParents[p]
@ -495,14 +471,12 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
// trim merged if we combined a few that had same stack tops
if (k < mergedParents.count) {
if k < mergedParents.count {
// write index < last position; trim
if (k == 1) {
if k == 1 {
// for just one merged element, return singleton top
let a_: PredictionContext =
SingletonPredictionContext.create(mergedParents[0],
mergedReturnStates[0])
if (mergeCache != nil) {
let a_ = SingletonPredictionContext.create(mergedParents[0], mergedReturnStates[0])
if mergeCache != nil {
mergeCache!.put(a, b, a_)
}
//print("merge array 1 \(a_)")
@ -512,8 +486,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
mergedReturnStates = Array(mergedReturnStates[0 ..< k])
}
let M: ArrayPredictionContext =
ArrayPredictionContext(mergedParents, mergedReturnStates)
let M = ArrayPredictionContext(mergedParents, mergedReturnStates)
// if we created same array as a or b, return that instead
// TODO: track whether this is possible above during merge sort for speed
@ -542,24 +515,24 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
public static func toDOTString(_ context: PredictionContext?) -> String {
if (context == nil) {
if context == nil {
return ""
}
let buf: StringBuilder = StringBuilder()
let buf = StringBuilder()
buf.append("digraph G {\n")
buf.append("rankdir=LR;\n")
var nodes: Array<PredictionContext> = getAllContextNodes(context!)
var nodes = getAllContextNodes(context!)
nodes.sort(by: { $0.id > $1.id })
nodes.sort { $0.id > $1.id }
for current: PredictionContext in nodes {
if (current is SingletonPredictionContext) {
let s: String = String(current.id)
for current in nodes {
if current is SingletonPredictionContext {
let s = String(current.id)
buf.append(" s").append(s)
var returnState: String = String(current.getReturnState(0))
if (current is EmptyPredictionContext) {
var returnState = String(current.getReturnState(0))
if current is EmptyPredictionContext {
returnState = "$"
}
buf.append(" [label=\"")
@ -567,17 +540,17 @@ public class PredictionContext: Hashable, CustomStringConvertible {
buf.append("\"];\n")
continue
}
let arr: ArrayPredictionContext = current as! ArrayPredictionContext
let arr = current as! ArrayPredictionContext
buf.append(" s").append(arr.id)
buf.append(" [shape=box, label=\"")
buf.append("[")
var first: Bool = true
var first = true
let returnStates = arr.returnStates
for inv: Int in returnStates {
if (!first) {
for inv in returnStates {
if !first {
buf.append(", ")
}
if (inv == EMPTY_RETURN_STATE) {
if inv == EMPTY_RETURN_STATE {
buf.append("$")
} else {
buf.append(inv)
@ -588,8 +561,8 @@ public class PredictionContext: Hashable, CustomStringConvertible {
buf.append("\"];\n")
}
for current: PredictionContext in nodes {
if (current === EMPTY) {
for current in nodes {
if current === EMPTY {
continue
}
let length = current.size()
@ -597,13 +570,13 @@ public class PredictionContext: Hashable, CustomStringConvertible {
guard let currentParent = current.getParent(i) else {
continue
}
let s: String = String(current.id)
let s = String(current.id)
buf.append(" s").append(s)
buf.append("->")
buf.append("s")
buf.append(currentParent.id)
if (current.size() > 1) {
buf.append(" [label=\"parent[\(i)]\"];\n");
if current.size() > 1 {
buf.append(" [label=\"parent[\(i)]\"];\n")
} else {
buf.append(";\n")
}
@ -619,23 +592,23 @@ public class PredictionContext: Hashable, CustomStringConvertible {
_ context: PredictionContext,
_ contextCache: PredictionContextCache,
_ visited: HashMap<PredictionContext, PredictionContext>) -> PredictionContext {
if (context.isEmpty()) {
if context.isEmpty() {
return context
}
var existing: PredictionContext? = visited[context]
if (existing != nil) {
var existing = visited[context]
if existing != nil {
return existing!
}
existing = contextCache.get(context)
if (existing != nil) {
if existing != nil {
visited[context] = existing!
return existing!
}
var changed: Bool = false
var parents: [PredictionContext?] = [PredictionContext?](repeating: nil, count: context.size())
var changed = false
var parents = [PredictionContext?](repeating: nil, count: context.size())
let length = parents.count
for i in 0..<length {
//added by janyou
@ -643,10 +616,10 @@ public class PredictionContext: Hashable, CustomStringConvertible {
return context
}
let parent: PredictionContext = getCachedContext(context.getParent(i)!, contextCache, visited)
let parent = getCachedContext(context.getParent(i)!, contextCache, visited)
//modified by janyou != !==
if (changed || parent !== context.getParent(i)) {
if (!changed) {
if changed || parent !== context.getParent(i) {
if !changed {
parents = [PredictionContext?](repeating: nil, count: context.size())
for j in 0..<context.size() {
@ -660,22 +633,22 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
}
if (!changed) {
if !changed {
contextCache.add(context)
visited[context] = context
return context
}
var updated: PredictionContext
if (parents.count == 0) {
let updated: PredictionContext
if parents.isEmpty {
updated = EMPTY
} else {
if (parents.count == 1) {
updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0))
} else {
let arrayPredictionContext: ArrayPredictionContext = context as! ArrayPredictionContext
updated = ArrayPredictionContext(parents, arrayPredictionContext.returnStates)
}
else if parents.count == 1 {
updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0))
}
else {
let arrayPredictionContext = context as! ArrayPredictionContext
updated = ArrayPredictionContext(parents, arrayPredictionContext.returnStates)
}
contextCache.add(updated)
@ -688,19 +661,16 @@ public class PredictionContext: Hashable, CustomStringConvertible {
// ter's recursive version of Sam's getAllNodes()
public static func getAllContextNodes(_ context: PredictionContext) -> Array<PredictionContext> {
var nodes: Array<PredictionContext> = Array<PredictionContext>()
let visited: HashMap<PredictionContext, PredictionContext> =
HashMap<PredictionContext, PredictionContext>()
public static func getAllContextNodes(_ context: PredictionContext) -> [PredictionContext] {
var nodes = [PredictionContext]()
let visited = HashMap<PredictionContext, PredictionContext>()
getAllContextNodes_(context, &nodes, visited)
return nodes
}
public static func getAllContextNodes_(_ context: PredictionContext?,
_ nodes: inout Array<PredictionContext>,
_ nodes: inout [PredictionContext],
_ visited: HashMap<PredictionContext, PredictionContext>) {
//if (context == nil || visited.keys.contains(context!)) {
guard let context = context, visited[context] == nil else {
return
}
@ -723,50 +693,50 @@ public class PredictionContext: Hashable, CustomStringConvertible {
// FROM SAM
public func toStrings<T>(_ recognizer: Recognizer<T>?, _ stop: PredictionContext, _ currentState: Int) -> [String] {
var result: Array<String> = Array<String>()
var perm: Int = 0
var result = [String]()
var perm = 0
outer: while true {
var offset: Int = 0
var last: Bool = true
var p: PredictionContext = self
var stateNumber: Int = currentState
let localBuffer: StringBuilder = StringBuilder()
var offset = 0
var last = true
var p = self
var stateNumber = currentState
let localBuffer = StringBuilder()
localBuffer.append("[")
while !p.isEmpty() && p !== stop {
var index: Int = 0
if (p.size() > 0) {
var bits: Int = 1
var index = 0
if p.size() > 0 {
var bits = 1
while (1 << bits) < p.size() {
bits += 1
}
let mask: Int = (1 << bits) - 1
let mask = (1 << bits) - 1
index = (perm >> offset) & mask
//last &= index >= p.size() - 1;
//last = Bool(Int(last) & (index >= p.size() - 1));
last = last && (index >= p.size() - 1)
if (index >= p.size()) {
if index >= p.size() {
continue outer
}
offset += bits
}
if let recognizer = recognizer {
if (localBuffer.length > 1) {
if localBuffer.length > 1 {
// first char is '[', if more than that this isn't the first rule
localBuffer.append(" ")
}
let atn: ATN = recognizer.getATN()
let s: ATNState = atn.states[stateNumber]!
let ruleName: String = recognizer.getRuleNames()[s.ruleIndex!]
let atn = recognizer.getATN()
let s = atn.states[stateNumber]!
let ruleName = recognizer.getRuleNames()[s.ruleIndex!]
localBuffer.append(ruleName)
} else {
if (p.getReturnState(index) != PredictionContext.EMPTY_RETURN_STATE) {
if (!p.isEmpty()) {
if (localBuffer.length > 1) {
}
else if p.getReturnState(index) != PredictionContext.EMPTY_RETURN_STATE {
if !p.isEmpty() {
if localBuffer.length > 1 {
// first char is '[', if more than that this isn't the first rule
localBuffer.append(" ")
}
@ -774,14 +744,13 @@ public class PredictionContext: Hashable, CustomStringConvertible {
localBuffer.append(p.getReturnState(index))
}
}
}
stateNumber = p.getReturnState(index)
p = p.getParent(index)!
}
localBuffer.append("]")
result.append(localBuffer.toString())
if (last) {
if last {
break
}
@ -792,17 +761,18 @@ public class PredictionContext: Hashable, CustomStringConvertible {
}
public var description: String {
return String(describing: PredictionContext.self) + "@" + String(Unmanaged.passUnretained(self).toOpaque().hashValue)
}
}
public func ==(lhs: RuleContext, rhs: ParserRuleContext) -> Bool {
if !(lhs is ParserRuleContext) {
if let lhs = lhs as? ParserRuleContext {
return lhs === rhs
}
else {
return false
}
return (lhs as! ParserRuleContext) === rhs
}
public func ==(lhs: PredictionContext, rhs: PredictionContext) -> Bool {
@ -810,16 +780,16 @@ public func ==(lhs: PredictionContext, rhs: PredictionContext) -> Bool {
if lhs === rhs {
return true
}
if (lhs is EmptyPredictionContext) {
if lhs is EmptyPredictionContext {
return lhs === rhs
}
if (lhs is SingletonPredictionContext) && (rhs is SingletonPredictionContext) {
return (lhs as! SingletonPredictionContext) == (rhs as! SingletonPredictionContext)
if let lhs = lhs as? SingletonPredictionContext, let rhs = rhs as? SingletonPredictionContext {
return lhs == rhs
}
if (lhs is ArrayPredictionContext) && (rhs is ArrayPredictionContext) {
return (lhs as! ArrayPredictionContext) == (rhs as! ArrayPredictionContext)
if let lhs = lhs as? ArrayPredictionContext, let rhs = rhs as? ArrayPredictionContext {
return lhs == rhs
}
return false
View File
@ -164,7 +164,7 @@ public enum PredictionMode {
/// the configurations to strip out all of the predicates so that a standard
/// _org.antlr.v4.runtime.atn.ATNConfigSet_ will merge everything ignoring predicates.
///
public static func hasSLLConflictTerminatingPrediction(_ mode: PredictionMode,_ configs: ATNConfigSet) throws -> Bool {
public static func hasSLLConflictTerminatingPrediction(_ mode: PredictionMode,_ configs: ATNConfigSet) -> Bool {
var configs = configs
///
/// Configs in rule stop states indicate reaching the end of the decision
@ -183,17 +183,16 @@ public enum PredictionMode {
// since we'll often fail over anyway.
if configs.hasSemanticContext {
// dup configs, tossing out semantic predicates
configs = try configs.dupConfigsWithoutSemanticPredicates()
configs = configs.dupConfigsWithoutSemanticPredicates()
}
// now we have combined contexts for configs with dissimilar preds
}
// pure SLL or combined SLL+LL mode parsing
let altsets: Array<BitSet> = try getConflictingAltSubsets(configs)
let altsets = getConflictingAltSubsets(configs)
let heuristic: Bool =
try hasConflictingAltSet(altsets) && !hasStateAssociatedWithOneAlt(configs)
let heuristic = hasConflictingAltSet(altsets) && !hasStateAssociatedWithOneAlt(configs)
return heuristic
}
@ -364,8 +363,8 @@ public enum PredictionMode {
/// we need exact ambiguity detection when the sets look like
/// `A={{1,2}}` or `{{1,2},{1,2}}`, etc...
///
public static func resolvesToJustOneViableAlt(_ altsets: Array<BitSet>) throws -> Int {
return try getSingleViableAlt(altsets)
public static func resolvesToJustOneViableAlt(_ altsets: [BitSet]) -> Int {
return getSingleViableAlt(altsets)
}
///
@ -376,7 +375,7 @@ public enum PredictionMode {
/// - returns: `true` if every _java.util.BitSet_ in `altsets` has
/// _java.util.BitSet#cardinality cardinality_ > 1, otherwise `false`
///
public static func allSubsetsConflict(_ altsets: Array<BitSet>) -> Bool {
public static func allSubsetsConflict(_ altsets: [BitSet]) -> Bool {
return !hasNonConflictingAltSet(altsets)
}
@ -388,7 +387,7 @@ public enum PredictionMode {
/// - returns: `true` if `altsets` contains a _java.util.BitSet_ with
/// _java.util.BitSet#cardinality cardinality_ 1, otherwise `false`
///
public static func hasNonConflictingAltSet(_ altsets: Array<BitSet>) -> Bool {
public static func hasNonConflictingAltSet(_ altsets: [BitSet]) -> Bool {
for alts: BitSet in altsets {
if alts.cardinality() == 1 {
return true
@ -405,7 +404,7 @@ public enum PredictionMode {
/// - returns: `true` if `altsets` contains a _java.util.BitSet_ with
/// _java.util.BitSet#cardinality cardinality_ > 1, otherwise `false`
///
public static func hasConflictingAltSet(_ altsets: Array<BitSet>) -> Bool {
public static func hasConflictingAltSet(_ altsets: [BitSet]) -> Bool {
for alts: BitSet in altsets {
if alts.cardinality() > 1 {
return true
@ -421,7 +420,7 @@ public enum PredictionMode {
/// - returns: `true` if every member of `altsets` is equal to the
/// others, otherwise `false`
///
public static func allSubsetsEqual(_ altsets: Array<BitSet>) -> Bool {
public static func allSubsetsEqual(_ altsets: [BitSet]) -> Bool {
let first: BitSet = altsets[0]
for it in altsets {
@ -440,10 +439,10 @@ public enum PredictionMode {
///
/// - parameter altsets: a collection of alternative subsets
///
public static func getUniqueAlt(_ altsets: Array<BitSet>) throws -> Int {
public static func getUniqueAlt(_ altsets: [BitSet]) -> Int {
let all: BitSet = getAlts(altsets)
if all.cardinality() == 1 {
return try all.nextSetBit(0)
return all.firstSetBit()
}
return ATN.INVALID_ALT_NUMBER
}
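// Hedged sketch (Antlr4 module assumed): getUniqueAlt unions the subsets and
// returns that alt only when exactly one remains; otherwise ATN.INVALID_ALT_NUMBER.
import Antlr4

let only2a = BitSet(); try! only2a.set(2)
let only2b = BitSet(); try! only2b.set(2)
print(PredictionMode.getUniqueAlt([only2a, only2b]))   // 2
try! only2b.set(3)
print(PredictionMode.getUniqueAlt([only2a, only2b]) == ATN.INVALID_ALT_NUMBER)   // true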
@ -467,9 +466,8 @@ public enum PredictionMode {
///
/// Get union of all alts from configs. - Since: 4.5.1
///
public static func getAlts(_ configs: ATNConfigSet) throws -> BitSet {
return try configs.getAltBitSet()
public static func getAlts(_ configs: ATNConfigSet) -> BitSet {
return configs.getAltBitSet()
}
@ -483,9 +481,8 @@ public enum PredictionMode {
///
///
public static func getConflictingAltSubsets(_ configs: ATNConfigSet) throws -> Array<BitSet> {
return try configs.getConflictingAltSubsets()
public static func getConflictingAltSubsets(_ configs: ATNConfigSet) -> [BitSet] {
return configs.getConflictingAltSubsets()
}
///
@ -496,16 +493,13 @@ public enum PredictionMode {
/// map[c._org.antlr.v4.runtime.atn.ATNConfig#state state_] U= c._org.antlr.v4.runtime.atn.ATNConfig#alt alt_
///
///
public static func getStateToAltMap(_ configs: ATNConfigSet) throws -> HashMap<ATNState, BitSet> {
return try configs.getStateToAltMap()
public static func getStateToAltMap(_ configs: ATNConfigSet) -> HashMap<ATNState, BitSet> {
return configs.getStateToAltMap()
}
public static func hasStateAssociatedWithOneAlt(_ configs: ATNConfigSet) throws -> Bool {
let x: HashMap<ATNState, BitSet> = try getStateToAltMap(configs)
let values = x.values
for alts: BitSet in values {
public static func hasStateAssociatedWithOneAlt(_ configs: ATNConfigSet) -> Bool {
let x = getStateToAltMap(configs)
for alts in x.values {
if alts.cardinality() == 1 {
return true
}
@ -513,17 +507,17 @@ public enum PredictionMode {
return false
}
public static func getSingleViableAlt(_ altsets: Array<BitSet>) throws -> Int {
let viableAlts: BitSet = BitSet()
for alts: BitSet in altsets {
let minAlt: Int = try alts.nextSetBit(0)
try viableAlts.set(minAlt)
public static func getSingleViableAlt(_ altsets: [BitSet]) -> Int {
let viableAlts = BitSet()
for alts in altsets {
let minAlt = alts.firstSetBit()
try! viableAlts.set(minAlt)
if viableAlts.cardinality() > 1 {
// more than 1 viable alt
return ATN.INVALID_ALT_NUMBER
}
}
return try viableAlts.nextSetBit(0)
return viableAlts.firstSetBit()
}
}
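// Hedged sketch (Antlr4 module assumed): getSingleViableAlt keeps only the
// minimum alt of each subset and succeeds while those minima agree.
import Antlr4

let s1 = BitSet(); try! s1.set(1); try! s1.set(2)
let s2 = BitSet(); try! s2.set(1); try! s2.set(3)
print(PredictionMode.getSingleViableAlt([s1, s2]))   // 1
let s3 = BitSet(); try! s3.set(2); try! s3.set(3)
print(PredictionMode.getSingleViableAlt([s1, s3]) == ATN.INVALID_ALT_NUMBER)   // true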
View File
@ -113,7 +113,7 @@ public class ProfilingATNSimulator: ParserATNSimulator {
override
internal func computeTargetState(_ dfa: DFA, _ previousD: DFAState, _ t: Int) throws -> DFAState {
let state: DFAState = try super.computeTargetState(dfa, previousD, t)
let state = try super.computeTargetState(dfa, previousD, t)
currentState = state
return state
}
@ -126,7 +126,7 @@ public class ProfilingATNSimulator: ParserATNSimulator {
_llStopIndex = _input.index()
}
let reachConfigs: ATNConfigSet? = try super.computeReachSet(closure, t, fullCtx)
let reachConfigs = try super.computeReachSet(closure, t, fullCtx)
if fullCtx {
decisions[currentDecision].LL_ATNTransitions += 1 // count computation even if error
if reachConfigs != nil {
@ -152,10 +152,10 @@ public class ProfilingATNSimulator: ParserATNSimulator {
override
internal func evalSemanticContext(_ pred: SemanticContext, _ parserCallStack: ParserRuleContext, _ alt: Int, _ fullCtx: Bool) throws -> Bool {
let result: Bool = try super.evalSemanticContext(pred, parserCallStack, alt, fullCtx)
let result = try super.evalSemanticContext(pred, parserCallStack, alt, fullCtx)
if !(pred is SemanticContext.PrecedencePredicate) {
let fullContext: Bool = _llStopIndex >= 0
let stopIndex: Int = fullContext ? _llStopIndex : _sllStopIndex
let fullContext = _llStopIndex >= 0
let stopIndex = fullContext ? _llStopIndex : _sllStopIndex
decisions[currentDecision].predicateEvals.append(
PredicateEvalInfo(currentDecision, _input, _startIndex, stopIndex, pred, result, alt, fullCtx)
)
@ -165,34 +165,36 @@ public class ProfilingATNSimulator: ParserATNSimulator {
}
override
internal func reportAttemptingFullContext(_ dfa: DFA, _ conflictingAlts: BitSet?, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) throws {
internal func reportAttemptingFullContext(_ dfa: DFA, _ conflictingAlts: BitSet?, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) {
if let conflictingAlts = conflictingAlts {
conflictingAltResolvedBySLL = try conflictingAlts.nextSetBit(0)
conflictingAltResolvedBySLL = conflictingAlts.firstSetBit()
} else {
conflictingAltResolvedBySLL = try configs.getAlts().nextSetBit(0)
let configAlts = configs.getAlts()
conflictingAltResolvedBySLL = configAlts.firstSetBit()
}
decisions[currentDecision].LL_Fallback += 1
try super.reportAttemptingFullContext(dfa, conflictingAlts, configs, startIndex, stopIndex)
super.reportAttemptingFullContext(dfa, conflictingAlts, configs, startIndex, stopIndex)
}
override
internal func reportContextSensitivity(_ dfa: DFA, _ prediction: Int, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) throws {
internal func reportContextSensitivity(_ dfa: DFA, _ prediction: Int, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) {
if prediction != conflictingAltResolvedBySLL {
decisions[currentDecision].contextSensitivities.append(
ContextSensitivityInfo(currentDecision, configs, _input, startIndex, stopIndex)
)
}
try super.reportContextSensitivity(dfa, prediction, configs, startIndex, stopIndex)
super.reportContextSensitivity(dfa, prediction, configs, startIndex, stopIndex)
}
override
internal func reportAmbiguity(_ dfa: DFA, _ D: DFAState, _ startIndex: Int, _ stopIndex: Int, _ exact: Bool,
_ ambigAlts: BitSet?, _ configs: ATNConfigSet) throws {
_ ambigAlts: BitSet?, _ configs: ATNConfigSet) {
var prediction: Int
if let ambigAlts = ambigAlts {
prediction = try ambigAlts.nextSetBit(0)
prediction = ambigAlts.firstSetBit()
} else {
prediction = try configs.getAlts().nextSetBit(0)
let configAlts = configs.getAlts()
prediction = configAlts.firstSetBit()
}
if configs.fullCtx && prediction != conflictingAltResolvedBySLL {
// Even though this is an ambiguity we are reporting, we can
@ -208,7 +210,7 @@ public class ProfilingATNSimulator: ParserATNSimulator {
AmbiguityInfo(currentDecision, configs, ambigAlts!,
_input, startIndex, stopIndex, configs.fullCtx)
)
try super.reportAmbiguity(dfa, D, startIndex, stopIndex, exact, ambigAlts!, configs)
super.reportAmbiguity(dfa, D, startIndex, stopIndex, exact, ambigAlts!, configs)
}
View File
@ -22,9 +22,8 @@ public final class RangeTransition: Transition, CustomStringConvertible {
}
override
//old label()
public func labelIntervalSet() throws -> IntervalSet {
return try IntervalSet.of(from, to)
public func labelIntervalSet() -> IntervalSet? {
return IntervalSet.of(from, to)
}
override
View File
@ -38,8 +38,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
/// dependent predicate evaluation.
///
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
RuntimeException(#function + " must be overridden")
return false
fatalError(#function + " must be overridden")
}
///
@ -61,13 +60,13 @@ public class SemanticContext: Hashable, CustomStringConvertible {
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
return self
}
public var hashValue: Int {
RuntimeException(#function + " must be overridden")
return 0
fatalError(#function + " must be overridden")
}
public var description: String {
RuntimeException(#function + " must be overridden")
return ""
fatalError(#function + " must be overridden")
}
public class Predicate: SemanticContext {
@ -91,18 +90,17 @@ public class SemanticContext: Hashable, CustomStringConvertible {
override
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
let localctx: RuleContext? = isCtxDependent ? parserCallStack : nil
let localctx = isCtxDependent ? parserCallStack : nil
return try parser.sempred(localctx, ruleIndex, predIndex)
}
override
public var hashValue: Int {
var hashCode: Int = MurmurHash.initialize()
var hashCode = MurmurHash.initialize()
hashCode = MurmurHash.update(hashCode, ruleIndex)
hashCode = MurmurHash.update(hashCode, predIndex)
hashCode = MurmurHash.update(hashCode, isCtxDependent ? 1 : 0)
hashCode = MurmurHash.finish(hashCode, 3)
return hashCode
return MurmurHash.finish(hashCode, 3)
}
@ -127,12 +125,12 @@ public class SemanticContext: Hashable, CustomStringConvertible {
override
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
return try parser.precpred(parserCallStack, precedence)
return parser.precpred(parserCallStack, precedence)
}
override
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
if try parser.precpred(parserCallStack, precedence) {
if parser.precpred(parserCallStack, precedence) {
return SemanticContext.NONE
} else {
return nil
@ -172,8 +170,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
///
public func getOperands() -> Array<SemanticContext> {
RuntimeException(" must overriden ")
return Array<SemanticContext>()
fatalError(#function + " must be overridden")
}
}
@ -186,7 +183,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
public let opnds: [SemanticContext]
public init(_ a: SemanticContext, _ b: SemanticContext) {
var operands: Set<SemanticContext> = Set<SemanticContext>()
var operands = Set<SemanticContext>()
if let aAnd = a as? AND {
operands.formUnion(aAnd.opnds)
} else {
@ -212,7 +209,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
}
override
public func getOperands() -> Array<SemanticContext> {
public func getOperands() -> [SemanticContext] {
return opnds
}
@ -234,7 +231,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
///
override
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
for opnd: SemanticContext in opnds {
for opnd in opnds {
if try !opnd.eval(parser, parserCallStack) {
return false
}
@ -244,10 +241,10 @@ public class SemanticContext: Hashable, CustomStringConvertible {
override
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
var differs: Bool = false
var operands: Array<SemanticContext> = Array<SemanticContext>()
for context: SemanticContext in opnds {
let evaluated: SemanticContext? = try context.evalPrecedence(parser, parserCallStack)
var differs = false
var operands = [SemanticContext]()
for context in opnds {
let evaluated = try context.evalPrecedence(parser, parserCallStack)
//TODO differs |= (evaluated != context)
//differs |= (evaluated != context);
differs = differs || (evaluated != context)
@ -255,13 +252,12 @@ public class SemanticContext: Hashable, CustomStringConvertible {
if evaluated == nil {
// The AND context is false if any element is false
return nil
} else {
if evaluated != SemanticContext.NONE {
}
else if evaluated != SemanticContext.NONE {
// Reduce the result by skipping true elements
operands.append(evaluated!)
}
}
}
if !differs {
return self
@ -272,7 +268,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
return SemanticContext.NONE
}
var result: SemanticContext = operands[0]
var result = operands[0]
let length = operands.count
for i in 1..<length {
result = SemanticContext.and(result, operands[i])
@ -327,8 +323,8 @@ public class SemanticContext: Hashable, CustomStringConvertible {
}
override
public func getOperands() -> Array<SemanticContext> {
return opnds //Arrays.asList(opnds);
public func getOperands() -> [SemanticContext] {
return opnds
}
@ -347,7 +343,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
///
override
public func eval<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> Bool {
for opnd: SemanticContext in opnds {
for opnd in opnds {
if try opnd.eval(parser, parserCallStack) {
return true
}
@ -357,21 +353,18 @@ public class SemanticContext: Hashable, CustomStringConvertible {
override
public func evalPrecedence<T>(_ parser: Recognizer<T>, _ parserCallStack: RuleContext) throws -> SemanticContext? {
var differs: Bool = false
var operands: Array<SemanticContext> = Array<SemanticContext>()
for context: SemanticContext in opnds {
let evaluated: SemanticContext? = try context.evalPrecedence(parser, parserCallStack)
//differs |= (evaluated != context);
var differs = false
var operands = [SemanticContext]()
for context in opnds {
let evaluated = try context.evalPrecedence(parser, parserCallStack)
differs = differs || (evaluated != context)
if evaluated == SemanticContext.NONE {
// The OR context is true if any element is true
return SemanticContext.NONE
} else {
if evaluated != nil {
// Reduce the result by skipping false elements
operands.append(evaluated!)
//operands.add(evaluated);
}
else if let evaluated = evaluated {
// Reduce the result by skipping false elements
operands.append(evaluated)
}
}
@ -384,7 +377,7 @@ public class SemanticContext: Hashable, CustomStringConvertible {
return nil
}
var result: SemanticContext = operands[0]
var result = operands[0]
let length = operands.count
for i in 1..<length {
result = SemanticContext.or(result, operands[i])
View File
@ -26,10 +26,7 @@ public class SetTransition: Transition, CustomStringConvertible {
}
override
///
/// /old label()
///
public func labelIntervalSet() -> IntervalSet {
public func labelIntervalSet() -> IntervalSet? {
return set
}
View File
@ -82,8 +82,7 @@ public class Transition {
}
public func getSerializationType() -> Int {
RuntimeException(#function + " must be overridden")
fatalError()
fatalError(#function + " must be overridden")
}
///
@ -100,12 +99,11 @@ public class Transition {
}
public func labelIntervalSet() throws -> IntervalSet? {
public func labelIntervalSet() -> IntervalSet? {
return nil
}
public func matches(_ symbol: Int, _ minVocabSymbol: Int, _ maxVocabSymbol: Int) -> Bool {
RuntimeException(#function + " must be overridden")
fatalError()
fatalError(#function + " must be overridden")
}
}
View File
@ -170,23 +170,12 @@ public class DFA: CustomStringConvertible {
return description
}
///
/// - Use _#toString(org.antlr.v4.runtime.Vocabulary)_ instead.
///
public func toString(_ tokenNames: [String?]?) -> String {
if s0 == nil {
return ""
}
let serializer: DFASerializer = DFASerializer(self, tokenNames)
return serializer.toString()
}
public func toString(_ vocabulary: Vocabulary) -> String {
if s0 == nil {
return ""
}
let serializer: DFASerializer = DFASerializer(self, vocabulary)
let serializer = DFASerializer(self, vocabulary)
return serializer.toString()
}
@ -194,7 +183,7 @@ public class DFA: CustomStringConvertible {
if s0 == nil {
return ""
}
let serializer: DFASerializer = LexerDFASerializer(self)
let serializer = LexerDFASerializer(self)
return serializer.toString()
}
View File
@ -10,19 +10,9 @@
///
public class DFASerializer: CustomStringConvertible {
private let dfa: DFA
private let vocabulary: Vocabulary
///
/// - Use _#DFASerializer(org.antlr.v4.runtime.dfa.DFA, org.antlr.v4.runtime.Vocabulary)_ instead.
///
//@Deprecated
public convenience init(_ dfa: DFA, _ tokenNames: [String?]?) {
self.init(dfa, Vocabulary.fromTokenNames(tokenNames))
}
public init(_ dfa: DFA, _ vocabulary: Vocabulary) {
self.dfa = dfa
self.vocabulary = vocabulary
@ -32,18 +22,17 @@ public class DFASerializer: CustomStringConvertible {
if dfa.s0 == nil {
return ""
}
let buf: StringBuilder = StringBuilder()
let states: Array<DFAState> = dfa.getStates()
for s: DFAState in states {
var n: Int = 0
if let sEdges = s.edges {
n = sEdges.count
let buf = StringBuilder()
let states = dfa.getStates()
for s in states {
guard let edges = s.edges else {
continue
}
let n = edges.count
for i in 0..<n {
let t: DFAState? = s.edges![i]
if let t = t , t.stateNumber != Int.max {
if let t = s.edges![i], t.stateNumber != Int.max {
buf.append(getStateString(s))
let label: String = getEdgeLabel(i)
let label = getEdgeLabel(i)
buf.append("-")
buf.append(label)
buf.append("->")
@ -53,7 +42,7 @@ public class DFASerializer: CustomStringConvertible {
}
}
let output: String = buf.toString()
let output = buf.toString()
if output.length == 0 {
return ""
}
@ -72,16 +61,16 @@ public class DFASerializer: CustomStringConvertible {
internal func getStateString(_ s: DFAState) -> String {
let n: Int = s.stateNumber
let n = s.stateNumber
let s1 = s.isAcceptState ? ":" : ""
let s2 = s.requiresFullContext ? "^" : ""
let baseStateStr: String = s1 + "s" + String(n) + s2
let baseStateStr = s1 + "s" + String(n) + s2
if s.isAcceptState {
if let predicates = s.predicates {
return baseStateStr + "=>\(predicates)"
} else {
return baseStateStr + "=>\(s.prediction!)"
return baseStateStr + "=>\(s.prediction)"
}
} else {
return baseStateStr
View File
@ -32,10 +32,9 @@
///
public class DFAState: Hashable, CustomStringConvertible {
public var stateNumber: Int = -1
public var stateNumber = -1
public var configs: ATNConfigSet = ATNConfigSet()
public var configs = ATNConfigSet()
///
/// `edges[symbol]` points to target of symbol. Shift up by 1 so (-1)
@ -43,14 +42,14 @@ public class DFAState: Hashable, CustomStringConvertible {
///
public var edges: [DFAState?]!
public var isAcceptState: Bool = false
public var isAcceptState = false
///
/// if accept state, what ttype do we match or alt do we predict?
/// This is set to _org.antlr.v4.runtime.atn.ATN#INVALID_ALT_NUMBER_ when _#predicates_`!=null` or
/// _#requiresFullContext_.
///
public var prediction: Int! = 0
public var prediction = 0
public var lexerActionExecutor: LexerActionExecutor!
@ -60,7 +59,7 @@ public class DFAState: Hashable, CustomStringConvertible {
/// _org.antlr.v4.runtime.atn.ParserATNSimulator#execATN_ invocations immediately jumped doing
/// full context prediction if this field is true.
///
public var requiresFullContext: Bool = false
public var requiresFullContext = false
///
/// During SLL parsing, this is a list of predicates associated with the
@ -83,20 +82,17 @@ public class DFAState: Hashable, CustomStringConvertible {
///
public class PredPrediction: CustomStringConvertible {
public final var pred: SemanticContext
// never null; at least SemanticContext.NONE
public final var alt: Int
public init(_ pred: SemanticContext, _ alt: Int) {
self.alt = alt
self.pred = pred
}
public var description: String {
return "(\(pred),\(alt))"
}
}
@ -116,18 +112,14 @@ public class DFAState: Hashable, CustomStringConvertible {
/// DFA state.
///
public func getAltSet() -> Set<Int>? {
let alts = configs.getAltSet()
return alts
return configs.getAltSet()
}
public var hashValue: Int {
var hash: Int = MurmurHash.initialize(7)
var hash = MurmurHash.initialize(7)
hash = MurmurHash.update(hash, configs.hashValue)
hash = MurmurHash.finish(hash, 1)
return hash
return MurmurHash.finish(hash, 1)
}
///
@ -144,7 +136,7 @@ public class DFAState: Hashable, CustomStringConvertible {
/// _#stateNumber_ is irrelevant.
///
public var description: String {
let buf: StringBuilder = StringBuilder()
let buf = StringBuilder()
buf.append(stateNumber).append(":").append(configs)
if isAcceptState {
buf.append("=>")
@ -156,15 +148,11 @@ public class DFAState: Hashable, CustomStringConvertible {
}
return buf.toString()
}
}
public func ==(lhs: DFAState, rhs: DFAState) -> Bool {
if lhs === rhs {
return true
}
let sameSet: Bool = lhs.configs == rhs.configs
return sameSet
return (lhs.configs == rhs.configs)
}
View File
@ -592,6 +592,13 @@ public class BitSet: Hashable, CustomStringConvertible {
return result
}
///
/// Equivalent to nextSetBit(0), but guaranteed not to throw an exception.
///
public func firstSetBit() -> Int {
return try! nextSetBit(0)
}
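// Minimal usage sketch for the new non-throwing helper (Antlr4 module assumed;
// a standalone illustration, not part of this change):
import Antlr4

let bits = BitSet()
try! bits.set(3)
try! bits.set(7)
print(bits.firstSetBit())        // 3
print(try! bits.nextSetBit(4))   // 7
print(bits)                      // {3, 7}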
///
/// Returns the index of the first bit that is set to `true`
/// that occurs on or after the specified starting index. If no such
@ -601,7 +608,7 @@ public class BitSet: Hashable, CustomStringConvertible {
/// use the following loop:
///
/// `
/// for (int i = bs.nextSetBit(0); i >= 0; i = bs.nextSetBit(i+1)) {
/// for (int i = bs.firstSetBit(); i >= 0; i = bs.nextSetBit(i+1)) {
/// // operate on index i here
/// `}
///
@ -1114,25 +1121,21 @@ public class BitSet: Hashable, CustomStringConvertible {
//let numBits: Int = (wordsInUse > 128) ?
// cardinality() : wordsInUse * BitSet.BITS_PER_WORD
let b: StringBuilder = StringBuilder()
let b = StringBuilder()
b.append("{")
do {
var i: Int = try nextSetBit(0)
var i = firstSetBit()
if i != -1 {
b.append(i)
i = try nextSetBit(i + 1)
i = try! nextSetBit(i + 1)
while i >= 0 {
let endOfRun: Int = try nextClearBit(i)
let endOfRun = try! nextClearBit(i)
repeat {
b.append(", ").append(i)
i += 1
} while i < endOfRun
i = try nextSetBit(i + 1)
i = try! nextSetBit(i + 1)
}
}
} catch {
print("BitSet description error")
}
b.append("}")
return b.toString()
View File
@ -74,19 +74,19 @@ public final class HashMap<K: Hashable,V>: Sequence
///
/// The default initial capacity - MUST be a power of two.
///
let DEFAULT_INITIAL_CAPACITY: Int = 16
private let DEFAULT_INITIAL_CAPACITY: Int = 16
///
/// The maximum capacity, used if a higher value is implicitly specified
/// by either of the constructors with arguments.
/// MUST be a power of two <= 1<<30.
///
let MAXIMUM_CAPACITY: Int = 1 << 30
private let MAXIMUM_CAPACITY: Int = 1 << 30
///
/// The load factor used when none specified in constructor.
///
let DEFAULT_LOAD_FACTOR: Float = 0.75
private let DEFAULT_LOAD_FACTOR: Float = 0.75
///
/// The table, resized as necessary. Length MUST Always be a power of two.
View File
@ -43,7 +43,7 @@ public protocol IntSet {
/// current set and `a`. The value `null` may be returned in
/// place of an empty result set.
///
func and(_ a: IntSet?) throws -> IntSet?
func and(_ a: IntSet?) -> IntSet?
///
/// Return a new _org.antlr.v4.runtime.misc.IntSet_ object containing all elements that are
@ -60,7 +60,7 @@ public protocol IntSet {
/// `elements` but not present in the current set. The value
/// `null` may be returned in place of an empty result set.
///
func complement(_ elements: IntSet?) throws -> IntSet?
func complement(_ elements: IntSet?) -> IntSet?
///
/// Return a new _org.antlr.v4.runtime.misc.IntSet_ object containing all elements that are
@ -76,7 +76,7 @@ public protocol IntSet {
/// set and `a`. The value `null` may be returned in place of an
/// empty result set.
///
func or(_ a: IntSet) throws -> IntSet
func or(_ a: IntSet) -> IntSet
///
/// Return a new _org.antlr.v4.runtime.misc.IntSet_ object containing all elements that are
@ -93,7 +93,7 @@ public protocol IntSet {
/// `elements` but not present in the current set. The value
/// `null` may be returned in place of an empty result set.
///
func subtract(_ a: IntSet?) throws -> IntSet
func subtract(_ a: IntSet?) -> IntSet
///
/// Return the total number of elements represented by the current set.
@ -145,7 +145,7 @@ public protocol IntSet {
/// - returns: A list containing all element present in the current set, sorted
/// in ascending numerical order.
///
func toList() -> Array<Int>
func toList() -> [Int]
func toString() -> String
}
View File
@ -21,13 +21,13 @@
public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
public static let COMPLETE_CHAR_SET: IntervalSet =
{
let set = try! IntervalSet.of(Lexer.MIN_CHAR_VALUE, Lexer.MAX_CHAR_VALUE)
let set = IntervalSet.of(Lexer.MIN_CHAR_VALUE, Lexer.MAX_CHAR_VALUE)
try! set.setReadonly(true)
return set
}()
public static let EMPTY_SET: IntervalSet = {
let set = try! IntervalSet()
let set = IntervalSet()
try! set.setReadonly(true)
return set
}()
@ -36,47 +36,36 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
///
/// The list of sorted, disjoint intervals.
///
internal var intervals: Array<Interval>
internal var intervals: [Interval]
internal var readonly: Bool = false
public init(_ intervals: Array<Interval>) {
internal var readonly = false
public init(_ intervals: [Interval]) {
self.intervals = intervals
}
public convenience init(_ set: IntervalSet) throws {
try self.init()
try addAll(set)
public convenience init(_ set: IntervalSet) {
self.init()
try! addAll(set)
}
public init(_ els: Int...) throws {
if els.count == 0 {
intervals = Array<Interval>() // most sets are 1 or 2 elements
public init(_ els: Int...) {
if els.isEmpty {
intervals = [Interval]() // most sets are 1 or 2 elements
} else {
intervals = Array<Interval>()
for e: Int in els {
try add(e)
intervals = [Interval]()
for e in els {
try! add(e)
}
}
}
///
/// Create a set with a single element, el.
///
public static func of(_ a: Int) throws -> IntervalSet {
let s: IntervalSet = try IntervalSet()
try s.add(a)
return s
}
///
/// Create a set with all ints within range [a..b] (inclusive)
///
public static func of(_ a: Int, _ b: Int) throws -> IntervalSet {
let s: IntervalSet = try IntervalSet()
try s.add(a, b)
public static func of(_ a: Int, _ b: Int) -> IntervalSet {
let s = IntervalSet()
try! s.add(a, b)
return s
}
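The factory can drop `throws` because `add` only throws on a read-only set, and a freshly constructed set is never read-only, so the `try!` inside the factory cannot trap. A usage sketch under the new signatures (the character ranges are illustrative):

let digits = IntervalSet.of(0x30, 0x39)    // '0'..'9', no try needed
let letters = IntervalSet.of(0x41, 0x5A)   // 'A'..'Z'
let letterOrDigit = letters.or(digits)     // or(_:) is also non-throwing now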
@ -96,7 +85,7 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
if readonly {
throw ANTLRError.illegalState(msg: "can't alter readonly IntervalSet")
}
try add(el, el)
try! add(el, el)
}
///
@ -126,21 +115,20 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
while i < intervals.count {
let r: Interval = intervals[i]
let r = intervals[i]
if addition == r {
return
}
if addition.adjacent(r) || !addition.disjoint(r) {
// next to each other, make a single larger interval
let bigger: Interval = addition.union(r)
let bigger = addition.union(r)
//iter.set(bigger);
intervals[i] = bigger
// make sure we didn't just create an interval that
// should be merged with next interval in list
//while iter.hasNext() {
while i < intervals.count - 1 {
i += 1
let next: Interval = intervals[i] //iter.next();
let next = intervals[i]
if !bigger.adjacent(next) && bigger.disjoint(next) {
break
}
@ -155,14 +143,11 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
intervals.remove(at: i)
i -= 1
intervals[i] = bigger.union(next)
}
return
}
if addition.startsBeforeDisjoint(r) {
// insert before r
//iter.previous();
//iter.add(addition);
intervals.insert(addition, at: i)
return
}
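The loop above keeps `intervals` sorted and disjoint by folding any addition that touches or overlaps an existing interval into a single larger one, then re-checking the following intervals. A small worked example of that merging behaviour (a sketch, not a test case):

let s = IntervalSet()
try! s.add(5, 10)    // intervals: [5..10]
try! s.add(15, 20)   // intervals: [5..10], [15..20]
try! s.add(11, 14)   // adjacent to both neighbours, so everything collapses
                     // into the single interval [5..20]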
@ -178,10 +163,10 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
///
/// combine all sets in the array returned the or'd value
///
public func or(_ sets: [IntervalSet]) throws -> IntSet {
let r: IntervalSet = try IntervalSet()
for s: IntervalSet in sets {
try r.addAll(s)
public func or(_ sets: [IntervalSet]) -> IntSet {
let r = IntervalSet()
for s in sets {
try! r.addAll(s)
}
return r
}
@ -194,14 +179,12 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
}
if let other = set as? IntervalSet {
// walk set and add each interval
let n: Int = other.intervals.count
for i in 0..<n {
let I: Interval = other.intervals[i]
try self.add(I.a, I.b)
for interval in other.intervals {
try add(interval)
}
} else {
let setList = set.toList()
for value: Int in setList {
for value in setList {
try add(value)
}
}
@ -209,15 +192,15 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
return self
}
public func complement(_ minElement: Int, _ maxElement: Int) throws -> IntSet? {
return try self.complement(IntervalSet.of(minElement, maxElement))
public func complement(_ minElement: Int, _ maxElement: Int) -> IntSet? {
return complement(IntervalSet.of(minElement, maxElement))
}
///
///
///
public func complement(_ vocabulary: IntSet?) throws -> IntSet? {
public func complement(_ vocabulary: IntSet?) -> IntSet? {
guard let vocabulary = vocabulary, !vocabulary.isNil() else {
return nil // nothing in common with null set
}
@ -225,25 +208,25 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
if let vocabulary = vocabulary as? IntervalSet {
vocabularyIS = vocabulary
} else {
vocabularyIS = try IntervalSet()
try vocabularyIS.addAll(vocabulary)
vocabularyIS = IntervalSet()
try! vocabularyIS.addAll(vocabulary)
}
return try vocabularyIS.subtract(self)
return vocabularyIS.subtract(self)
}
public func subtract(_ a: IntSet?) throws -> IntSet {
public func subtract(_ a: IntSet?) -> IntSet {
guard let a = a, !a.isNil() else {
return try IntervalSet(self)
return IntervalSet(self)
}
if let a = a as? IntervalSet {
return try subtract(self, a)
return subtract(self, a)
}
let other: IntervalSet = try IntervalSet()
try other.addAll(a)
return try subtract(self, other)
let other = IntervalSet()
try! other.addAll(a)
return subtract(self, other)
}
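`complement` is expressed as "vocabulary minus self", so both overloads funnel into `subtract`. A hedged sketch of the intended behaviour, using plain ASCII code points as the vocabulary:

let vowels = IntervalSet()
for v in [97, 101, 105, 111, 117] {   // 'a', 'e', 'i', 'o', 'u'
    try! vowels.add(v)
}
let consonants = vowels.complement(97, 122)   // complement within 'a'...'z'
// expected intervals: [98..100], [102..104], [106..110], [112..116], [118..122]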
///
@ -252,23 +235,23 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
/// `null`, it is treated as though it was an empty set.
///
public func subtract(_ left: IntervalSet?, _ right: IntervalSet?) throws -> IntervalSet {
public func subtract(_ left: IntervalSet?, _ right: IntervalSet?) -> IntervalSet {
guard let left = left, !left.isNil() else {
return try IntervalSet()
return IntervalSet()
}
let result: IntervalSet = try IntervalSet(left)
let result = IntervalSet(left)
guard let right = right, !right.isNil() else {
// right set has no elements; just return the copy of the current set
return result
}
var resultI: Int = 0
var rightI: Int = 0
var resultI = 0
var rightI = 0
while resultI < result.intervals.count && rightI < right.intervals.count {
let resultInterval: Interval = result.intervals[resultI]
let rightInterval: Interval = right.intervals[rightI]
let resultInterval = result.intervals[resultI]
let rightInterval = right.intervals[rightI]
// operation: (resultInterval - rightInterval) and update indexes
@ -296,9 +279,7 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
if let afterCurrent = afterCurrent {
// split the current interval into two
result.intervals[resultI] = beforeCurrent
//result.intervals.set(beforeCurrent,resultI);
result.intervals.insert(afterCurrent, at: resultI + 1)
//result.intervals.add(, afterCurrent);
resultI += 1
rightI += 1
continue
@ -330,10 +311,10 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
}
public func or(_ a: IntSet) throws -> IntSet {
let o: IntervalSet = try IntervalSet()
try o.addAll(self)
try o.addAll(a)
public func or(_ a: IntSet) -> IntSet {
let o = IntervalSet()
try! o.addAll(self)
try! o.addAll(a)
return o
}
@ -341,24 +322,23 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
///
///
public func and(_ other: IntSet?) throws -> IntSet? {
public func and(_ other: IntSet?) -> IntSet? {
if other == nil {
//|| !(other instanceof IntervalSet) ) {
return nil // nothing in common with null set
}
var myIntervals: Array<Interval> = self.intervals
var theirIntervals: Array<Interval> = (other as! IntervalSet).intervals
var myIntervals = self.intervals
var theirIntervals = (other as! IntervalSet).intervals
var intersection: IntervalSet? = nil
let mySize: Int = myIntervals.count
let theirSize: Int = theirIntervals.count
var i: Int = 0
var j: Int = 0
let mySize = myIntervals.count
let theirSize = theirIntervals.count
var i = 0
var j = 0
// iterate down both interval lists looking for nondisjoint intervals
while i < mySize && j < theirSize {
let mine: Interval = myIntervals[i]
let theirs: Interval = theirIntervals[j]
//System.out.println("mine="+mine+" and theirs="+theirs);
let mine = myIntervals[i]
let theirs = theirIntervals[j]
if mine.startsBeforeDisjoint(theirs) {
// move this iterator looking for interval that might overlap
i += 1
@ -370,26 +350,26 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
if mine.properlyContains(theirs) {
// overlap, add intersection, get next theirs
if intersection == nil {
intersection = try IntervalSet()
intersection = IntervalSet()
}
try intersection!.add(mine.intersection(theirs))
try! intersection!.add(mine.intersection(theirs))
j += 1
} else {
if theirs.properlyContains(mine) {
// overlap, add intersection, get next mine
if intersection == nil {
intersection = try IntervalSet()
intersection = IntervalSet()
}
try intersection!.add(mine.intersection(theirs))
try! intersection!.add(mine.intersection(theirs))
i += 1
} else {
if !mine.disjoint(theirs) {
// overlap, add intersection
if intersection == nil {
intersection = try IntervalSet()
intersection = IntervalSet()
}
try intersection!.add(mine.intersection(theirs))
try! intersection!.add(mine.intersection(theirs))
// Move the iterator of lower range [a..b], but not
// the upper range as it may contain elements that will collide
// with the next iterator. So, if mine=[0..115] and
@ -411,7 +391,7 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
}
}
if intersection == nil {
return try IntervalSet()
return IntervalSet()
}
return intersection
}
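Intersection walks the two sorted interval lists in step, advancing whichever side ends first and recording each overlap. A short sketch of the expected result under these signatures:

let a = IntervalSet.of(0, 100)
let b = IntervalSet()
try! b.add(50, 60)
try! b.add(90, 120)
let both = a.and(b)   // both: IntSet?, expected to contain [50..60] and [90..100]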
@ -421,11 +401,9 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
///
public func contains(_ el: Int) -> Bool {
let n: Int = intervals.count
for i in 0..<n {
let I: Interval = intervals[i]
let a: Int = I.a
let b: Int = I.b
for interval in intervals {
let a = interval.a
let b = interval.b
if el < a {
break // list is sorted and el is before this interval; not here
}
@ -434,18 +412,6 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
}
}
return false
///
/// for (ListIterator iter = intervals.listIterator(); iter.hasNext();) {
/// Interval I = (Interval) iter.next();
/// if ( el<I.a ) {
/// break; // list is sorted and el is before this interval; not here
/// }
/// if ( el>=I.a && el<=I.b ) {
/// return true; // found in this interval
/// }
/// }
/// return false;
///
}
///
@ -461,11 +427,10 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
///
public func getSingleElement() -> Int {
//intervals=nil && intervals.count==1 )
if intervals.count == 1 {
let I: Interval = intervals[0]
if I.a == I.b {
return I.a
let interval = intervals[0]
if interval.a == interval.b {
return interval.a
}
}
return CommonToken.INVALID_TYPE
@ -481,7 +446,7 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
if isNil() {
return CommonToken.INVALID_TYPE
}
let last: Interval = intervals[intervals.count - 1]
let last = intervals[intervals.count - 1]
return last.b
}
@ -502,30 +467,28 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
///
/// Return a list of Interval objects.
///
public func getIntervals() -> Array<Interval> {
public func getIntervals() -> [Interval] {
return intervals
}
public func hashCode() -> Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
for I: Interval in intervals {
hash = MurmurHash.update(hash, I.a)
hash = MurmurHash.update(hash, I.b)
}
hash = MurmurHash.finish(hash, intervals.count * 2)
return hash
return MurmurHash.finish(hash, intervals.count * 2)
}
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
for I: Interval in intervals {
hash = MurmurHash.update(hash, I.a)
hash = MurmurHash.update(hash, I.b)
}
hash = MurmurHash.finish(hash, intervals.count * 2)
return hash
return MurmurHash.finish(hash, intervals.count * 2)
}
///
/// Are two IntervalSets equal? Because all intervals are sorted
@ -551,25 +514,21 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
}
public func toString(_ elemAreChar: Bool) -> String {
let buf: StringBuilder = StringBuilder()
//if ( self.intervals==nil || self.intervals.isEmpty() ) {
let buf = StringBuilder()
if self.intervals.isEmpty {
return "{}"
}
if self.size() > 1 {
buf.append("{")
}
//var iter : Iterator<Interval> = self.intervals.iterator();
//while iter.hasNext() {
var first = true
for I: Interval in intervals {
for interval in intervals {
if !first {
buf.append(", ")
}
first = false
//var I : Interval = iter.next();
let a: Int = I.a
let b: Int = I.b
let a = interval.a
let b = interval.b
if a == b {
if a == CommonToken.EOF {
buf.append("<EOF>")
@ -587,9 +546,6 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
buf.append(a).append("..").append(b)
}
}
//if ( iter.hasNext() ) {
// buf.append(", ");
//}
}
if self.size() > 1 {
buf.append("}")
@ -597,16 +553,8 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
return buf.toString()
}
///
/// - Use _#toString(org.antlr.v4.runtime.Vocabulary)_ instead.
/// /@Deprecated
///
public func toString(_ tokenNames: [String?]?) -> String {
return toString(Vocabulary.fromTokenNames(tokenNames))
}
public func toString(_ vocabulary: Vocabulary) -> String {
let buf: StringBuilder = StringBuilder()
let buf = StringBuilder()
if self.intervals.isEmpty {
return "{}"
@ -616,14 +564,14 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
}
var first = true
for I: Interval in intervals {
for interval in intervals {
if !first {
buf.append(", ")
}
first = false
//var I : Interval = iter.next();
let a: Int = I.a
let b: Int = I.b
let a = interval.a
let b = interval.b
if a == b {
buf.append(elementName(vocabulary, a))
} else {
@ -642,15 +590,6 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
return buf.toString()
}
///
/// - Use _#elementName(org.antlr.v4.runtime.Vocabulary, int)_ instead.
/// /@Deprecated
///
internal func elementName(_ tokenNames: [String?]?, _ a: Int) -> String {
return elementName(Vocabulary.fromTokenNames(tokenNames), a)
}
internal func elementName(_ vocabulary: Vocabulary, _ a: Int) -> String {
if a == CommonToken.EOF {
return "<EOF>"
@ -665,43 +604,25 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
public func size() -> Int {
var n: Int = 0
let numIntervals: Int = intervals.count
var n = 0
let numIntervals = intervals.count
if numIntervals == 1 {
let firstInterval: Interval = self.intervals[0]
let firstInterval = intervals[0]
return firstInterval.b - firstInterval.a + 1
}
for i in 0..<numIntervals {
let I: Interval = intervals[i]
n += (I.b - I.a + 1)
let interval = intervals[i]
n += (interval.b - interval.a + 1)
}
return n
}
public func toIntegerList() -> Array<Int> {
var values: Array<Int> = Array<Int>()
let n: Int = intervals.count
for i in 0..<n {
let I: Interval = intervals[i]
let a: Int = I.a
let b: Int = I.b
for v in a...b {
values.append(v)
}
}
return values
}
public func toList() -> Array<Int> {
var values: Array<Int> = Array<Int>()
let n: Int = intervals.count
for i in 0..<n {
let I: Interval = intervals[i]
let a: Int = I.a
let b: Int = I.b
public func toList() -> [Int] {
var values = [Int]()
for interval in intervals {
let a = interval.a
let b = interval.b
for v in a...b {
values.append(v)
@ -711,13 +632,12 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
}
public func toSet() -> Set<Int> {
var s: Set<Int> = Set<Int>()
for I: Interval in intervals {
let a: Int = I.a
let b: Int = I.b
var s = Set<Int>()
for interval in intervals {
let a = interval.a
let b = interval.b
for v in a...b {
s.insert(v)
//s.add(v);
}
}
return s
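`toList` and `toSet` simply expand each stored interval into its individual elements. For example (a sketch):

let s = IntervalSet.of(3, 5)
let asList = s.toList()   // [3, 4, 5]
let asSet = s.toSet()     // Set containing 3, 4, 5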
@ -729,12 +649,10 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
/// ANTLR code gen target.
///
public func get(_ i: Int) -> Int {
let n: Int = intervals.count
var index: Int = 0
for j in 0..<n {
let I: Interval = intervals[j]
let a: Int = I.a
let b: Int = I.b
var index = 0
for interval in intervals {
let a = interval.a
let b = interval.b
for v in a...b {
if index == i {
return v
@ -745,44 +663,36 @@ public class IntervalSet: IntSet, Hashable, CustomStringConvertible {
return -1
}
public func toArray() -> [Int] {
return toIntegerList()
}
public func remove(_ el: Int) throws {
if readonly {
throw ANTLRError.illegalState(msg: "can't alter readonly IntervalSet")
}
let n: Int = intervals.count
for i in 0..<n {
let I: Interval = intervals[i]
let a: Int = I.a
let b: Int = I.b
for interval in intervals {
let a = interval.a
let b = interval.b
if el < a {
break // list is sorted and el is before this interval; not here
}
// if whole interval x..x, rm
if el == a && el == b {
intervals.remove(at: i)
//intervals.remove(i);
intervals.removeObject(interval)
break
}
// if on left edge x..b, adjust left
if el == a {
I.a += 1
interval.a += 1
break
}
// if on right edge a..x, adjust right
if el == b {
I.b -= 1
interval.b -= 1
break
}
// if in middle a..x..b, split interval
if el > a && el < b {
// found in this interval
let oldb: Int = I.b
I.b = el - 1 // [a..x-1]
let oldb = interval.b
interval.b = el - 1 // [a..x-1]
try add(el + 1, oldb) // add [x+1..b]
}
}
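Removing a single element can shrink an interval at either edge or, if the element falls strictly inside it, split the interval in two. A sketch of the splitting case:

let s = IntervalSet.of(1, 9)
try! s.remove(5)
// s now holds the two intervals [1..4] and [6..9]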


@ -6,20 +6,28 @@
///
/// https://en.wikipedia.org/wiki/MurmurHash
///
/// - Author: Sam Harwell
///
public final class MurmurHash {
private static let DEFAULT_SEED: Int = 0
private static let DEFAULT_SEED: UInt32 = 0
private static let c1 = UInt32(0xCC9E2D51)
private static let c2 = UInt32(0x1B873593)
private static let r1 = UInt32(15)
private static let r2 = UInt32(13)
private static let m = UInt32(5)
private static let n = UInt32(0xE6546B64)
///
/// Initialize the hash using the default seed value.
///
/// - Returns: the intermediate hash value
///
public static func initialize() -> Int {
public static func initialize() -> UInt32 {
return initialize(DEFAULT_SEED)
}
@ -29,42 +37,16 @@ public final class MurmurHash {
/// - Parameter seed: the seed
/// - Returns: the intermediate hash value
///
public static func initialize(_ seed: Int) -> Int {
public static func initialize(_ seed: UInt32) -> UInt32 {
return seed
}
///
/// Update the intermediate hash value for the next input `value`.
///
/// - Parameter hash: the intermediate hash value
/// - Parameter value: the value to add to the current hash
/// - Returns: the updated intermediate hash value
///
public static func update2(_ hashIn: Int, _ value: Int) -> Int {
let c1: Int32 = -862048943//0xCC9E2D51;
let c2: Int32 = 0x1B873593
let r1: Int32 = 15
let r2: Int32 = 13
let m: Int32 = 5
let n: Int32 = -430675100//0xE6546B64;
var k: Int32 = Int32(truncatingIfNeeded: value)
k = k.multipliedReportingOverflow(by: c1).partialValue
// (k,_) = UInt32.multiplyWithOverflow(k, c1) ;//( k * c1);
//TODO: CHECKE >>>
k = (k << r1) | (k >>> (Int32(32) - r1)) //k = (k << r1) | (k >>> (32 - r1));
//k = UInt32 (truncatingBitPattern:Int64(Int64(k) * Int64(c2)));//( k * c2);
//(k,_) = UInt32.multiplyWithOverflow(k, c2)
k = k.multipliedReportingOverflow(by: c2).partialValue
var hash = Int32(hashIn)
hash = hash ^ k
hash = (hash << r2) | (hash >>> (Int32(32) - r2))//hash = (hash << r2) | (hash >>> (32 - r2));
hash = hash.multipliedReportingOverflow(by: m).partialValue
hash = hash.addingReportingOverflow(n).partialValue
//hash = hash * m + n;
// print("murmur update2 : \(hash)")
return Int(hash)
private static func calcK(_ value: UInt32) -> UInt32 {
var k = value
k = k &* c1
k = (k << r1) | (k >> (32 - r1))
k = k &* c2
return k
}
///
@ -74,9 +56,25 @@ public final class MurmurHash {
/// - Parameter value: the value to add to the current hash
/// - Returns: the updated intermediate hash value
///
public static func update<T:Hashable>(_ hash: Int, _ value: T?) -> Int {
public static func update2(_ hashIn: UInt32, _ value: Int) -> UInt32 {
let k = calcK(UInt32(truncatingIfNeeded: value))
var hash = hashIn
hash = hash ^ k
hash = (hash << r2) | (hash >> (32 - r2))
hash = hash &* m &+ n
// print("murmur update2 : \(hash)")
return hash
}
///
/// Update the intermediate hash value for the next input `value`.
///
/// - Parameter hash: the intermediate hash value
/// - Parameter value: the value to add to the current hash
/// - Returns: the updated intermediate hash value
///
public static func update<T:Hashable>(_ hash: UInt32, _ value: T?) -> UInt32 {
return update2(hash, value != nil ? value!.hashValue : 0)
// return update2(hash, value);
}
///
@ -84,21 +82,24 @@ public final class MurmurHash {
/// to form the final result of the MurmurHash 3 hash function.
///
/// - Parameter hash: the intermediate hash value
/// - Parameter numberOfWords: the number of integer values added to the hash
/// - Parameter numberOfWords: the number of UInt32 values added to the hash
/// - Returns: the final hash result
///
public static func finish(_ hashin: Int, _ numberOfWordsIn: Int) -> Int {
var hash = Int32(hashin)
let numberOfWords = Int32(numberOfWordsIn)
hash = hash ^ numberOfWords.multipliedReportingOverflow(by: 4).partialValue //(numberOfWords * UInt32(4));
hash = hash ^ (hash >>> Int32(16)) //hash = hash ^ (hash >>> 16);
hash = hash.multipliedReportingOverflow(by: -2048144789).partialValue //hash * UInt32(0x85EBCA6B);
hash = hash ^ (hash >>> Int32(13))//hash = hash ^ (hash >>> 13);
//hash = UInt32(truncatingBitPattern: UInt64(hash) * UInt64(0xC2B2AE35)) ;
hash = hash.multipliedReportingOverflow(by: -1028477387).partialValue
hash = hash ^ (hash >>> Int32(16))// hash = hash ^ (hash >>> 16);
public static func finish(_ hashin: UInt32, _ numberOfWords: Int) -> Int {
return Int(finish(hashin, byteCount: (numberOfWords &* 4)))
}
private static func finish(_ hashin: UInt32, byteCount byteCountInt: Int) -> UInt32 {
let byteCount = UInt32(truncatingIfNeeded: byteCountInt)
var hash = hashin
hash ^= byteCount
hash ^= (hash >> 16)
hash = hash &* 0x85EBCA6B
hash ^= (hash >> 13)
hash = hash &* 0xC2B2AE35
hash ^= (hash >> 16)
//print("murmur finish : \(hash)")
return Int(hash)
return hash
}
///
@ -111,14 +112,55 @@ public final class MurmurHash {
/// - Returns: the hash code of the data
///
public static func hashCode<T:Hashable>(_ data: [T], _ seed: Int) -> Int {
var hash: Int = initialize(seed)
for value: T in data {
//var hashValue = value != nil ? value.hashValue : 0
hash = update(hash, value.hashValue)
var hash = initialize(UInt32(truncatingIfNeeded: seed))
for value in data {
hash = update(hash, value)
}
hash = finish(hash, data.count)
return hash
return finish(hash, data.count)
}
///
/// Compute a hash for the given String and seed. The String is encoded
/// using UTF-8, then the bytes are interpreted as unsigned 32-bit
/// little-endian values, giving UInt32 values for the update call.
///
/// If the bytes do not evenly divide by 4, the final bytes are treated
/// slightly differently (not doing the final rotate / multiply / add).
///
/// This matches the treatment of byte sequences in publicly available
/// test patterns (see MurmurHashTests.swift) and the example code on
/// Wikipedia.
///
public static func hashString(_ s: String, _ seed: UInt32) -> UInt32 {
let bytes = Array(s.utf8)
return hashBytesLittleEndian(bytes, seed)
}
private static func hashBytesLittleEndian(_ bytes: [UInt8], _ seed: UInt32) -> UInt32 {
let byteCount = bytes.count
var hash = seed
for i in stride(from: 0, to: byteCount - 3, by: 4) {
var word = UInt32(bytes[i])
word |= UInt32(bytes[i + 1]) << 8
word |= UInt32(bytes[i + 2]) << 16
word |= UInt32(bytes[i + 3]) << 24
hash = update(hash, word)
}
let remaining = byteCount & 3
if remaining != 0 {
var lastWord = UInt32(0)
for r in 0 ..< remaining {
lastWord |= UInt32(bytes[byteCount - 1 - r]) << (8 * (remaining - 1 - r))
}
let k = calcK(lastWord)
hash ^= k
}
return finish(hash, byteCount: byteCount)
}
private init() {
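The hasher now accumulates in UInt32 and uses Swift's wrapping operators (&*, &+) in place of the old multipliedReportingOverflow/addingReportingOverflow calls, which is the idiomatic way to get the modular arithmetic MurmurHash3 expects. A usage sketch under the new signatures (the mixed-in values are illustrative):

var hash = MurmurHash.initialize()       // UInt32 accumulator, default seed 0
hash = MurmurHash.update(hash, 42)       // mix in any Hashable value
hash = MurmurHash.update(hash, "expr")
let code = MurmurHash.finish(hash, 2)    // 2 words mixed in; result is Int

// The new string entry point hashes the UTF-8 bytes little-endian, matching
// the byte-oriented MurmurHash3 reference behaviour:
let h: UInt32 = MurmurHash.hashString("hello", 0)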


@ -16,7 +16,7 @@ public class Triple<A:Hashable, B:Hashable, C:Hashable>: Hashable, CustomStringC
self.c = c
}
public var hashValue: Int {
var hash: Int = MurmurHash.initialize()
var hash = MurmurHash.initialize()
hash = MurmurHash.update(hash, a)
hash = MurmurHash.update(hash, b)
hash = MurmurHash.update(hash, c)
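The accumulator type change ripples through every hashValue built on MurmurHash: initialize() now yields a UInt32 and finish() converts back to Int at the end. The finish call below lies outside this hunk and is assumed to follow the same shape as the other hashValue implementations in this diff:

public var hashValue: Int {
    var hash = MurmurHash.initialize()   // UInt32 accumulator
    hash = MurmurHash.update(hash, a)
    hash = MurmurHash.update(hash, b)
    hash = MurmurHash.update(hash, c)
    return MurmurHash.finish(hash, 3)    // back to Int for Hashable
}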


@ -34,19 +34,6 @@ public class Utils {
}
public static func writeFile(_ fileName: String, _ content: String, _ encoding: String.Encoding = String.Encoding.utf8) {
//writing
do {
try content.write(toFile: fileName, atomically: false, encoding: encoding)
} catch {
/* error handling here */
RuntimeException(" write file fail \(error)")
}
}
public static func readFile(_ path: String, _ encoding: String.Encoding = String.Encoding.utf8) -> [Character] {
var fileContents: String
@ -60,35 +47,6 @@ public class Utils {
return Array(fileContents.characters)
}
public static func readFile2String(_ fileName: String, _ encoding: String.Encoding = String.Encoding.utf8) -> String {
let path = Bundle.main.path(forResource: fileName, ofType: nil)
if path == nil {
return ""
}
var fileContents: String? = nil
do {
fileContents = try String(contentsOfFile: path!, encoding: encoding)
} catch {
return ""
}
return fileContents ?? ""
}
public static func readFile2StringByPath(_ path: String, _ encoding: String.Encoding = String.Encoding.utf8) -> String {
var fileContents: String? = nil
do {
fileContents = try String(contentsOfFile: path, encoding: String.Encoding.utf8)
} catch {
return ""
}
return fileContents ?? ""
}
public static func toMap(_ keys: [String]) -> Dictionary<String, Int> {
var m = Dictionary<String, Int>()
for (index,v) in keys.enumerated() {


@ -16,5 +16,5 @@ import Foundation
public enum ANTLRException: Error {
case cannotInvokeStartRule
case recognition(e:AnyObject)
case recognition(e: RecognitionException)
}
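Typing the associated value as RecognitionException lets catch sites inspect the error without downcasting from AnyObject. A hedged sketch of such a catch site; `expr()` stands in for any generated rule method:

do {
    try parser.expr()
} catch ANTLRException.recognition(let e) {
    // e is a RecognitionException, no cast needed
    print("recognition error: \(e)")
} catch {
    print("other error: \(error)")
}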


@ -70,17 +70,6 @@ func log(_ message: String = "", file: String = #file, function: String = #funct
// #endif
}
func RuntimeException(_ message: String = "", file: String = #file, function: String = #function, lineNum: Int = #line) {
// #if DEBUG
let info = "FILE: \(URL(fileURLWithPath: file).pathComponents.last!),FUNC: \(function), LINE: \(lineNum) MESSAGE: \(message)"
// #else
// let info = "FILE: \(NSURL(fileURLWithPath: file).pathComponents!.last!),FUNC: \(function), LINE: \(lineNum) MESSAGE: \(message)"
// #endif
fatalError(info)
}
func toInt(_ c: Character) -> Int {
return c.unicodeValue
}


@ -11,21 +11,12 @@
///
/// The payload is either a _org.antlr.v4.runtime.Token_ or a _org.antlr.v4.runtime.RuleContext_ object.
///
//public protocol ParseTree : SyntaxTree {
open class ParseTree: SyntaxTree, CustomStringConvertible, CustomDebugStringConvertible {
// the following methods narrow the return type; they are not additional methods
//func getParent() -> ParseTree?
//func getChild(i : Int) -> ParseTree?
/// The _org.antlr.v4.runtime.tree.ParseTreeVisitor_ needs a double dispatch method.
open func accept<T>(_ visitor: ParseTreeVisitor<T>) -> T? {
RuntimeException(" must overriden !")
fatalError()
fatalError(#function + " must be overridden")
}
/// Return the combined text of all leaf nodes. Does not get any
@ -33,59 +24,45 @@ open class ParseTree: SyntaxTree, CustomStringConvertible , CustomDebugStringCon
/// comments if they are sent to parser on hidden channel.
///
open func getText() -> String {
RuntimeException(" must overriden !")
return ""
fatalError(#function + " must be overridden")
}
/// Specialize toStringTree so that it can print out more information
/// based upon the parser.
///
open func toStringTree(_ parser: Parser) -> String {
RuntimeException(" must overriden !")
return ""
fatalError(#function + " must be overridden")
}
open func getSourceInterval() -> Interval {
RuntimeException(" must overriden !")
fatalError()
fatalError(#function + " must be overridden")
}
open func getParent() -> Tree? {
RuntimeException(" must overriden !")
fatalError()
fatalError(#function + " must be overridden")
}
open func getPayload() -> AnyObject {
RuntimeException(" must overriden !")
fatalError()
fatalError(#function + " must be overridden")
}
open func getChild(_ i: Int) -> Tree? {
RuntimeException(" must overriden !")
fatalError()
fatalError(#function + " must be overridden")
}
open func getChildCount() -> Int {
RuntimeException(" must overriden !")
fatalError()
fatalError(#function + " must be overridden")
}
open func toStringTree() -> String {
RuntimeException(" must overriden !")
fatalError()
fatalError(#function + " must be overridden")
}
open var description: String {
RuntimeException(" must overriden !")
fatalError()
fatalError(#function + " must be overridden")
}
open var debugDescription: String {
RuntimeException(" must overriden !")
fatalError()
fatalError(#function + " must be overridden")
}
}
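ParseTree keeps its abstract-base-class role, but the unreachable `RuntimeException` + `fatalError()` pairs collapse into a single `fatalError(#function + " must be overridden")`, so a missing override now reports its own name. A sketch of a concrete subclass (the class and its behaviour are illustrative, not part of the runtime):

final class DemoLeaf: ParseTree {
    override func getText() -> String { return "demo" }
    override func getChildCount() -> Int { return 0 }
    override func getChild(_ i: Int) -> Tree? { return nil }
    // Anything left un-overridden keeps the fatalError behaviour and names
    // itself via #function if it is ever reached at runtime.
}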


@ -27,9 +27,7 @@ open class ParseTreeVisitor<T> {
/// - Returns: The result of visiting the parse tree.
///
open func visit(_ tree: ParseTree) -> T? {
RuntimeException(" must overriden !")
return nil
fatalError(#function + " must be overridden")
}
///
@ -40,9 +38,7 @@ open class ParseTreeVisitor<T> {
/// - Returns: The result of visiting the children of the node.
///
open func visitChildren(_ node: RuleNode) -> T? {
RuntimeException(" must overriden !")
return nil
fatalError(#function + " must be overridden")
}
///
@ -52,9 +48,7 @@ open class ParseTreeVisitor<T> {
/// - Returns: The result of visiting the node.
///
open func visitTerminal(_ node: TerminalNode) -> T? {
RuntimeException(" must overriden !")
return nil
fatalError(#function + " must be overridden")
}
///
@ -64,7 +58,6 @@ open class ParseTreeVisitor<T> {
/// - Returns: The result of visiting the node.
///
open func visitErrorNode(_ node: ErrorNode) -> T? {
RuntimeException(" must overriden !")
return nil
fatalError(#function + " must be overridden")
}
}
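The visitor keeps the same double-dispatch contract; only the failure mode for un-overridden methods changes. A sketch of a concrete visitor under these signatures (the class name and counting behaviour are illustrative):

final class TokenCountVisitor: ParseTreeVisitor<Int> {
    override func visitTerminal(_ node: TerminalNode) -> Int? {
        return 1
    }
    override func visitChildren(_ node: RuleNode) -> Int? {
        var total = 0
        for i in 0 ..< node.getChildCount() {
            if let child = node.getChild(i) as? ParseTree {
                total += child.accept(self) ?? 0
            }
        }
        return total
    }
}

Calling `tree.accept(TokenCountVisitor())` on an error-free parse-tree root then counts its terminal nodes.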


@ -6,7 +6,6 @@
open class RuleNode: ParseTree {
open func getRuleContext() -> RuleContext {
RuntimeException(" must overriden !")
fatalError()
fatalError(#function + " must be overridden")
}
}

Some files were not shown because too many files have changed in this diff.