forked from jasder/antlr
commit dde893d365
@@ -36,6 +36,9 @@ matrix:
      compiler: clang
      osx_image: xcode8.1
      env: TARGET=swift
    - os: linux
      compiler: clang
      env: TARGET=swift
    - os: osx
      osx_image: xcode8.2
      env: TARGET=dotnet
@@ -0,0 +1,15 @@
set -euo pipefail

# make sure we use trusty repositories (travis by default uses precise)
curl https://repogen.simplylinux.ch/txt/trusty/sources_c4aa56bd26c0f54f391d8fae3e687ef5f6e97c26.txt | sudo tee /etc/apt/sources.list

# install dependencies
# some packages below will be updated; swift assumes newer versions
# of, for example, sqlite3 and libicu, and without the update some
# tools will not work
sudo apt-get update
sudo apt-get install clang libicu-dev libxml2 sqlite3

# This fixes a known linker issue mentioned in:
# https://bugs.swift.org/browse/SR-2299
sudo ln -sf ld.gold /usr/bin/ld
@@ -1,4 +1,20 @@
#!/bin/bash

# only test swift, as we develop on os x it is likely well tested there, and it is dog slow on travis
# linux specific setup; these steps have to be
# here since environment variables don't pass
# across scripts
if [ $TRAVIS_OS_NAME == "linux" ]; then
export SWIFT_VERSION=swift-3.1.1
export SWIFT_HOME=$(pwd)/swift/$SWIFT_VERSION-RELEASE-ubuntu14.04/usr/bin/
export PATH=$SWIFT_HOME:$PATH

# download swift
mkdir swift
curl https://swift.org/builds/$SWIFT_VERSION-release/ubuntu1404/$SWIFT_VERSION-RELEASE/$SWIFT_VERSION-RELEASE-ubuntu14.04.tar.gz -s | tar xz -C swift &> /dev/null
fi

# check swift
swift --version
swift build --version

mvn -q -Dtest=swift.* test
@@ -6,7 +6,7 @@

package org.antlr.v4.test.runtime.swift;

import org.antlr.v4.Tool;
import org.antlr.v4.runtime.misc.Pair;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;

@@ -20,113 +20,70 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.mkdir;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
import static org.junit.Assert.assertTrue;

public class BaseSwiftTest implements RuntimeTestSupport {

/**
* The base test directory is the directory where generated files get placed
* during unit test execution.
* Path of the ANTLR runtime.
*/
private static final String BASE_TEST_DIR;

private static String ANTLR_FRAMEWORK_DIR;
private static String ANTLR_RUNTIME_PATH;

/**
* Common routine to setup the ANTLR4 runtime.
* Absolute path to swift command.
*/
private static String SWIFT_CMD;

/**
* Environment variable name for swift home.
*/
private static final String SWIFT_HOME_ENV_KEY = "SWIFT_HOME";

static {
String baseTestDir = System.getProperty("antlr-swift-test-dir");
if (baseTestDir == null || baseTestDir.isEmpty()) {
baseTestDir = System.getProperty("java.io.tmpdir");
}
Map<String, String> env = System.getenv();
String swiftHome = env.containsKey(SWIFT_HOME_ENV_KEY) ? env.get(SWIFT_HOME_ENV_KEY) : "";
SWIFT_CMD = swiftHome + "swift";

if (!new File(baseTestDir).isDirectory()) {
throw new UnsupportedOperationException("The specified base test directory does not exist: " + baseTestDir);
}

BASE_TEST_DIR = baseTestDir;

//add antlr.swift
final ClassLoader loader = Thread.currentThread().getContextClassLoader();

final URL swiftRuntime = loader.getResource("Swift/Sources/Antlr4");
ClassLoader loader = Thread.currentThread().getContextClassLoader();
// build swift runtime
URL swiftRuntime = loader.getResource("Swift");
if (swiftRuntime == null) {
throw new RuntimeException("Swift runtime file not found at:" + swiftRuntime.getPath());
}
String swiftRuntimePath = swiftRuntime.getPath();
ANTLR_RUNTIME_PATH = swiftRuntime.getPath();
fastFailRunProcess(ANTLR_RUNTIME_PATH, SWIFT_CMD, "build");

try {
String commandLine = "find " + swiftRuntimePath + "/ -iname *.swift -not -name merge.swift -exec cat {} ;";
ProcessBuilder builder = new ProcessBuilder(commandLine.split(" "));
builder.redirectError(ProcessBuilder.Redirect.INHERIT);
Process p = builder.start();
StreamVacuum stdoutVacuum = new StreamVacuum(p.getInputStream());
stdoutVacuum.start();
p.waitFor();
stdoutVacuum.join();

String antlrSwift = stdoutVacuum.toString();
//write to Antlr4
ANTLR_FRAMEWORK_DIR = new File(BASE_TEST_DIR, "Antlr4").getAbsolutePath();
mkdir(ANTLR_FRAMEWORK_DIR);
writeFile(ANTLR_FRAMEWORK_DIR, "Antlr4.swift", antlrSwift);
//compile Antlr4 module
buildAntlr4Framework();
String argsString;
}
catch (Exception e) {
e.printStackTrace(System.err);
}
// shutdown logic
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run() {
// shutdown logic
eraseAntlrFrameWorkDir();
fastFailRunProcess(ANTLR_RUNTIME_PATH, SWIFT_CMD, "package", "clean");
}
});
}

private static void eraseFilesIn(String dirName) {
if (dirName == null) {
return;
}

File dir = new File(dirName);
String[] files = dir.list();
if (files != null) for (String file : files) {
new File(dirName + "/" + file).delete();
}
}

private static void eraseAntlrFrameWorkDir() {
File frameworkdir = new File(ANTLR_FRAMEWORK_DIR);
if (frameworkdir.exists()) {
eraseFilesIn(ANTLR_FRAMEWORK_DIR);
frameworkdir.delete();
}
}

private static boolean buildAntlr4Framework() throws Exception {
String argsString = "xcrun -sdk macosx swiftc -emit-library -emit-module Antlr4.swift -module-name Antlr4 -module-link-name Antlr4 -Xlinker -install_name -Xlinker " + ANTLR_FRAMEWORK_DIR + "/libAntlr4.dylib ";
return runProcess(argsString, ANTLR_FRAMEWORK_DIR);
}

public String tmpdir = null;

/**
* If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
*/
private String stderrDuringParse;

/**
* Errors found while running antlr
*/
private StringBuilder antlrToolErrors;

/**
* If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
* Source files used in each small swift project.
*/
protected String stderrDuringParse;
private Set<String> sourceFiles = new HashSet<>();

@Override
public void testSetUp() throws Exception {

@@ -137,21 +94,20 @@ public class BaseSwiftTest implements RuntimeTestSupport {
tmpdir = prop;
}
else {
tmpdir = new File(System.getProperty("java.io.tmpdir"), getClass().getSimpleName() +
"-" + Thread.currentThread().getName() + "-" + System.currentTimeMillis()).getAbsolutePath();
String classSimpleName = getClass().getSimpleName();
String threadName = Thread.currentThread().getName();
String childPath = String.format("%s-%s-%s", classSimpleName, threadName, System.currentTimeMillis());
tmpdir = new File(System.getProperty("java.io.tmpdir"), childPath).getAbsolutePath();
}
antlrToolErrors = new StringBuilder();

}

@Override
public void testTearDown() throws Exception {

}

@Override
public void eraseTempDir() {

}

@Override
@@ -179,78 +135,80 @@ public class BaseSwiftTest implements RuntimeTestSupport {

@Override
public String execLexer(String grammarFileName, String grammarStr, String lexerName, String input, boolean showDFA) {
boolean success = rawGenerateRecognizer(grammarFileName,
generateParser(grammarFileName,
grammarStr,
null,
lexerName);
assertTrue(success);
writeFile(tmpdir, "input", input);
writeLexerTestFile(lexerName, showDFA);
addSourceFiles("main.swift");

compile();
String output = execTest();
return output;
String projectName = "testcase-" + System.currentTimeMillis();
String projectDir = getTmpDir() + "/" + projectName;
buildProject(projectDir);
return execTest(projectDir, projectName);
}

private String execTest() {
@Override
public String execParser(String grammarFileName, String grammarStr, String parserName, String lexerName, String listenerName, String visitorName, String startRuleName, String input, boolean showDiagnosticErrors) {
generateParser(grammarFileName,
grammarStr,
parserName,
lexerName,
"-visitor");
writeFile(getTmpDir(), "input", input);
return execParser(parserName,
lexerName,
startRuleName,
showDiagnosticErrors,false);
}

private String execTest(String projectDir, String projectName) {
try {
String exec = tmpdir + "/" + EXEC_NAME;
String[] args =
new String[]{exec, "input"};//new File(tmpdir, "input").getAbsolutePath()
ProcessBuilder pb = new ProcessBuilder(args);
pb.directory(new File(tmpdir));
Process p = pb.start();
StreamVacuum stdoutVacuum = new StreamVacuum(p.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(p.getErrorStream());
stdoutVacuum.start();
stderrVacuum.start();
p.waitFor();
stdoutVacuum.join();
stderrVacuum.join();
String output = stdoutVacuum.toString();
if ( output.length()==0 ) {
output = null;
Pair<String, String> output = runProcess(projectDir, "./.build/debug/" + projectName, "input");
if (output.b.length() > 0) {
stderrDuringParse = output.b;
}
if (stderrVacuum.toString().length() > 0) {
this.stderrDuringParse = stderrVacuum.toString();
}
return output;
String stdout = output.a;
return stdout.length() > 0 ? stdout : null;
}
catch (Exception e) {
System.err.println("can't exec recognizer");
System.err.println("Execution of testcase failed.");
e.printStackTrace(System.err);
}
return null;
}

private Set<String> sourceFiles = new HashSet<String>();

private void addSourceFiles(String... files) {
Collections.addAll(this.sourceFiles, files);
}

public boolean compile() {
private void buildProject(String projectDir) {
mkdir(projectDir);
fastFailRunProcess(projectDir, SWIFT_CMD, "package", "init", "--type", "executable");
for (String sourceFile: sourceFiles) {
String absPath = getTmpDir() + "/" + sourceFile;
fastFailRunProcess(getTmpDir(), "mv", "-f", absPath, projectDir + "/Sources/");
}
fastFailRunProcess(getTmpDir(), "mv", "-f", "input", projectDir);

try {
return buildProject();
} catch (Exception e) {
return false;
String dylibPath = ANTLR_RUNTIME_PATH + "/.build/debug/";
Pair<String, String> buildResult = runProcess(projectDir, SWIFT_CMD, "build",
"-Xswiftc", "-I"+dylibPath,
"-Xlinker", "-L"+dylibPath,
"-Xlinker", "-lAntlr4",
"-Xlinker", "-rpath",
"-Xlinker", dylibPath);
if (buildResult.b.length() > 0) {
throw new RuntimeException("unit test build failed: " + buildResult.b);
}
} catch (IOException | InterruptedException e) {
e.printStackTrace();
}
}

private static final String EXEC_NAME = "Test";

private boolean buildProject() throws Exception {
String fileList = sourceFiles.toString().replace("[", "").replace("]", "")
.replace(", ", " ");

String argsString = "xcrun -sdk macosx swiftc " + fileList + " -o " + EXEC_NAME + " -I " + ANTLR_FRAMEWORK_DIR + " -L " + ANTLR_FRAMEWORK_DIR + " -module-link-name Antlr4 -suppress-warnings";
return runProcess(argsString, tmpdir);
}

private static boolean runProcess(String argsString, String execPath) throws IOException, InterruptedException {
String[] args = argsString.split(" ");
// System.err.println("Starting build " + argsString);//Utils.join(args, " "))
private static Pair<String,String> runProcess(String execPath, String... args) throws IOException, InterruptedException {
Process process = Runtime.getRuntime().exec(args, null, new File(execPath));
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());

@@ -259,46 +217,26 @@ public class BaseSwiftTest implements RuntimeTestSupport {
process.waitFor();
stdoutVacuum.join();
stderrVacuum.join();
if (stderrVacuum.toString().length() > 0) {
//this.stderrDuringParse = stderrVacuum.toString();
System.err.println("buildProject stderrVacuum: " + stderrVacuum);
return new Pair<>(stdoutVacuum.toString(), stderrVacuum.toString());
}

private static void fastFailRunProcess(String workingDir, String... command) {
ProcessBuilder builder = new ProcessBuilder(command);
builder.directory(new File(workingDir));
try {
Process p = builder.start();
p.waitFor();
} catch (Exception e) {
e.printStackTrace();
}
return process.exitValue() == 0;
}

@Override
public String execParser(String grammarFileName, String grammarStr, String parserName, String lexerName, String listenerName, String visitorName, String startRuleName, String input, boolean showDiagnosticErrors) {
return execParser(grammarFileName, grammarStr, parserName,
lexerName, startRuleName, input, showDiagnosticErrors, false);
}

protected String execParser(String grammarFileName,
String grammarStr,
String parserName,
String lexerName,
String startRuleName,
String input, boolean debug,boolean profile)
private String execParser(String parserName,
String lexerName,
String parserStartRuleName,
boolean debug,
boolean profile)
{
boolean success = rawGenerateRecognizer(grammarFileName,
grammarStr,
parserName,
lexerName,
"-visitor");
assertTrue(success);
writeFile(tmpdir, "input", input);
return rawExecRecognizer(parserName,
lexerName,
startRuleName,
debug,profile);
}

protected String rawExecRecognizer(String parserName,
String lexerName,
String parserStartRuleName,
boolean debug,
boolean profile)
{
this.stderrDuringParse = null;
if ( parserName==null ) {
writeLexerTestFile(lexerName, false);
}
@@ -311,24 +249,22 @@ public class BaseSwiftTest implements RuntimeTestSupport {
}

addSourceFiles("main.swift");
return execRecognizer();
String projectName = "testcase-" + System.currentTimeMillis();
String projectDir = getTmpDir() + "/" + projectName;
buildProject(projectDir);
return execTest(projectDir, projectName);
}

public String execRecognizer() {
compile();
return execTest();
}

protected void writeParserTestFile(String parserName,
String lexerName,
String parserStartRuleName,
boolean debug,
boolean profile) {
private void writeParserTestFile(String parserName,
String lexerName,
String parserStartRuleName,
boolean debug,
boolean profile) {

ST outputFileST = new ST(
"import Antlr4\n" +
"import Foundation\n" +
"setbuf(__stdoutp, nil)\n" +
"setbuf(stdout, nil)\n" +
"class TreeShapeListener: ParseTreeListener{\n" +
" func visitTerminal(_ node: TerminalNode){ }\n" +
" func visitErrorNode(_ node: ErrorNode){ }\n" +

@@ -384,12 +320,12 @@ public class BaseSwiftTest implements RuntimeTestSupport {
writeFile(tmpdir, "main.swift", outputFileST.render());
}

protected void writeLexerTestFile(String lexerName, boolean showDFA) {
private void writeLexerTestFile(String lexerName, boolean showDFA) {
ST outputFileST = new ST(
"import Antlr4\n" +
"import Antlr4\n" +
"import Foundation\n" +

"setbuf(__stdoutp, nil)\n" +
"setbuf(stdout, nil)\n" +
"let args = CommandLine.arguments\n" +
"let input = ANTLRFileStream(args[1])\n" +
"let lex = <lexerName>(input)\n" +

@@ -415,39 +351,27 @@ public class BaseSwiftTest implements RuntimeTestSupport {
}

/**
* Return true if all is well
* Generates the parser for one test case.
*/
private boolean rawGenerateRecognizer(String grammarFileName,
String grammarStr,
String parserName,
String lexerName,
String... extraOptions) {
return rawGenerateRecognizer(grammarFileName, grammarStr, parserName, lexerName, false, extraOptions);
}
private void generateParser(String grammarFileName,
String grammarStr,
String parserName,
String lexerName,
String... extraOptions) {
ErrorQueue equeue = antlrOnString(getTmpDir(), "Swift", grammarFileName, grammarStr, false, extraOptions);
assertTrue(equeue.errors.isEmpty());
// System.out.println(getTmpDir());

/**
* Return true if all is well
*/
private boolean rawGenerateRecognizer(String grammarFileName,
String grammarStr,
String parserName,
String lexerName,
boolean defaultListener,
String... extraOptions) {
ErrorQueue equeue = antlrOnString(getTmpDir(), "Swift", grammarFileName, grammarStr, defaultListener, extraOptions);
if (!equeue.errors.isEmpty()) {
return false;
}

List<String> files = new ArrayList<String>();
List<String> files = new ArrayList<>();
if (lexerName != null) {
files.add(lexerName + ".swift");
files.add(lexerName + "ATN.swift");
}

if (parserName != null) {
files.add(parserName + ".swift");
files.add(parserName + "ATN.swift");
Set<String> optionsSet = new HashSet<String>(Arrays.asList(extraOptions));
Set<String> optionsSet = new HashSet<>(Arrays.asList(extraOptions));
String grammarName = grammarFileName.substring(0, grammarFileName.lastIndexOf('.'));
if (!optionsSet.contains("-no-listener")) {
files.add(grammarName + "Listener.swift");

@@ -459,19 +383,5 @@ public class BaseSwiftTest implements RuntimeTestSupport {
}
}
addSourceFiles(files.toArray(new String[files.size()]));
return true;
}

protected static void mkdir(String dir) {
File f = new File(dir);
f.mkdirs();
}

protected Tool newTool(String[] args) {
return new Tool(args);
}

protected Tool newTool() {
return new Tool(new String[]{"-o", tmpdir});
}
}
@@ -16,11 +16,7 @@ namespace antlr4 {
protected:
/// The data being scanned.
// UTF-32
#if defined(_MSC_VER) && _MSC_VER == 1900
i32string _data; // Custom type for VS 2015.
#else
std::u32string _data;
#endif
UTF32String _data;

/// 0..n-1 index into string of next char </summary>
size_t p;
@@ -56,11 +56,13 @@
typedef __int32 ssize_t;
#endif

#if _MSC_VER == 1900
#if _MSC_VER >= 1900 && _MSC_VER < 2000
// VS 2015 has a known bug when using std::codecvt_utf8<char32_t>
// so we have to temporarily use __int32 instead.
// https://connect.microsoft.com/VisualStudio/feedback/details/1403302/unresolved-external-when-using-codecvt-utf8
typedef std::basic_string<__int32> i32string;

typedef i32string UTF32String;
#endif

#ifdef ANTLR4CPP_EXPORTS

@@ -73,11 +75,11 @@
#endif
#endif

#ifdef _MSC_VER
class ANTLR4CPP_PUBLIC std::exception; // Needed for VS 2015.
#endif

#elif defined(__APPLE__)
#elif __APPLE__
typedef std::u32string UTF32String;

#define GUID_CFUUID
#if __GNUC__ >= 4
#define ANTLR4CPP_PUBLIC __attribute__ ((visibility ("default")))

@@ -85,6 +87,8 @@
#define ANTLR4CPP_PUBLIC
#endif
#else
typedef std::u32string UTF32String;

#define GUID_LIBUUID
#if __GNUC__ >= 6
#define ANTLR4CPP_PUBLIC __attribute__ ((visibility ("default")))
@@ -126,6 +126,8 @@ size_t ParserATNSimulator::adaptivePredict(TokenStream *input, size_t decision,
} else {
dfa::DFAState *newState = new dfa::DFAState(std::move(s0_closure)); /* mem-check: managed by the DFA or deleted below */
s0 = addDFAState(dfa, newState);

delete dfa.s0; // Delete existing s0 DFA state, if there's any.
dfa.s0 = s0;
if (s0 != newState) {
delete newState; // If there was already a state with this config set we don't need the new one.
@@ -26,35 +26,34 @@ DFA::DFA(atn::DecisionState *atnStartState, size_t decision)
if (static_cast<atn::StarLoopEntryState *>(atnStartState)->isPrecedenceDecision) {
_precedenceDfa = true;
s0 = new DFAState(std::unique_ptr<atn::ATNConfigSet>(new atn::ATNConfigSet()));
_s0Shadow = s0;
s0->isAcceptState = false;
s0->requiresFullContext = false;
}
}
}

DFA::DFA(DFA &&other) : atnStartState(std::move(other.atnStartState)), decision(std::move(other.decision)) {
DFA::DFA(DFA &&other) : atnStartState(other.atnStartState), decision(other.decision) {
// Source states are implicitly cleared by the move.
states = std::move(other.states);

// Manually move s0 pointers.
other.atnStartState = nullptr;
other.decision = 0;
s0 = other.s0;
other.s0 = nullptr;
_s0Shadow = other._s0Shadow;
other._s0Shadow = nullptr;

_precedenceDfa = other._precedenceDfa;
other._precedenceDfa = false;
}

DFA::~DFA() {
// ml: s0 can be set either in our constructor or by external code, so we need a way to track our own creation.
// We could use a shared pointer again (and force that on all external assignments etc.) or just shadow it.
// I hesitate moving s0 to the normal states list as this might conflict with consumers of that list.
delete _s0Shadow;

bool s0InList = (s0 == nullptr);
for (auto state : states) {
if (state == s0)
s0InList = true;
delete state;
}

if (!s0InList)
delete s0;
}

bool DFA::isPrecedenceDfa() const {
@@ -20,10 +20,10 @@ namespace dfa {
/// Set only allows you to see if it's there.

/// From which ATN state did we create this DFA?
atn::DecisionState *const atnStartState;
atn::DecisionState *atnStartState;
std::unordered_set<DFAState *, DFAState::Hasher, DFAState::Comparer> states; // States are owned by this class.
DFAState *s0;
const size_t decision;
size_t decision;

DFA(atn::DecisionState *atnStartState);
DFA(atn::DecisionState *atnStartState, size_t decision);

@@ -85,7 +85,6 @@ namespace dfa {
* {@code false}. This is the backing field for {@link #isPrecedenceDfa}.
*/
bool _precedenceDfa;
DFAState *_s0Shadow = nullptr; // ml: assigned when we created s0 ourselves.
};

} // namespace atn
@@ -6,16 +6,6 @@
#pragma once

namespace antlr4 {
class IllegalStateException;
class IllegalArgumentException;
class NoSuchElementException;
class NullPointerException;
class InputMismatchException;
class ParseCancellationException;
class InputMismatchException;
class EmptyStackException;
class LexerNoViableAltException;

class ANTLRErrorListener;
class ANTLRErrorStrategy;
class ANTLRFileStream;

@@ -30,7 +20,10 @@ namespace antlr4 {
class ConsoleErrorListener;
class DefaultErrorStrategy;
class DiagnosticErrorListener;
class EmptyStackException;
class FailedPredicateException;
class IllegalArgumentException;
class IllegalStateException;
class InputMismatchException;
class IntStream;
class InterpreterRuleContext;

@@ -38,7 +31,10 @@ namespace antlr4 {
class LexerInterpreter;
class LexerNoViableAltException;
class ListTokenSource;
class NoSuchElementException;
class NoViableAltException;
class NullPointerException;
class ParseCancellationException;
class Parser;
class ParserInterpreter;
class ParserRuleContext;

@@ -59,7 +55,6 @@ namespace antlr4 {
class Interval;
class IntervalSet;
class MurmurHash;
class ParseCancellationException;
class Utils;
class Predicate;
}
@@ -7,29 +7,29 @@

namespace antlrcpp {

void replaceAll(std::string& str, const std::string& from, const std::string& to)
void replaceAll(std::string& str, std::string const& from, std::string const& to)
{
if(from.empty()) {
if (from.empty())
return;
}

size_t start_pos = 0;
while((start_pos = str.find(from, start_pos)) != std::string::npos) {
while ((start_pos = str.find(from, start_pos)) != std::string::npos) {
str.replace(start_pos, from.length(), to);
start_pos += to.length(); // In case 'to' contains 'from', like replacing 'x' with 'yx'
start_pos += to.length(); // In case 'to' contains 'from', like replacing 'x' with 'yx'.
}
}

std::string ws2s(const std::wstring &wstr) {
std::string ws2s(std::wstring const& wstr) {
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;

std::string narrow = converter.to_bytes(wstr);

return narrow;
}

std::wstring s2ws(const std::string &str) {
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;

std::wstring wide = converter.from_bytes(str);

return wide;
}
@@ -8,42 +8,47 @@
#include "antlr4-common.h"

namespace antlrcpp {

// For all conversions utf8 <-> utf32.
// VS 2015 and VS 2017 have different bugs in std::codecvt_utf8<char32_t> (VS 2013 works fine).
#if defined(_MSC_VER) && _MSC_VER >= 1900 && _MSC_VER < 2000
using UtfConverterType = std::wstring_convert<std::codecvt_utf8<__int32>, __int32>;
using UtfConverterWide = std::u32string;
typedef std::wstring_convert<std::codecvt_utf8<__int32>, __int32> UTF32Converter;
#else
using UtfConverterType = std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t>;
using UtfConverterWide = std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t>::wide_string;
typedef std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t> UTF32Converter;
#endif

//the conversion functions fails in VS2017, so we explicitly use a workaround

// The conversion functions fails in VS2017, so we explicitly use a workaround.
template<typename T>
inline std::string utf32_to_utf8(T _data)
inline std::string utf32_to_utf8(T const& data)
{
#if defined(_MSC_VER) && _MSC_VER > 1900 && _MSC_VER < 2000
auto p = reinterpret_cast<const int32_t *>(_data.data());
return UtfConverterType().to_bytes(p, p + _data.size());
// Don't make the converter static or we have to serialize access to it.
UTF32Converter converter;

#if _MSC_VER >= 1900 && _MSC_VER < 2000
auto p = reinterpret_cast<const int32_t *>(data.data());
return converter.to_bytes(p, p + data.size());
#else
return UtfConverterType().to_bytes(_data);
return converter.to_bytes(data);
#endif
}

inline UtfConverterWide utf8_to_utf32(const char* first, const char* last)
inline UTF32String utf8_to_utf32(const char* first, const char* last)
{
#if defined(_MSC_VER) && _MSC_VER > 1900 && _MSC_VER < 2000
auto r = UtfConverterType().from_bytes(first, last);
std::u32string s = reinterpret_cast<const char32_t *>(r.data());
return s;
UTF32Converter converter;

#if _MSC_VER >= 1900 && _MSC_VER < 2000
auto r = converter.from_bytes(first, last);
i32string s = reinterpret_cast<const int32_t *>(r.data());
#else
return UtfConverterType().from_bytes(first, last);
std::u32string s = converter.from_bytes(first, last);
#endif

return s;
}

void replaceAll(std::string& str, const std::string& from, const std::string& to);
void replaceAll(std::string &str, std::string const& from, std::string const& to);

// string <-> wstring conversion (UTF-16), e.g. for use with Window's wide APIs.
ANTLR4CPP_PUBLIC std::string ws2s(const std::wstring &wstr);
ANTLR4CPP_PUBLIC std::wstring s2ws(const std::string &str);
ANTLR4CPP_PUBLIC std::string ws2s(std::wstring const& wstr);
ANTLR4CPP_PUBLIC std::wstring s2ws(std::string const& str);
}
@@ -20,8 +20,8 @@ Token* TerminalNodeImpl::getSymbol() {
return symbol;
}

void TerminalNodeImpl::setParent(RuleContext *parent) {
this->parent = parent;
void TerminalNodeImpl::setParent(RuleContext *parent_) {
this->parent = parent_;
}

misc::Interval TerminalNodeImpl::getSourceInterval() {
@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "container:Antlr4.xcodeproj">
</FileRef>
</Workspace>
@@ -7,3 +7,13 @@ import PackageDescription
let package = Package(
name: "Antlr4"
)

products.append(
Product(
name: "Antlr4",
type: .Library(.Dynamic),
modules: [
"Antlr4"
]
)
)
@@ -236,9 +236,9 @@ public class CommonToken: WritableToken {
}
var txt: String
if let tokenText = getText() {
txt = tokenText.replaceAll("\n", replacement: "\\n")
txt = txt.replaceAll("\r", replacement: "\\r")
txt = txt.replaceAll("\t", replacement: "\\t")
txt = tokenText.replacingOccurrences(of: "\n", with: "\\n")
txt = txt.replacingOccurrences(of: "\r", with: "\\r")
txt = txt.replacingOccurrences(of: "\t", with: "\\t")
} else {
txt = "<no text>"
}
@@ -574,9 +574,9 @@ public class DefaultErrorStrategy: ANTLRErrorStrategy {

internal func escapeWSAndQuote(_ s: String) -> String {
var s = s
s = s.replaceAll("\n", replacement: "\\n")
s = s.replaceAll("\r", replacement: "\\r")
s = s.replaceAll("\t", replacement: "\\t")
s = s.replacingOccurrences(of: "\n", with: "\\n")
s = s.replacingOccurrences(of: "\r", with: "\\r")
s = s.replacingOccurrences(of: "\t", with: "\\t")
return "'" + s + "'"
}
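These hunks, like the matching ones in CommonToken and Recognizer, drop the runtime's custom replaceAll(_:replacement:) helper in favour of Foundation's standard String API. A minimal stand-alone sketch of the replacement call, with a made-up input string purely for illustration:

import Foundation

// replacingOccurrences(of:with:) is a standard Foundation String method,
// so no custom replaceAll helper is needed any more.
let raw = "a\tb\n"                                   // hypothetical token text
let escaped = raw
    .replacingOccurrences(of: "\n", with: "\\n")
    .replacingOccurrences(of: "\r", with: "\\r")
    .replacingOccurrences(of: "\t", with: "\\t")
print(escaped)                                       // prints: a\tb\n (escapes now literal)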
@@ -56,12 +56,10 @@ public class DiagnosticErrorListener: BaseErrorListener {
return
}

let format: String = "reportAmbiguity d=%@: ambigAlts=%@, input='%@'"
let decision: String = getDecisionDescription(recognizer, dfa)
let conflictingAlts: BitSet = try getConflictingAlts(ambigAlts, configs)
let text: String = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))

let message: String = NSString(format: format as NSString, decision, conflictingAlts.description, text) as String
let decision = getDecisionDescription(recognizer, dfa)
let conflictingAlts = try getConflictingAlts(ambigAlts, configs)
let text = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let message = "reportAmbiguity d=\(decision): ambigAlts=\(conflictingAlts), input='\(text)'"
try recognizer.notifyErrorListeners(message)
}

@@ -72,10 +70,9 @@ public class DiagnosticErrorListener: BaseErrorListener {
_ stopIndex: Int,
_ conflictingAlts: BitSet?,
_ configs: ATNConfigSet) throws {
let format: String = "reportAttemptingFullContext d=%@, input='%@'"
let decision: String = getDecisionDescription(recognizer, dfa)
let text: String = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let message: String = NSString(format: format as NSString, decision, text) as String
let decision = getDecisionDescription(recognizer, dfa)
let text = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let message = "reportAttemptingFullContext d=\(decision), input='\(text)'"
try recognizer.notifyErrorListeners(message)
}

@@ -86,10 +83,9 @@ public class DiagnosticErrorListener: BaseErrorListener {
_ stopIndex: Int,
_ prediction: Int,
_ configs: ATNConfigSet) throws {
let format: String = "reportContextSensitivity d=%@, input='%@'"
let decision: String = getDecisionDescription(recognizer, dfa)
let text: String = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let message: String = NSString(format: format as NSString, decision, text) as String
let decision = getDecisionDescription(recognizer, dfa)
let text = try recognizer.getTokenStream()!.getText(Interval.of(startIndex, stopIndex))
let message = "reportContextSensitivity d=\(decision), input='\(text)'"
try recognizer.notifyErrorListeners(message)
}

@@ -107,8 +103,7 @@ public class DiagnosticErrorListener: BaseErrorListener {
if ruleName.isEmpty {
return String(decision)
}

return NSString(format: "%d (%@)", decision, ruleName) as String
return "\(decision) (\(ruleName))"
}

/// Computes the set of conflicting or ambiguous alternatives from a

@@ -127,4 +122,5 @@ public class DiagnosticErrorListener: BaseErrorListener {
let result = try configs.getAltBitSet()
return result
}

}
@@ -62,6 +62,12 @@ open class Parser: Recognizer<ParserATNSimulator> {
// TODO: Print exit info.
}
}

/// mutex for bypassAltsAtnCache updates
private var bypassAltsAtnCacheMutex = Mutex()

/// mutex for decisionToDFA updates
private var decisionToDFAMutex = Mutex()

/**
* This field maps from the serialized ATN string to the deserialized {@link org.antlr.v4.runtime.atn.ATN} with

@@ -71,6 +77,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
*/
private let bypassAltsAtnCache: HashMap<String, ATN> = HashMap<String, ATN>()

/**
* The error handling strategy for the parser. The default value is a new
* instance of {@link org.antlr.v4.runtime.DefaultErrorStrategy}.

@@ -110,7 +117,6 @@ open class Parser: Recognizer<ParserATNSimulator> {
*/
internal var _buildParseTrees: Bool = true

/**
* When {@link #setTrace}{@code (true)} is called, a reference to the
* {@link org.antlr.v4.runtime.Parser.TraceListener} is stored here so it can be easily removed in a

@@ -355,12 +361,6 @@ open class Parser: Recognizer<ParserATNSimulator> {
_parseListeners = nil
}
}

// if (_parseListeners.remove(listener)) {
// if (_parseListeners.isEmpty) {
// _parseListeners = nil;
// }
// }
}
}

@@ -440,16 +440,14 @@ open class Parser: Recognizer<ParserATNSimulator> {
let serializedAtn: String = getSerializedATN()

var result: ATN? = bypassAltsAtnCache[serializedAtn]
synced(bypassAltsAtnCache) {
bypassAltsAtnCacheMutex.synchronized {
[unowned self] in
if result == nil {
let deserializationOptions: ATNDeserializationOptions = ATNDeserializationOptions()
try! deserializationOptions.setGenerateRuleBypassTransitions(true)
result = try! ATNDeserializer(deserializationOptions).deserialize(Array(serializedAtn.characters))
result = try! ATNDeserializer(deserializationOptions).deserialize(Array(serializedAtn.characters))
self.bypassAltsAtnCache[serializedAtn] = result!
}

}
return result!
}

@@ -988,7 +986,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
guard let _interp = _interp else {
return s
}
synced(_interp.decisionToDFA as AnyObject) {
decisionToDFAMutex.synchronized {
[unowned self] in

for d in 0..<_interp.decisionToDFA.count {

@@ -1005,7 +1003,7 @@ open class Parser: Recognizer<ParserATNSimulator> {
guard let _interp = _interp else {
return
}
synced(_interp.decisionToDFA as AnyObject) {
decisionToDFAMutex.synchronized {
[unowned self] in
var seenOne: Bool = false
@@ -19,6 +19,12 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
public var _interp: ATNInterpreter!

private var _stateNumber: Int = -1

/// mutex for tokenTypeMapCache updates
private var tokenTypeMapCacheMutex = Mutex()

/// mutex for ruleIndexMapCacheMutex updates
private var ruleIndexMapCacheMutex = Mutex()

/** Used to print out token names like ID during debugging and
* error reporting. The generated parsers implement a method

@@ -57,7 +63,7 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
public func getTokenTypeMap() -> Dictionary<String, Int> {
let vocabulary: Vocabulary = getVocabulary()
var result: Dictionary<String, Int>? = self.tokenTypeMapCache[vocabulary]
synced(tokenTypeMapCache) {
tokenTypeMapCacheMutex.synchronized {
[unowned self] in
if result == nil {
result = Dictionary<String, Int>()

@@ -80,8 +86,6 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {

self.tokenTypeMapCache[vocabulary] = result!
}

}
return result!

@@ -96,12 +100,11 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
let ruleNames: [String] = getRuleNames()

let result: Dictionary<String, Int>? = self.ruleIndexMapCache[ArrayWrapper<String>(ruleNames)]
synced(ruleIndexMapCache) {
ruleIndexMapCacheMutex.synchronized {
[unowned self] in
if result == nil {
self.ruleIndexMapCache[ArrayWrapper<String>(ruleNames)] = Utils.toMap(ruleNames)
}

}
return result!

@@ -212,9 +215,9 @@ open class Recognizer<ATNInterpreter:ATNSimulator> {
s = "<\(t.getType())>"
}
}
s = s.replaceAll("\n", replacement: "\\n")
s = s.replaceAll("\r", replacement: "\\r")
s = s.replaceAll("\t", replacement: "\\t")
s = s.replacingOccurrences(of: "\n", with: "\\n")
s = s.replacingOccurrences(of: "\r", with: "\\r")
s = s.replacingOccurrences(of: "\t", with: "\\t")
return "\(s)"
}
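Throughout the Swift runtime hunks, ad-hoc synced(_:) calls are replaced by dedicated per-field Mutex instances with a synchronized closure API. The diff only shows the call sites; a minimal sketch of a helper with that shape (an assumption for illustration, not necessarily the runtime's actual Mutex implementation) could look like this:

import Foundation

// Hypothetical minimal lock wrapper matching the call sites in the diff:
// mutex.synchronized { ... } runs the closure while holding the lock and
// can also return the closure's value, which the addDFAState rewrites use.
final class Mutex {
    private let lock = NSLock()

    @discardableResult
    func synchronized<R>(_ body: () throws -> R) rethrows -> R {
        lock.lock()
        defer { lock.unlock() }
        return try body()
    }
}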
@@ -554,22 +554,19 @@ public class ATNDeserializer {
}

public func deserializeFromJson(_ jsonStr: String) -> ATN {
// let jsonStr = Utils.readFile2String(jsonFileName)
guard !jsonStr.isEmpty else {
fatalError("ATN Serialization is empty,Please include *LexerATN.json and *ParserATN.json in TARGETS-Build Phases-Copy Bundle Resources")
}
if let JSONData = jsonStr.data(using: String.Encoding.utf8) {
do {
let JSON = try JSONSerialization.jsonObject(with: JSONData, options: JSONSerialization.ReadingOptions(rawValue: 0))
guard let JSONDictionary: NSDictionary = JSON as? NSDictionary else {
print("Not a Dictionary")
// put in function
guard let JSONDictionary = JSON as? Dictionary<String, Any> else {
fatalError("deserializeFromJson Not a Dictionary")
}

return try dictToJson(JSONDictionary)

} catch let JSONError as NSError {
} catch let JSONError {
print("\(JSONError)")
}
}

@@ -577,10 +574,10 @@ public class ATNDeserializer {
fatalError("Could not deserialize ATN ")
}

public func dictToJson(_ dict: NSDictionary) throws -> ATN {
public func dictToJson(_ dict: Dictionary<String, Any>) throws -> ATN {

let version: Int = dict.object(forKey: "version") as! Int
let version: Int = dict["version"] as! Int
if version != ATNDeserializer.SERIALIZED_VERSION {

let reason: String = "Could not deserialize ATN with version \(version) (expected \(ATNDeserializer.SERIALIZED_VERSION))."

@@ -588,7 +585,7 @@ public class ATNDeserializer {
throw ANTLRError.unsupportedOperation(msg: reason)
}

let uuid: UUID = UUID(uuidString: dict.object(forKey: "uuid") as! String)!
let uuid: UUID = UUID(uuidString: dict["uuid"] as! String)!

if !ATNDeserializer.SUPPORTED_UUIDS.contains(uuid) {
let reason: String = "Could not deserialize ATN with UUID \(uuid) (expected \(ATNDeserializer.SERIALIZED_UUID) or a legacy UUID)."

@@ -599,8 +596,8 @@ public class ATNDeserializer {
let supportsPrecedencePredicates: Bool = isFeatureSupported(ATNDeserializer.ADDED_PRECEDENCE_TRANSITIONS, uuid)
let supportsLexerActions: Bool = isFeatureSupported(ATNDeserializer.ADDED_LEXER_ACTIONS, uuid)

let grammarType: ATNType = ATNType(rawValue: dict.object(forKey: "grammarType") as! Int)!
let maxTokenType: Int = dict.object(forKey: "maxTokenType") as! Int
let grammarType: ATNType = ATNType(rawValue: dict["grammarType"] as! Int)!
let maxTokenType: Int = dict["maxTokenType"] as! Int
let atn: ATN = ATN(grammarType, maxTokenType)

//
@@ -609,22 +606,22 @@ public class ATNDeserializer {
var loopBackStateNumbers: Array<(LoopEndState, Int)> = Array<(LoopEndState, Int)>()
var endStateNumbers: Array<(BlockStartState, Int)> = Array<(BlockStartState, Int)>()

let states = dict.object(forKey: "states") as! [NSDictionary]

let states = dict["states"] as! [Dictionary<String, Any>]

for state in states {

let ruleIndex: Int = state.object(forKey: "ruleIndex") as! Int
let ruleIndex: Int = state["ruleIndex"] as! Int

let stype: Int = state.object(forKey: "stateType") as! Int
let stype: Int = state["stateType"] as! Int
let s: ATNState = try stateFactory(stype, ruleIndex)!
if stype == ATNState.LOOP_END {
// special case
let loopBackStateNumber: Int = state.object(forKey: "detailStateNumber") as! Int
let loopBackStateNumber: Int = state["detailStateNumber"] as! Int
loopBackStateNumbers.append((s as! LoopEndState, loopBackStateNumber))
} else {
if s is BlockStartState {
let endStateNumber: Int = state.object(forKey: "detailStateNumber") as! Int
let endStateNumber: Int = state["detailStateNumber"] as! Int
endStateNumbers.append((s as! BlockStartState, endStateNumber))
}
}

@@ -642,13 +639,13 @@ public class ATNDeserializer {
pair.0.endState = atn.states[pair.1] as? BlockEndState
}

let numNonGreedyStates = dict.object(forKey: "nonGreedyStates") as! [Int]
let numNonGreedyStates = dict["nonGreedyStates"] as! [Int]
for numNonGreedyState in numNonGreedyStates {
(atn.states[numNonGreedyState] as! DecisionState).nonGreedy = true
}

if supportsPrecedencePredicates {
let numPrecedenceStates = dict.object(forKey: "precedenceStates") as! [Int]
let numPrecedenceStates = dict["precedenceStates"] as! [Int]
for numPrecedenceState in numPrecedenceStates {
(atn.states[numPrecedenceState] as! RuleStartState).isPrecedenceRule = true
}

@@ -658,7 +655,7 @@ public class ATNDeserializer {
//
// RULES
//
let ruleToStartState = dict.object(forKey: "ruleToStartState") as! [NSDictionary]
let ruleToStartState = dict["ruleToStartState"] as! [Dictionary<String, Any>]

let nrules: Int = ruleToStartState.count
if atn.grammarType == ATNType.lexer {

@@ -668,11 +665,11 @@ public class ATNDeserializer {
atn.ruleToStartState = [RuleStartState](repeating: RuleStartState(), count: nrules) // [nrules];
for i in 0..<nrules {
let currentRuleToStartState = ruleToStartState[i]
let s: Int = currentRuleToStartState.object(forKey: "stateNumber") as! Int
let s: Int = currentRuleToStartState["stateNumber"] as! Int
let startState: RuleStartState = atn.states[s] as! RuleStartState
atn.ruleToStartState[i] = startState
if atn.grammarType == ATNType.lexer {
var tokenType: Int = currentRuleToStartState.object(forKey: "ruleToTokenType") as! Int
var tokenType: Int = currentRuleToStartState["ruleToTokenType"] as! Int
if tokenType == -1 {
tokenType = CommonToken.EOF
}

@@ -699,7 +696,7 @@ public class ATNDeserializer {
//
// MODES
//
let modeToStartState = dict.object(forKey: "modeToStartState") as! [Int]
let modeToStartState = dict["modeToStartState"] as! [Int]
//let nmodes : Int = toInt(data[p++]);
//for var i : Int=0; i<nmodes; i++ {
for stateNumber in modeToStartState {
@@ -708,33 +705,30 @@ public class ATNDeserializer {
//atn.modeToStartState.append(atn.states[s] as! TokensStartState)
}

//
// SETS
//
var sets: Array<IntervalSet> = Array<IntervalSet>()
let nsets: Int = dict.object(forKey: "nsets") as! Int
let intervalSet = dict.object(forKey: "IntervalSet") as! [NSDictionary]
let nsets: Int = dict["nsets"] as! Int
let intervalSet = dict["IntervalSet"] as! [Dictionary<String, Any>]

for i in 0..<nsets {
let setBuilder = intervalSet[i]
let nintervals: Int = setBuilder.object(forKey: "size") as! Int
let nintervals: Int = setBuilder["size"] as! Int

let set: IntervalSet = try IntervalSet()
sets.append(set)

let containsEof: Bool = (setBuilder.object(forKey: "containsEof") as! Int) != 0
let containsEof: Bool = (setBuilder["containsEof"] as! Int) != 0
if containsEof {
try set.add(-1)
}
let intervalsBuilder = setBuilder.object(forKey: "Intervals") as! [NSDictionary]
let intervalsBuilder = setBuilder["Intervals"] as! [Dictionary<String, Any>]

for j in 0..<nintervals {
let vals = intervalsBuilder[j]
try set.add((vals.object(forKey: "a") as! Int), (vals.object(forKey: "b") as! Int))
try set.add((vals["a"] as! Int), (vals["b"] as! Int))
}
}

@@ -744,17 +738,17 @@ public class ATNDeserializer {
// EDGES
//
// let nedges : Int = dict.objectForKey("nedges") as! Int
let allTransitions = dict.object(forKey: "allTransitionsBuilder") as! [[NSDictionary]]
let allTransitions = dict["allTransitionsBuilder"] as! [[Dictionary<String, Any>]]

for transitionsBuilder in allTransitions {

for transition in transitionsBuilder {
let src: Int = transition.object(forKey: "src") as! Int
let trg: Int = transition.object(forKey: "trg") as! Int
let ttype: Int = transition.object(forKey: "edgeType") as! Int
let arg1: Int = transition.object(forKey: "arg1") as! Int
let arg2: Int = transition.object(forKey: "arg2") as! Int
let arg3: Int = transition.object(forKey: "arg3") as! Int
let src: Int = transition["src"] as! Int
let trg: Int = transition["trg"] as! Int
let ttype: Int = transition["edgeType"] as! Int
let arg1: Int = transition["arg1"] as! Int
let arg2: Int = transition["arg2"] as! Int
let arg3: Int = transition["arg3"] as! Int
let trans: Transition = try edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)

let srcState: ATNState = atn.states[src]!

@@ -830,7 +824,7 @@ public class ATNDeserializer {
//
// DECISIONS
//
let ndecisions: [Int] = dict.object(forKey: "decisionToState") as! [Int]
let ndecisions: [Int] = dict["decisionToState"] as! [Int]
let length = ndecisions.count
for i in 0..<length {
let s: Int = ndecisions[i]

@@ -844,17 +838,17 @@ public class ATNDeserializer {
// LEXER ACTIONS
//
if atn.grammarType == ATNType.lexer {
let lexerActionsBuilder = dict.object(forKey: "lexerActions") as! [NSDictionary]
let lexerActionsBuilder = dict["lexerActions"] as! [Dictionary<String, Any>]
if supportsLexerActions {
atn.lexerActions = [LexerAction](repeating: LexerAction(), count: lexerActionsBuilder.count) //[toInt(data[p++])];
let length = atn.lexerActions.count
for i in 0..<length {
let actionTypeValue = lexerActionsBuilder[i].object(forKey: "actionType") as! Int
let actionTypeValue = lexerActionsBuilder[i]["actionType"] as! Int
let actionType: LexerActionType = LexerActionType(rawValue: actionTypeValue)! //LexerActionType.values()[toInt(data[p++])];
let data1: Int = lexerActionsBuilder[i].object(forKey: "a") as! Int
let data1: Int = lexerActionsBuilder[i]["a"] as! Int

let data2: Int = lexerActionsBuilder[i].object(forKey: "b") as! Int
let data2: Int = lexerActionsBuilder[i]["b"] as! Int

let lexerAction: LexerAction = lexerActionFactory(actionType, data1, data2)
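The deserializer hunks above switch from NSDictionary with object(forKey:) to a native Dictionary<String, Any> read with subscripts. A tiny stand-alone illustration of that access pattern (the JSON payload here is made up; the real ATN JSON carries many more keys):

import Foundation

let json = "{\"version\": 3, \"maxTokenType\": 110}".data(using: .utf8)!
let parsed = try! JSONSerialization.jsonObject(with: json, options: [])
guard let dict = parsed as? Dictionary<String, Any> else {
    fatalError("deserializeFromJson Not a Dictionary")
}
let version = dict["version"] as! Int            // was: dict.object(forKey: "version") as! Int
let maxTokenType = dict["maxTokenType"] as! Int  // was: dict.object(forKey: "maxTokenType") as! Int
print(version, maxTokenType)                     // 3 110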
@@ -23,14 +23,12 @@ public class LL1Analyzer {
/// - parameter s: the ATN state
/// - returns: the expected symbols for each outgoing transition of {@code s}.
public func getDecisionLookahead(_ s: ATNState?) throws -> [IntervalSet?]? {
// print("LOOK("+s.stateNumber+")");

guard let s = s else {
return nil
}
let length = s.getNumberOfTransitions()
var look: [IntervalSet?] = [IntervalSet?](repeating: nil, count: length)
//new IntervalSet[s.getNumberOfTransitions()];
for alt in 0..<length {
look[alt] = try IntervalSet()
var lookBusy: Set<ATNConfig> = Set<ATNConfig>()

@@ -138,10 +136,6 @@ public class LL1Analyzer {
lookBusy.insert(c)
}

// if ( !lookBusy.insert (c) ) {
// return;
// }

if s == stopState {
guard let ctx = ctx else {
try look.add(CommonToken.EPSILON)

@@ -175,11 +169,8 @@ public class LL1Analyzer {

var removed: Bool = try calledRuleStack.get(returnState.ruleIndex!)
//TODO try
//try {
try calledRuleStack.clear(returnState.ruleIndex!)
try self._LOOK(returnState, stopState, ctx.getParent(i), look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
//}
defer {
if removed {
try! calledRuleStack.set(returnState.ruleIndex!)

@@ -193,18 +184,15 @@ public class LL1Analyzer {
var n: Int = s.getNumberOfTransitions()
for i in 0..<n {
var t: Transition = s.transition(i)
if type(of: t) === RuleTransition.self {
if type(of: t) == RuleTransition.self {
if try calledRuleStack.get((t as! RuleTransition).target.ruleIndex!) {
continue
}

var newContext: PredictionContext =
SingletonPredictionContext.create(ctx, (t as! RuleTransition).followState.stateNumber)
//TODO try
//try {
try calledRuleStack.set((t as! RuleTransition).target.ruleIndex!)
try _LOOK(t.target, stopState, newContext, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
//}
defer {
try! calledRuleStack.clear((t as! RuleTransition).target.ruleIndex!)
}

@@ -219,7 +207,7 @@ public class LL1Analyzer {
if t.isEpsilon() {
try _LOOK(t.target, stopState, ctx, look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
if type(of: t) === WildcardTransition.self {
if type(of: t) == WildcardTransition.self {
try look.addAll(IntervalSet.of(CommonToken.MIN_USER_TOKEN_TYPE, atn.maxTokenType))
} else {
@@ -59,7 +59,14 @@ open class LexerATNSimulator: ATNSimulator {
public var charPositionInLine: Int = 0

public final var decisionToDFA: [DFA]

internal var mode: Int = Lexer.DEFAULT_MODE

/// mutex for DFAState change
private var dfaStateMutex = Mutex()

/// mutex for changes to all DFAStates map
private var dfaStatesMutex = Mutex()

/// Used during DFA/ATN exec to record the most recent accept configuration info

@@ -648,7 +655,7 @@ open class LexerATNSimulator: ATNSimulator {
print("EDGE \(p) -> \(q) upon \(t)")
}

synced(p) {
dfaStateMutex.synchronized {
if p.edges == nil {
// make room for tokens 1..n and -1 masquerading as index 0
//TODO ARRAY COUNT

@@ -678,20 +685,19 @@ open class LexerATNSimulator: ATNSimulator {
}

let dfa: DFA = decisionToDFA[mode]
//synced (dfa.states) {
let existing = dfa.states[proposed]
if existing != nil {
return existing!!

return dfaStatesMutex.synchronized {
if let existing = dfa.states[proposed] {
return existing!
}

let newState: DFAState = proposed
newState.stateNumber = dfa.states.count
configs.setReadonly(true)
newState.configs = configs
dfa.states[newState] = newState
return newState
}

let newState: DFAState = proposed

newState.stateNumber = dfa.states.count
configs.setReadonly(true)
newState.configs = configs
dfa.states[newState] = newState
return newState
//}
}
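In the addDFAState hunk above, the whole lookup-or-insert sequence moves inside dfaStatesMutex.synchronized and the closure's result is returned directly, so the early return for an existing state no longer sits next to duplicated insertion code. A simplified, hypothetical sketch of that pattern, reusing the Mutex helper sketched earlier and plain String/Int stand-ins for the real DFA state map:

// Simplified stand-in for DFA.states: look up an entry under the lock,
// or insert and return a new one, all inside a single synchronized block.
final class StateCache {
    private var states = [String: Int]()
    private let statesMutex = Mutex()

    func getOrInsert(_ key: String) -> Int {
        return statesMutex.synchronized { () -> Int in
            if let existing = states[key] {
                return existing
            }
            let newState = states.count
            states[key] = newState
            return newState
        }
    }
}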
@@ -265,13 +265,18 @@ open class ParserATNSimulator: ATNSimulator {
     internal var _startIndex: Int = 0
     internal var _outerContext: ParserRuleContext!
     internal var _dfa: DFA?

+    /// mutex for DFAState change
+    private var dfaStateMutex = Mutex()
+
+    /// mutex for changes in a DFAStates map
+    private var dfaStatesMutex = Mutex()
+
-    /// Testing only!
-    // public convenience init(_ atn : ATN, _ decisionToDFA : [DFA],
-    //                         _ sharedContextCache : PredictionContextCache)
-    // {
-    //     self.init(nil, atn, decisionToDFA, sharedContextCache);
-    // }
+    // /// Testing only!
+    // public convenience init(_ atn : ATN, _ decisionToDFA : [DFA],
+    //                         _ sharedContextCache : PredictionContextCache) {
+    //     self.init(nil, atn, decisionToDFA, sharedContextCache);
+    // }

     public init(_ parser: Parser, _ atn: ATN,
                 _ decisionToDFA: [DFA],
@@ -1972,7 +1977,7 @@ open class ParserATNSimulator: ATNSimulator {
         guard let from = from else {
             return to
         }
-        synced(from) {
+        dfaStateMutex.synchronized {
             [unowned self] in
             if from.edges == nil {
                 from.edges = [DFAState?](repeating: nil, count: self.atn.maxTokenType + 1 + 1) //new DFAState[atn.maxTokenType+1+1];
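The closure handed to dfaStateMutex.synchronized above captures self unowned. Because the closure is non-escaping and finishes before the enclosing method returns, the unowned capture is safe and merely avoids an extra strong reference. A minimal sketch with illustrative names (Lock and EdgeTable are not runtime types):

import Foundation

// Illustrative lock wrapper with a closure API, similar in spirit to the
// runtime's Mutex.synchronized; not the actual ANTLR type.
final class Lock {
    private let lock = NSLock()
    func sync<R>(_ body: () throws -> R) rethrows -> R {
        lock.lock()
        defer { lock.unlock() }
        return try body()
    }
}

final class EdgeTable {
    private let lock = Lock()
    private var edges: [Int?]?
    private let maxToken = 10

    func ensureEdges() {
        // The closure is non-escaping and runs before ensureEdges returns,
        // so capturing self unowned cannot outlive self.
        lock.sync { [unowned self] in
            if self.edges == nil {
                self.edges = [Int?](repeating: nil, count: self.maxToken + 1 + 1)
            }
        }
    }
}

EdgeTable().ensureEdges()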
@@ -2006,25 +2011,26 @@ open class ParserATNSimulator: ATNSimulator {
         if D == ATNSimulator.ERROR {
             return D
         }
-        //TODO: synced (dfa.states) {
-        //synced (dfa.states) {
-        let existing = dfa.states[D]
-        if existing != nil {
-            return existing!!
-        }

+        return try dfaStatesMutex.synchronized {
+            if let existing = dfa.states[D] {
+                return existing!
+            }
+
+            D.stateNumber = dfa.states.count
+            if !D.configs.isReadonly() {
+                try D.configs.optimizeConfigs(self)
+                D.configs.setReadonly(true)
+            }
+            dfa.states[D] = D
+            if debug {
+                print("adding new DFA state: \(D)")
+            }
-        D.stateNumber = dfa.states.count
-
-        if !D.configs.isReadonly() {
-            try D.configs.optimizeConfigs(self)
-            D.configs.setReadonly(true)
-        }
-
-        dfa.states[D] = D
-        if debug {
-            print("adding new DFA state: \(D)")
-        }
-
-        //}
-        return D
+            return D
+        }
     }

     func reportAttemptingFullContext(_ dfa: DFA, _ conflictingAlts: BitSet?, _ configs: ATNConfigSet, _ startIndex: Int, _ stopIndex: Int) throws {
@@ -20,6 +20,9 @@ public class DFA: CustomStringConvertible {
     /// {@code true} if this DFA is for a precedence decision; otherwise,
     /// {@code false}. This is the backing field for {@link #isPrecedenceDfa}.
    private final var precedenceDfa: Bool

+    /// mutex for DFAState changes.
+    private var dfaStateMutex = Mutex()
+
     public convenience init(_ atnStartState: DecisionState) {
         self.init(atnStartState, 0)
@@ -102,13 +105,11 @@ public class DFA: CustomStringConvertible {
         }
         // synchronization on s0 here is ok. when the DFA is turned into a
         // precedence DFA, s0 will be initialized once and not updated again
-        synced(s0) {
+        dfaStateMutex.synchronized {
             // s0.edges is never null for a precedence DFA
             if precedence >= edges.count {
                 let increase = [DFAState?](repeating: nil, count: (precedence + 1 - edges.count))
                 s0.edges = edges + increase
-                //Array( self.s0!.edges![0..<precedence + 1])
-                //s0.edges = Arrays.copyOf(s0.edges, precedence + 1);
             }

             s0.edges[precedence] = startState
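The precedence-DFA hunk grows s0.edges by appending a run of nils rather than porting Java's Arrays.copyOf. A small standalone illustration of that copy-and-pad idiom on a plain optional array (types and names here are illustrative only):

// Pad an optional array so that `index` becomes addressable, mirroring
// the `edges + increase` line above.
func padded<T>(_ array: [T?], toInclude index: Int) -> [T?] {
    guard index >= array.count else { return array }
    let increase = [T?](repeating: nil, count: index + 1 - array.count)
    return array + increase
}

var edges: [Int?] = [nil, 7]
edges = padded(edges, toInclude: 4)
edges[4] = 42
print(edges)  // [nil, Optional(7), nil, nil, Optional(42)]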
@@ -52,7 +52,7 @@ public class Utils {

         do {
             fileContents = try String(contentsOfFile: path, encoding: encoding)
-        } catch _ as NSError {
+        } catch {
             return [Character]()
         }

@@ -68,22 +68,20 @@ public class Utils {
         var fileContents: String? = nil
         do {
             fileContents = try String(contentsOfFile: path!, encoding: encoding)
-        } catch _ as NSError {
+        } catch {
             return ""
         }

         return fileContents ?? ""
     }

     public static func readFile2StringByPath(_ path: String, _ encoding: String.Encoding = String.Encoding.utf8) -> String {

         //let path = fileName.stringByExpandingTildeInPath
         var fileContents: String? = nil

         do {
-            fileContents = try NSString(contentsOfFile: path, encoding: String.Encoding.utf8.rawValue) as String //try String(contentsOfFile: path!, encoding: encoding)
-        } catch _ as NSError {
+            fileContents = try String(contentsOfFile: path, encoding: String.Encoding.utf8)
+        } catch {
             return ""
         }

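Both readFile hunks drop the catch _ as NSError pattern for a plain catch, which does not depend on Error-to-NSError bridging and so behaves identically with corelibs Foundation on Linux. A hedged sketch of the same read-or-default shape (path and fallback value are arbitrary):

import Foundation

// Standalone sketch: read a file's contents, falling back to a default.
func readFileOrEmpty(_ path: String, encoding: String.Encoding = .utf8) -> String {
    do {
        return try String(contentsOfFile: path, encoding: encoding)
    } catch {
        // A plain catch needs no NSError bridging and works the same on Linux.
        return ""
    }
}

print(readFileOrEmpty("/tmp/does-not-exist.txt"))  // prints an empty line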
@@ -2,36 +2,17 @@
 /// Use of this file is governed by the BSD 3-clause license that
 /// can be found in the LICENSE.txt file in the project root.

-//import Cocoa
-
-#if os(OSX)
-
-import Cocoa
-
-#elseif os(iOS)
-
-import UIKit
-
-#endif
 import Foundation

 //http://stackoverflow.com/questions/28182441/swift-how-to-get-substring-from-start-to-last-index-of-character
 //https://github.com/williamFalcon/Bolt_Swift/blob/master/Bolt/BoltLibrary/String/String.swift

 extension String {

     func trim() -> String {
         return self.trimmingCharacters(in: CharacterSet.whitespaces)
     }

     func split(_ separator: String) -> [String] {
         return self.components(separatedBy: separator)
     }

     func replaceAll(_ from: String, replacement: String) -> String {
         return self.replacingOccurrences(of: from, with: replacement, options: NSString.CompareOptions.literal, range: nil)
     }

     func containsIgnoreCase(_ find: String) -> Bool {
         return self.lowercased().range(of: find.lowercased()) != nil
     }
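For reference, a usage sketch of the String helpers shown in the hunk above. The extension is redeclared locally so the snippet is self-contained; the signatures simply mirror the ones in the diff.

import Foundation

// Local copies of the helpers from the hunk above, so the sketch compiles on its own.
extension String {
    func trim() -> String {
        return self.trimmingCharacters(in: CharacterSet.whitespaces)
    }
    func split(_ separator: String) -> [String] {
        return self.components(separatedBy: separator)
    }
    func replaceAll(_ from: String, replacement: String) -> String {
        return self.replacingOccurrences(of: from, with: replacement, options: NSString.CompareOptions.literal, range: nil)
    }
    func containsIgnoreCase(_ find: String) -> Bool {
        return self.lowercased().range(of: find.lowercased()) != nil
    }
}

let s = "  Hello, ANTLR  "
print(s.trim())                                             // "Hello, ANTLR"
print(s.trim().split(", "))                                 // ["Hello", "ANTLR"]
print(s.replaceAll("ANTLR", replacement: "world").trim())   // "Hello, world"
print(s.containsIgnoreCase("antlr"))                        // true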
@@ -53,7 +34,7 @@ extension String {
     func indexOf(_ target: String, startIndex: Int) -> Int {

         let startRange = self.characters.index(self.startIndex, offsetBy: startIndex)
-        let range = self.range(of: target, options: NSString.CompareOptions.literal, range: startRange..<self.endIndex)
+        let range = self.range(of: target, options: .literal, range: startRange..<self.endIndex)

         if let range = range {
@@ -171,7 +152,6 @@ extension String {
         }
     }

-
     var result = ""
     var position = startIndex

@@ -204,8 +184,8 @@ extension String {
 }

 extension String {

     static let htmlEscapedDictionary = [
         "&amp;": "&",
         "&quot;": "\"",
         "&apos;": "'",
@@ -213,13 +193,14 @@ extension String {
         "’": "'",
         "–": "'",
         "&gt;": ">",
-        "&lt;": "<"]
+        "&lt;": "<"
+    ]

     public var escapedHtmlString: String {
         var newString = "\(self)"

         for (key, value) in String.htmlEscapedDictionary {
-            newString = newString.replaceAll(value, replacement: key)
+            newString = newString.replacingOccurrences(of: value, with: key)
         }
         return newString
     }
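escapedHtmlString now leans on Foundation's replacingOccurrences(of:with:) instead of the custom replaceAll helper. A trimmed-down sketch of the same entity-mapping idea; the table below is a subset, the function name is invented, and an ordered array is used so the ampersand is always escaped first:

import Foundation

// Illustrative subset of an entity table: (entity, character) pairs,
// with "&" first so later replacements do not re-escape it.
let htmlEntities: [(String, String)] = [
    ("&amp;", "&"),
    ("&gt;", ">"),
    ("&lt;", "<")
]

// Replace each character with its entity, as escapedHtmlString does.
func escapeHtml(_ input: String) -> String {
    var newString = input
    for (entity, character) in htmlEntities {
        newString = newString.replacingOccurrences(of: character, with: entity)
    }
    return newString
}

print(escapeHtml("a < b && b > c"))  // a &lt; b &amp;&amp; b &gt; c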
@@ -13,7 +13,7 @@
 import Foundation

 func errPrint(_ msg: String) {
-    fputs(msg + "\n", __stderrp)
+    fputs(msg + "\n", stderr)
 }

 public func +(lhs: String, rhs: Int) -> String {
@@ -57,12 +57,6 @@ func >>>(lhs: Int, rhs: Int) -> Int {
     return Int(bitPattern: left >> right)
 }

-func synced(_ lock: AnyObject, closure: () -> ()) {
-    objc_sync_enter(lock)
-    closure()
-    objc_sync_exit(lock)
-}
-
 func intChar2String(_ i: Int) -> String {
     return String(Character(integerLiteral: i))
 }
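Two portability points sit behind these hunks: __stderrp is the Darwin-specific spelling of the C stderr stream, while stderr resolves under both Darwin and Glibc, and the objc_sync-based synced(_:closure:) helper goes away because the Objective-C runtime is not available to Swift on Linux. A minimal sketch of the portable error-print shape:

import Foundation

// Write a line to standard error; `stderr` is declared by both Darwin and
// Glibc, so this compiles unchanged on macOS and Linux.
func errPrint(_ msg: String) {
    fputs(msg + "\n", stderr)
}

errPrint("something went wrong")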
@@ -0,0 +1,30 @@
+import Foundation
+
+/// Using class so it can be shared even if
+/// it appears to be a field in a class.
+class Mutex {
+
+    /// The mutex instance.
+    private var mutex = pthread_mutex_t()
+
+    /// Initialization
+    init() {
+        pthread_mutex_init(&mutex, nil)
+    }
+
+    /// Running the supplied closure synchronously.
+    ///
+    /// - Parameter closure: the closure to run
+    /// - Returns: the value returned by the closure
+    /// - Throws: the exception populated by the closure run
+    @discardableResult
+    func synchronized<R>(closure: () throws -> R) rethrows -> R {
+        pthread_mutex_lock(&mutex)
+        defer {
+            pthread_mutex_unlock(&mutex)
+        }
+        return try closure()
+    }
+
+}
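The new Mutex class wraps a pthread_mutex_t behind a closure API and passes the closure's return value through. A short usage sketch; the Counter type is hypothetical and only Mutex itself comes from the hunk above:

import Foundation

// Assumes the Mutex class introduced above is visible in this module.
final class Counter {
    private let mutex = Mutex()
    private var value = 0

    // The closure's return value is passed through by synchronized,
    // so the read-modify-write stays in one critical section.
    func increment() -> Int {
        return mutex.synchronized { () -> Int in
            value += 1
            return value
        }
    }
}

let counter = Counter()
print(counter.increment())  // 1
print(counter.increment())  // 2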
@@ -482,7 +482,7 @@ public class ParseTreePatternMatcher {
     let c: Chunk = chunks[i]
     if c is TextChunk {
         let tc: TextChunk = c as! TextChunk
-        let unescaped: String = tc.getText().replaceAll(escape, replacement: "")
+        let unescaped = tc.getText().replacingOccurrences(of: escape, with: "")
         if unescaped.length < tc.getText().length {
             chunks[i] = TextChunk(unescaped)
         }
@@ -6,7 +6,53 @@ import XCTest
 import Antlr4

 class TokenStreamRewriterTests: XCTestCase {

+    static let allTests = [
+        ("testPreservesOrderOfContiguousInserts", testPreservesOrderOfContiguousInserts),
+        ("testDistinguishBetweenInsertAfterAndInsertBeforeToPreserverOrder2", testDistinguishBetweenInsertAfterAndInsertBeforeToPreserverOrder2),
+        ("testDistinguishBetweenInsertAfterAndInsertBeforeToPreserverOrder", testDistinguishBetweenInsertAfterAndInsertBeforeToPreserverOrder),
+        ("testInsertBeforeTokenThenDeleteThatToken", testInsertBeforeTokenThenDeleteThatToken),
+        ("testLeaveAloneDisjointInsert2", testLeaveAloneDisjointInsert2),
+        ("testLeaveAloneDisjointInsert", testLeaveAloneDisjointInsert),
+        ("testDropPrevCoveredInsert", testDropPrevCoveredInsert),
+        ("testDropIdenticalReplace", testDropIdenticalReplace),
+        ("testOverlappingReplace4", testOverlappingReplace4),
+        ("testOverlappingReplace3", testOverlappingReplace3),
+        ("testOverlappingReplace2", testOverlappingReplace2),
+        ("testOverlappingReplace", testOverlappingReplace),
+        ("testDisjointInserts", testDisjointInserts),
+        ("testCombineInsertOnLeftWithDelete", testCombineInsertOnLeftWithDelete),
+        ("testCombineInsertOnLeftWithReplace", testCombineInsertOnLeftWithReplace),
+        ("testCombine3Inserts", testCombine3Inserts),
+        ("testCombineInserts", testCombineInserts),
+        ("testReplaceSingleMiddleThenOverlappingSuperset", testReplaceSingleMiddleThenOverlappingSuperset),
+        ("testReplaceThenReplaceLowerIndexedSuperset", testReplaceThenReplaceLowerIndexedSuperset),
+        ("testReplaceThenReplaceSuperset", testReplaceThenReplaceSuperset),
+        ("testReplaceSubsetThenFetch", testReplaceSubsetThenFetch),
+        ("testReplaceAll", testReplaceAll),
+        ("testReplaceRangeThenInsertAfterRightEdge", testReplaceRangeThenInsertAfterRightEdge),
+        ("testReplaceRangeThenInsertAtRightEdge", testReplaceRangeThenInsertAtRightEdge),
+        ("testReplaceThenInsertAtLeftEdge", testReplaceThenInsertAtLeftEdge),
+        ("testReplaceThenInsertAfterLastIndex", testReplaceThenInsertAfterLastIndex),
+        ("testInsertThenReplaceLastIndex", testInsertThenReplaceLastIndex),
+        ("testReplaceThenInsertBeforeLastIndex", testReplaceThenInsertBeforeLastIndex),
+        ("test2InsertThenReplaceIndex0", test2InsertThenReplaceIndex0),
+        ("test2InsertMiddleIndex", test2InsertMiddleIndex),
+        ("testInsertThenReplaceSameIndex", testInsertThenReplaceSameIndex),
+        ("testInsertInPriorReplace", testInsertInPriorReplace),
+        ("testReplaceThenDeleteMiddleIndex", testReplaceThenDeleteMiddleIndex),
+        ("test2ReplaceMiddleIndex1InsertBefore", test2ReplaceMiddleIndex1InsertBefore),
+        ("test2ReplaceMiddleIndex", test2ReplaceMiddleIndex),
+        ("testToStringStartStop2", testToStringStartStop2),
+        ("testToStringStartStop", testToStringStartStop),
+        ("testReplaceMiddleIndex", testReplaceMiddleIndex),
+        ("testReplaceLastIndex", testReplaceLastIndex),
+        ("testReplaceIndex0", testReplaceIndex0),
+        ("test2InsertBeforeAfterMiddleIndex", test2InsertBeforeAfterMiddleIndex),
+        ("testInsertAfterLastIndex", testInsertAfterLastIndex),
+        ("testInsertBeforeIndex0", testInsertBeforeInd0)
+    ]
+
     func testInsertBeforeIndex0() throws {
         let input = ANTLRInputStream("abc")
         let lexer = LexerA(input)
@@ -6,6 +6,10 @@ import XCTest
 import Antlr4

 class TokenStreamTests: XCTestCase {
+
+    static let allTests = [
+        ("testBufferedTokenStreamClearFetchEOFWithNewSource", testBufferedTokenStreamClearFetchEOFWithNewSource)
+    ]

     /// Test fetchEOF reset after setTokenSource
     func testBufferedTokenStreamClearFetchEOFWithNewSource() throws {
@@ -6,6 +6,14 @@ import XCTest
 import Antlr4

 class VisitorTests: XCTestCase {
+    static let allTests = [
+        ("testCalculatorVisitor", testCalculatorVisitor),
+        ("testShouldNotVisitTerminal", testShouldNotVisitTerminal),
+        ("testShouldNotVisitEOF", testShouldNotVisitEOF),
+        ("testVisitErrorNode", testVisitErrorNode),
+        ("testVisitTerminalNode", testVisitTerminalNode)
+    ]
+
     ///
     /// This test verifies the basic behavior of visitors, with an emphasis on
     /// {@link AbstractParseTreeVisitor#visitTerminal}.
@@ -0,0 +1,13 @@
+#if os(Linux)
+
+import XCTest
+@testable import Antlr4Tests
+
+XCTMain([
+    // Antlr4Tests
+    testCase(TokenStreamTests.allTests),
+    testCase(TokenStreamRewriterTests.allTests),
+    testCase(VisitorTests.allTests)
+])
+
+#endif
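LinuxMain.swift exists because, at the time of this commit, XCTest on Linux could not discover tests through Objective-C runtime reflection, so each XCTestCase lists its tests in a static allTests array and XCTMain runs them. A hedged sketch of how an additional suite would plug in (the suite and test names below are invented):

// Hypothetical example of the pattern used above; not part of the commit.
import XCTest

class IntervalSetSmokeTests: XCTestCase {
    // Linux test runner: every test must be listed here by hand.
    static let allTests = [
        ("testEmptySetHasNoElements", testEmptySetHasNoElements)
    ]

    func testEmptySetHasNoElements() throws {
        XCTAssertTrue([Int]().isEmpty)
    }
}

// LinuxMain.swift would then add:
// testCase(IntervalSetSmokeTests.allTests)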
@@ -1024,7 +1024,7 @@ ContextRuleListIndexedGetterDecl(r) ::= <<

 >>

-LexerRuleContext() ::= "antlr4::RuleContext"
+LexerRuleContext() ::= "RuleContext"

 // The rule context name is the rule followed by a suffix; e.g. r becomes rContext.
 RuleContextNameSuffix() ::= "Context"