Added (part of) a Swift backend.
This comes from https://github.com/janyou/ANTLR-Swift-Target and is marked Copyright (c) 2016 janyou on top of the BSD license, with the copyrights for Terence Parr and Sam Harwell retained from the original ANTLR source.
This commit is contained in:
parent
69d9cfe302
commit
c989064e32
|
@ -0,0 +1,428 @@
|
|||
// Map of "FileName.testName" -> ignore reason for tests that must be skipped
// for the Swift target; absent keys default to false (test is not ignored).
IgnoredTests ::= [
	default: false
]
|
||||
|
||||
Generating target Swift
|
||||
// Emits one generated JUnit test class per grammar-test file; <file> carries
// the metadata produced by TestGenerator (name, options, list of tests).
TestFile(file) ::= <<
/* This file is generated by TestGenerator, any edits will be overwritten by the next generation. */
package org.antlr.v4.test.runtime.swift;

import org.junit.Test;
import org.junit.Ignore;

<if(file.Options.("ImportErrorQueue"))>
import org.antlr.v4.test.runtime.java.ErrorQueue;
<endif>
<if(file.Options.("ImportGrammar"))>
import org.antlr.v4.tool.Grammar;
<endif>

@SuppressWarnings("unused")
public class Test<file.name> extends BaseTest {

	<file.tests:{test | <test>}; separator="\n", wrap, anchor>

}<\n>
>>
|
||||
|
||||
// Emits a JUnit method that generates a lexer from <test.Grammar>, runs it on
// <test.Input> via execLexer, and asserts the expected output/errors.
LexerTestMethod(test) ::= <<
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
<testAnnotations(test)>
public void test<test.name>() throws Exception {
	mkdir(tmpdir);

	<test.SlaveGrammars:{grammar |
	String slave_<grammar> =<writeStringLiteral(test.SlaveGrammars.(grammar))>;
	writeFile(tmpdir, "<grammar>.g4", slave_<grammar>);
	}; separator="\n">
	<test.Grammar:{grammar |
	<buildStringLiteral(test.Grammar.(grammar), "grammar")>

	<if(test.AfterGrammar)>
	<test.AfterGrammar>
	<endif>
	String input =<writeStringLiteral(test.Input)>;
	String found = execLexer("<grammar>.g4", grammar, "<grammar><if(test.Options.("CombinedGrammar"))>Lexer<endif>", input, <writeBoolean(test.Options.("ShowDFA"))>);
	assertEquals(<writeStringLiteral(test.Output)>, found);
	<if(!isEmpty.(test.Errors))>
	assertEquals(<writeStringLiteral(test.Errors)>, this.stderrDuringParse);
	<else>
	assertNull(this.stderrDuringParse);
	<endif>
	}>
}

>>

// Composite lexer tests need no extra handling for Swift; reuse the plain form.
CompositeLexerTestMethod(test) ::= <<
<LexerTestMethod(test)>
>>
|
||||
|
||||
// Emits a JUnit method that generates a parser (and lexer) from the grammar,
// runs <test.Rule> on <test.Input> via execParser, and asserts output/errors.
// Slave grammars are either pre-built (SlaveIsLexer) or just written to disk.
ParserTestMethod(test) ::= <<
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
<testAnnotations(test)>
public void test<test.name>() throws Exception {
	mkdir(tmpdir);

	<test.SlaveGrammars:{grammar |
	String slave_<grammar> =<writeStringLiteral(test.SlaveGrammars.(grammar))>;
	<if(test.Options.("SlaveIsLexer"))>
	rawGenerateAndBuildRecognizer("<grammar>.g4", slave_<grammar>, null, "<grammar>");
	<else>
	writeFile(tmpdir, "<grammar>.g4", slave_<grammar>);
	<endif>
	}; separator="\n">
	<test.Grammar:{grammar |
	<buildStringLiteral(test.Grammar.(grammar), "grammar")>

	<test.AfterGrammar>

	String input =<writeStringLiteral(test.Input)>;
	String found = execParser("<grammar>.g4", grammar, "<grammar>Parser", "<grammar>Lexer", "<test.Rule>", input, <writeBoolean(test.Options.("Debug"))>);
	assertEquals(<writeStringLiteral(test.Output)>, found);
	<if(!isEmpty.(test.Errors))>
	assertEquals(<writeStringLiteral(test.Errors)>, this.stderrDuringParse);
	<else>
	assertNull(this.stderrDuringParse);
	<endif>
	}>
}

>>

// Composite parser tests need no extra handling for Swift; reuse the plain form.
CompositeParserTestMethod(test) ::= <<
<ParserTestMethod(test)>
>>
|
||||
|
||||
// Shared helper method for parameterized tests: runs the parser on a caller
// supplied input string and returns what the recognizer printed.
AbstractParserTestMethod(test) ::= <<
String test<test.name>(String input) throws Exception {
	String grammar = <test.grammar.lines:{ line | "<line>};separator="\\n\" +\n", wrap, anchor>";
	return execParser("<test.grammar.grammarName>.g4", grammar, "<test.grammar.grammarName>Parser", "<test.grammar.grammarName>Lexer", "<test.startRule>", input, <test.debug>);
}

>>

// One concrete JUnit method per input/expected-output pair; delegates to the
// abstract helper generated above (test<test.baseName>).
ConcreteParserTestMethod(test) ::= <<
<testAnnotations(test)>
public void test<test.name>() throws Exception {
	String found = test<test.baseName>("<test.input>");
	assertEquals("<test.expectedOutput>", found);
	<if(test.expectedErrors)>
	assertEquals("<test.expectedErrors>", this.stderrDuringParse);
	<else>
	assertNull(this.stderrDuringParse);
	<endif>
}

>>
|
||||
|
||||
// Emits the @Test annotation (with optional timeout) plus an @Ignore when the
// test carries an Ignore option or appears in the IgnoredTests dictionary.
testAnnotations(test) ::= <%
@Test
<if(test.Options.("Timeout"))>
(timeout = <test.Options.("Timeout")>)
<endif>
<if(test.Options.("Ignore"))>
<\n>@Ignore(<writeStringLiteral(test.Options.("Ignore"))>)
<elseif(IgnoredTests.(({<file.name>.<test.name>})))>
<\n>@Ignore(<writeStringLiteral(IgnoredTests.(({<file.name>.<test.name>})))>)
<endif>
%>
|
||||
|
||||
// Emits Java code that assembles <text> into a String named <variable> via a
// presized StringBuilder, one append per line.
buildStringLiteral(text, variable) ::= <<
StringBuilder <variable>Builder = new StringBuilder(<strlen.(text)>);
<lines.(text):{line|<variable>Builder.append("<escape.(line)>");}; separator="\n">
String <variable> = <variable>Builder.toString();
>>

// Renders <text> as a Java string-literal expression; empty text becomes "".
writeStringLiteral(text) ::= <%
<if(isEmpty.(text))>
""
<else>
<writeLines(lines.(text))>
<endif>
%>

// Multi-line text becomes a "+"-concatenated chain of literals, one per line;
// single-line text is a single literal.
writeLines(textLines) ::= <%
<if(rest(textLines))>
<textLines:{line|
<\n>	"<escape.(line)>}; separator="\" +">"
<else>
"<escape.(first(textLines))>"
<endif>
%>
|
||||
|
||||
// Renders <text> as a quoted, escaped string literal.
string(text) ::= <<
"<escape.(text)>"
>>

// Renders a Java boolean literal: "true" only for a present, non-empty value.
writeBoolean(o) ::= "<if(o && !isEmpty.(o))>true<else>false<endif>"

// Swift print with trailing newline.
writeln(s) ::= <<print(<s>)>>

// Swift print without trailing newline; single quotes normalized for Swift.
write(s) ::= <<print(<s; format="replaceSingleQuotation">, terminator: "")>>

False() ::= "false"

True() ::= "true"

Not(v) ::= "!<v>"

// Assertions are a no-op in the Swift runtime tests.
Assert(s) ::= ""
|
||||
|
||||
// --- Swift translations of the grammar-action snippets used by the shared
// --- runtime test descriptors. Each template maps one abstract action to
// --- concrete Swift source embedded in the generated recognizer.
Cast(t,v) ::= "((<v> as! <t>))"

Append(a,b) ::= "<a> + <b>"

Concat(a,b) ::= "<a><b>"

DeclareLocal(s,v) ::= "var <s> = <v>"

AssertIsList(v) ::= "var __ttt__ = <v>;" // just use static type system

AssignLocal(s,v) ::= "<s> = <v>"

InitIntMember(n,v) ::= <%var <n> = <v>%>

InitBooleanMember(n,v) ::= <%var <n> = <v>%>

GetMember(n) ::= <%self.<n>%>

SetMember(n,v) ::= <%self.<n> = <v>%>

AddMember(n,v) ::= <%self.<n> += <v>%>

PlusMember(v,n) ::= <%<v> + self.<n>%>

MemberEquals(n,v) ::= <%self.<n> == <v>%>

ModMemberEquals(n,m,v) ::= <%self.<n> % <m> == <v>%>

ModMemberNotEquals(n,m,v) ::= <%self.<n> % <m> != <v>%>

DumpDFA() ::= "self.dumpDFA()"

Pass() ::= ""

StringList() ::= "Array\<String>"

BuildParseTrees() ::= "setBuildParseTree(true)"

BailErrorStrategy() ::= <%setErrorHandler(BailErrorStrategy())%>

ToStringTree(s) ::= <%<s>.toStringTree(self)%>

Column() ::= "self.getCharPositionInLine()"

Text() ::= "self.getText()"

ValEquals(a,b) ::= <%<a>==<b>%>

TextEquals(a) ::= <%self.getText() == "<a>"%>

PlusText(a) ::= <%"<a>" + self.getText()%>

InputText() ::= "try self._input.getText()"

LTEquals(i, v) ::= <%try self._input.LT(<i>)?.getText() == <v>%>

LANotEquals(i, v) ::= <%try self._input.LA(<i>) != <v>%>

TokenStartColumnEquals(i) ::= <%self._tokenStartCharPositionInLine == <i>%>

ImportListener(X) ::= ""

OptionalValue(value) ::= "<value>!"

GetExpectedTokenNames() ::= "try self.getExpectedTokens().toString(self.tokenNames)"

RuleInvocationStack() ::= "getRuleInvocationStack().description.replacingOccurrences(of: \"\\\"\", with: \"\")"

LL_EXACT_AMBIG_DETECTION() ::= <<_interp.setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);>>
|
||||
|
||||
// Grammar @members block declaring a predicate-style Property() used by the
// shared parser-property test.
ParserPropertyMember() ::= <<
@members {
func Property() -> Bool {
	return true
}
}
>>
|
||||
|
||||
// Swift body for the PositionAdjustingLexer runtime test: a lexer that swaps
// in a custom ATN simulator so it can rewind the accept position for the
// TOKENS keyword and for identifiers (mirrors the Java-target test lexer).
PositionAdjustingLexer() ::= <<

override
open func nextToken() throws -> Token {
	// Lazily replace the default simulator with the position-adjusting one.
	if (!(_interp is PositionAdjustingLexerATNSimulator)) {
		_interp = PositionAdjustingLexerATNSimulator(self, PositionAdjustingLexer._ATN, PositionAdjustingLexer._decisionToDFA, PositionAdjustingLexer._sharedContextCache)
	}

	return try super.nextToken()
}

override
open func emit() -> Token {
	switch (_type) {
	case PositionAdjustingLexer.TOKENS:
		handleAcceptPositionForKeyword("tokens")
	case PositionAdjustingLexer.LABEL:
		handleAcceptPositionForIdentifier()
	default:
		break
	}

	return super.emit()
}

private func handleAcceptPositionForIdentifier() -> Bool {
	let tokenText = getText()
	var identifierLength = 0
	while ((identifierLength \< tokenText.length) && isIdentifierChar(tokenText[tokenText.characters.index(tokenText.startIndex, offsetBy: identifierLength)])) {
		identifierLength += 1
	}

	if (getInputStream()!.index() > _tokenStartCharIndex + identifierLength) {
		let offset = identifierLength - 1
		(getInterpreter() as! PositionAdjustingLexerATNSimulator).resetAcceptPosition(getInputStream()!, _tokenStartCharIndex + offset, _tokenStartLine, _tokenStartCharPositionInLine + offset)
		return true
	}

	return false
}

private func handleAcceptPositionForKeyword(_ keyword:String) -> Bool {
	if getInputStream()!.index() > _tokenStartCharIndex + keyword.length {
		let offset = keyword.length - 1
		(getInterpreter() as! PositionAdjustingLexerATNSimulator).resetAcceptPosition(getInputStream()!, _tokenStartCharIndex + offset, _tokenStartLine, _tokenStartCharPositionInLine + offset)
		return true
	}

	return false
}

//public func getInterpreter() -> PositionAdjustingLexerATNSimulator {
//	return super.getInterpreter() as! PositionAdjustingLexerATNSimulator
//}

private func isIdentifierChar(_ c: Character) -> Bool {
	return (c >= "0" && c \<= "9") || (c >= "a" && c \<= "z") || c >= "A" && c \<= "Z" || c == "_"
}

class PositionAdjustingLexerATNSimulator: LexerATNSimulator {

	init(_ recog: Lexer,_ atn: ATN,
		_ decisionToDFA: [DFA],
		_ sharedContextCache:PredictionContextCache)
	{
		super.init(recog, atn, decisionToDFA, sharedContextCache)
	}

	// Rewind the input and restore line/column so the token is re-accepted
	// at the adjusted position.
	func resetAcceptPosition(_ input: CharStream,_ index: Int,_ line: Int,_ charPositionInLine: Int) {
		try! input.seek(index)
		self.line = line
		self.charPositionInLine = charPositionInLine
		try! consume(input)
	}

}

>>
|
||||
|
||||
// --- Listener bodies (Swift) injected into grammars by the shared
// --- parse-tree-listener runtime tests.

// Prints every terminal's text as it is visited.
BasicListener(X) ::= <<
open class LeafListener: TBaseListener {
	override
	open func visitTerminal(_ node: TerminalNode) {
		print(node.getSymbol()?.getText() ?? "")
	}
}
>>

// Walks <s> with a fresh LeafListener.
WalkListener(s) ::= <<
let walker = ParseTreeWalker()
try! walker.walk(LeafListener(), <s>)
>>

// Exercises indexed token getters (INT(i)) vs. list getter (INT()).
TokenGetterListener(X) ::= <<
open class LeafListener: TBaseListener {
	override
	open func exitA(_ ctx: TParser.AContext) {
		if (ctx.getChildCount() == 2) {
			print("\(ctx.INT(0)?.getSymbol()?.getText() ?? "") \(ctx.INT(1)?.getSymbol()?.getText() ?? "") \(ctx.INT())")
		}
		else {
			print(ctx.ID()?.getSymbol() ?? "")
		}
	}
}

>>

// Exercises indexed rule-context getters (b(i)) vs. list getter (b()).
RuleGetterListener(X) ::= <<
open class LeafListener: TBaseListener {
	override
	open func exitA(_ ctx: TParser.AContext) {
		if (ctx.getChildCount() == 2) {
			print("\(ctx.b(0)?.start?.getText() ?? "") \(ctx.b(1)?.start?.getText() ?? "") \(ctx.b()[0].start?.getText() ?? "")")
		} else {
			print(ctx.b(0)?.start?.getText() ?? "")
		}
	}
}
>>

// Exercises getters on a left-recursive rule e.
LRListener(X) ::= <<
open class LeafListener: TBaseListener {
	override
	open func exitE(_ ctx: TParser.EContext) {
		if (ctx.getChildCount() == 3) {
			print("\(ctx.e(0)?.start?.getText() ?? "") \(ctx.e(1)?.start?.getText() ?? "") \(ctx.e()[0].start?.getText() ?? "")")
		} else {
			print(ctx.INT()?.getSymbol()?.getText() ?? "")
		}
	}
}

>>

// Exercises labeled-alternative contexts (Call, Int) of a left-recursive rule.
LRWithLabelsListener(X) ::= <<
open class LeafListener: TBaseListener {
	override
	open func exitCall(_ ctx: TParser.CallContext) {
		print("\(ctx.e()?.start?.getText() ?? "") \(ctx.eList()!)")
	}
	override
	open func exitInt(_ ctx: TParser.IntContext) {
		print(ctx.INT()?.getSymbol()?.getText() ?? "")
	}
}
>>
|
||||
|
||||
// Compile-time-only check of list getters; kept commented out for Swift.
DeclareContextListGettersFunction() ::= <<
func foo() {
	//let s: SContext? = nil
	//let a: [AContext]? = s?.a()
	//let b: [BContext]? = s?.b()
}
>>

Declare_foo() ::= <<
public func foo() {print("foo")}
>>

Invoke_foo() ::= "foo()"

// Predicate helper that prints each evaluation, used by semantic-predicate tests.
Declare_pred() ::= <<
func pred(_ v: Bool) -> Bool {
	print("eval=\(v)")
	return v
}
>>

Invoke_pred(v) ::= <<self.pred(<v>)>>

// Dictionary-based emptiness test: only the empty string maps to true.
isEmpty ::= [
	"": true,
	default: false
]
|
|
@ -0,0 +1,890 @@
|
|||
/*
|
||||
* [The "BSD license"]
|
||||
* Copyright (c) 2012 Terence Parr
|
||||
* Copyright (c) 2012 Sam Harwell
|
||||
* Copyright (c) 2016 Janyou
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
package org.antlr.v4.test.runtime.swift;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.runtime.CommonTokenStream;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.TokenSource;
|
||||
import org.antlr.v4.runtime.WritableToken;
|
||||
import org.antlr.v4.runtime.misc.Utils;
|
||||
import org.antlr.v4.test.runtime.java.ErrorQueue;
|
||||
import org.antlr.v4.tool.ANTLRMessage;
|
||||
import org.antlr.v4.tool.DefaultToolListener;
|
||||
import org.antlr.v4.tool.GrammarSemanticsMessage;
|
||||
import org.junit.Before;
|
||||
import org.junit.rules.TestRule;
|
||||
import org.junit.rules.TestWatcher;
|
||||
import org.junit.runner.Description;
|
||||
import org.stringtemplate.v4.ST;
|
||||
|
||||
import java.io.*;
|
||||
import java.net.URL;
|
||||
import java.util.*;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public abstract class BaseTest {
|
||||
public static final String newline = System.getProperty("line.separator");
|
||||
public static final String pathSep = System.getProperty("path.separator");
|
||||
|
||||
/**
|
||||
* When the {@code antlr.preserve-test-dir} runtime property is set to
|
||||
* {@code true}, the temporary directories created by the test run will not
|
||||
* be removed at the end of the test run, even for tests that completed
|
||||
* successfully.
|
||||
*
|
||||
* <p>
|
||||
* The default behavior (used in all other cases) is removing the temporary
|
||||
* directories for all tests which completed successfully, and preserving
|
||||
* the directories for tests which failed.</p>
|
||||
*/
|
||||
public static final boolean PRESERVE_TEST_DIR = Boolean.parseBoolean(System.getProperty("antlr-preserve-swift-test-dir"));
|
||||
|
||||
/**
|
||||
* The base test directory is the directory where generated files get placed
|
||||
* during unit test execution.
|
||||
*
|
||||
* <p>
|
||||
* The default value for this property is the {@code java.io.tmpdir} system
|
||||
* property, and can be overridden by setting the
|
||||
* {@code antlr.java-test-dir} property to a custom location. Note that the
|
||||
* {@code antlr.java-test-dir} property directly affects the
|
||||
* {@link #CREATE_PER_TEST_DIRECTORIES} value as well.</p>
|
||||
*/
|
||||
public static final String BASE_TEST_DIR;
|
||||
|
||||
/**
|
||||
* When {@code true}, a temporary directory will be created for each test
|
||||
* executed during the test run.
|
||||
*
|
||||
* <p>
|
||||
* This value is {@code true} when the {@code antlr.java-test-dir} system
|
||||
* property is set, and otherwise {@code false}.</p>
|
||||
*/
|
||||
public static final boolean CREATE_PER_TEST_DIRECTORIES;
|
||||
|
||||
public static final String EXEC_NAME = "Test";
|
||||
|
||||
public static String ANTLR_FRAMEWORK_DIR;
|
||||
|
||||
static {
|
||||
String baseTestDir = System.getProperty("antlr-swift-test-dir");
|
||||
boolean perTestDirectories = false;
|
||||
if (baseTestDir == null || baseTestDir.isEmpty()) {
|
||||
baseTestDir = System.getProperty("java.io.tmpdir");
|
||||
perTestDirectories = true;
|
||||
}
|
||||
|
||||
if (!new File(baseTestDir).isDirectory()) {
|
||||
throw new UnsupportedOperationException("The specified base test directory does not exist: " + baseTestDir);
|
||||
}
|
||||
|
||||
BASE_TEST_DIR = baseTestDir;
|
||||
CREATE_PER_TEST_DIRECTORIES = perTestDirectories;
|
||||
|
||||
//add antlr.swift
|
||||
final ClassLoader loader = Thread.currentThread().getContextClassLoader();
|
||||
//TODO
|
||||
final URL swiftRuntime = loader.getResource("Swift/Antlr4");
|
||||
if ( swiftRuntime==null ) {
|
||||
throw new RuntimeException("Swift runtime file not found at:" + swiftRuntime.getPath());
|
||||
}
|
||||
String swiftRuntimePath = swiftRuntime.getPath();
|
||||
// String swiftRuntimePath = "/Users/janyou/OSXWorks/AntlrSwift/Antlr4/Antlr4";
|
||||
|
||||
//get Antlr4 framework
|
||||
|
||||
try {
|
||||
String commandLine = "find " + swiftRuntimePath + "/ -iname *.swift -not -name merge.swift -exec cat {} ;" ;
|
||||
ProcessBuilder builder = new ProcessBuilder(commandLine.split(" "));
|
||||
builder.redirectError(ProcessBuilder.Redirect.INHERIT);
|
||||
Process p = builder.start();
|
||||
StreamVacuum stdoutVacuum = new StreamVacuum(p.getInputStream());
|
||||
stdoutVacuum.start();
|
||||
p.waitFor();
|
||||
stdoutVacuum.join();
|
||||
|
||||
String antlrSwift = stdoutVacuum.toString();
|
||||
//write to Antlr4
|
||||
ANTLR_FRAMEWORK_DIR = new File(BASE_TEST_DIR, "Antlr4").getAbsolutePath();
|
||||
mkdir(ANTLR_FRAMEWORK_DIR);
|
||||
writeFile(ANTLR_FRAMEWORK_DIR,"Antlr4.swift",antlrSwift);
|
||||
//compile Antlr4 module
|
||||
buildAntlr4Framework();
|
||||
String argsString;
|
||||
} catch (IOException e) {
|
||||
System.out.println(e.getMessage());
|
||||
} catch (InterruptedException e) {
|
||||
e.printStackTrace();
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
Runtime.getRuntime().addShutdownHook(new Thread() {
|
||||
public void run() {
|
||||
// shutdown logic
|
||||
eraseAntlrFrameWorkDir();
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
	/**
	 * Compiles the merged Antlr4.swift into a dylib + module in
	 * {@link #ANTLR_FRAMEWORK_DIR} using the macOS toolchain (xcrun/swiftc).
	 *
	 * @return true if the swiftc process exited with status 0
	 */
	private static boolean buildAntlr4Framework() throws Exception {
		String argsString = "xcrun -sdk macosx swiftc -emit-library -emit-module Antlr4.swift -module-name Antlr4 -module-link-name Antlr4 -Xlinker -install_name -Xlinker " + ANTLR_FRAMEWORK_DIR + "/libAntlr4.dylib ";
		return runProcess(argsString,ANTLR_FRAMEWORK_DIR);
	}
|
||||
|
||||
	// Per-test working directory; assigned in setUp().
	public String tmpdir = null;

	/** If error during parser execution, store stderr here; can't return
	 *  stdout and stderr.  This doesn't trap errors from running antlr.
	 */
	protected String stderrDuringParse;

	// Removes the temp dir after a passing test (unless preservation is
	// requested); failing tests keep their directory for inspection.
	@org.junit.Rule
	public final TestRule testWatcher = new TestWatcher() {

		@Override
		protected void succeeded(Description description) {
			// remove tmpdir if no error.
			if (!PRESERVE_TEST_DIR) {
				eraseTempDir();
			}
		}

	};
|
||||
|
||||
	/**
	 * Chooses this test's working directory: a fresh timestamped subdirectory
	 * of {@link #BASE_TEST_DIR} when per-test directories are enabled,
	 * otherwise the shared base directory (cleared first unless preserved).
	 */
	@Before
	public void setUp() throws Exception {
		if (CREATE_PER_TEST_DIRECTORIES) {
			// new output dir for each test
			String testDirectory = getClass().getSimpleName() + "-" + System.currentTimeMillis();
			tmpdir = new File(BASE_TEST_DIR, testDirectory).getAbsolutePath();
		}
		else {
			tmpdir = new File(BASE_TEST_DIR).getAbsolutePath();
			if (!PRESERVE_TEST_DIR && new File(tmpdir).exists()) {
				eraseFiles(tmpdir);
			}
		}

	}
|
||||
|
||||
// private void copyAntlrFramework(){
|
||||
// writeFile(tmpdir,"Antlr4.swift",ANTLR_FRAMEWORK);
|
||||
// }
|
||||
|
||||
protected Tool newTool(String[] args) {
|
||||
Tool tool = new Tool(args);
|
||||
return tool;
|
||||
}
|
||||
|
||||
protected Tool newTool() {
|
||||
Tool tool = new Tool(new String[] {"-o", tmpdir});
|
||||
return tool;
|
||||
}
|
||||
|
||||
protected String load(String fileName, String encoding)
|
||||
throws IOException
|
||||
{
|
||||
if ( fileName==null ) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String fullFileName = getClass().getPackage().getName().replace('.', '/') + '/' + fileName;
|
||||
int size = 65000;
|
||||
InputStreamReader isr;
|
||||
InputStream fis = getClass().getClassLoader().getResourceAsStream(fullFileName);
|
||||
if ( encoding!=null ) {
|
||||
isr = new InputStreamReader(fis, encoding);
|
||||
}
|
||||
else {
|
||||
isr = new InputStreamReader(fis);
|
||||
}
|
||||
try {
|
||||
char[] data = new char[size];
|
||||
int n = isr.read(data);
|
||||
return new String(data, 0, n);
|
||||
}
|
||||
finally {
|
||||
isr.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
	/**
	 * Runs the ANTLR tool on a grammar already present in tmpdir, forcing the
	 * Swift target and defaulting -o/-lib to tmpdir and -encoding to UTF-8.
	 * Errors/warnings are collected in the returned queue; when no default
	 * listener is attached they are also echoed to stderr along with the
	 * offending grammar text.
	 *
	 * @param grammarFileName grammar file name inside tmpdir
	 * @param defaultListener also attach ANTLR's default console listener
	 * @param extraOptions    extra tool options (may pre-set -o/-lib/-encoding)
	 * @return the queue of tool messages produced during generation
	 */
	protected ErrorQueue antlr(String grammarFileName, boolean defaultListener, String... extraOptions) {
		final List<String> options = new ArrayList<String>();
		Collections.addAll(options, extraOptions);
		options.add("-Dlanguage=Swift");
		if ( !options.contains("-o") ) {
			options.add("-o");
			options.add(tmpdir);
		}
		if ( !options.contains("-lib") ) {
			options.add("-lib");
			options.add(tmpdir);
		}
		if ( !options.contains("-encoding") ) {
			options.add("-encoding");
			options.add("UTF-8");
		}
		options.add(new File(tmpdir,grammarFileName).toString());

		final String[] optionsA = new String[options.size()];
		options.toArray(optionsA);
		Tool antlr = newTool(optionsA);
		ErrorQueue equeue = new ErrorQueue(antlr);
		antlr.addListener(equeue);
		if (defaultListener) {
			antlr.addListener(new DefaultToolListener(antlr));
		}
		antlr.processGrammarsOnCommandLine();

		if ( !defaultListener && !equeue.errors.isEmpty() ) {
			System.err.println("antlr reports errors from "+options);
			for (int i = 0; i < equeue.errors.size(); i++) {
				ANTLRMessage msg = equeue.errors.get(i);
				System.err.println(msg);
			}
			// Dump the grammar so a failure log is self-contained.
			System.out.println("!!!\ngrammar:");
			try {
				System.out.println(new String(Utils.readFile(tmpdir+"/"+grammarFileName)));
			}
			catch (IOException ioe) {
				System.err.println(ioe.toString());
			}
			System.out.println("###");
		}
		if ( !defaultListener && !equeue.warnings.isEmpty() ) {
			System.err.println("antlr reports warnings from "+options);
			for (int i = 0; i < equeue.warnings.size(); i++) {
				ANTLRMessage msg = equeue.warnings.get(i);
				System.err.println(msg);
			}
		}

		return equeue;
	}

	/**
	 * Convenience overload: writes {@code grammarStr} into tmpdir first, then
	 * runs the tool as above.
	 */
	protected ErrorQueue antlr(String grammarFileName, String grammarStr, boolean defaultListener, String... extraOptions) {
		System.out.println("dir "+tmpdir);
		mkdir(tmpdir);
		writeFile(tmpdir, grammarFileName, grammarStr);
		return antlr(grammarFileName, defaultListener, extraOptions);
	}
|
||||
|
||||
	/** Generates a lexer from {@code grammarStr}, runs it on {@code input}, and
	 *  returns the tokens printed to stdout (DFA dump disabled). */
	protected String execLexer(String grammarFileName,
							   String grammarStr,
							   String lexerName,
							   String input)
	{
		return execLexer(grammarFileName, grammarStr, lexerName, input, false);
	}

	/**
	 * Full lexer pipeline: generate the Swift lexer, write the input file and a
	 * driver main.swift, compile everything against the Antlr4 module, run the
	 * binary, and return its stdout. Any stderr captured during the run is
	 * stored in {@link #stderrDuringParse} and echoed.
	 */
	protected String execLexer(String grammarFileName,
							   String grammarStr,
							   String lexerName,
							   String input,
							   boolean showDFA)
	{
		boolean success = rawGenerateRecognizer(grammarFileName,
												grammarStr,
												null,
												lexerName);
		assertTrue(success);
		writeFile(tmpdir, "input", input);
		writeLexerTestFile(lexerName, showDFA);
		addSourceFiles("main.swift");
//		addSourceFiles("Antlr4.swift");

		compile();
		String output = execTest();
		if ( stderrDuringParse!=null && stderrDuringParse.length()>0 ) {
			System.err.println(stderrDuringParse);
		}
		return output;
	}
|
||||
|
||||
Set<String> sourceFiles = new HashSet<String>();
|
||||
|
||||
private void addSourceFiles(String ... files) {
|
||||
for(String file : files)
|
||||
this.sourceFiles.add(file);
|
||||
}
|
||||
	/** Generates and runs a parser on {@code input}; profiling disabled. */
	protected String execParser(String grammarFileName,
								String grammarStr,
								String parserName,
								String lexerName,
								String startRuleName,
								String input, boolean debug)
	{
		return execParser(grammarFileName, grammarStr, parserName,
						  lexerName, startRuleName, input, debug, false);
	}

	/**
	 * Full parser pipeline: generate parser+lexer (with -visitor), write the
	 * input file, then compile and run via rawExecRecognizer, returning the
	 * recognizer's stdout.
	 */
	protected String execParser(String grammarFileName,
								String grammarStr,
								String parserName,
								String lexerName,
								String startRuleName,
								String input, boolean debug,boolean profile)
	{
		boolean success = rawGenerateRecognizer(grammarFileName,
												grammarStr,
												parserName,
												lexerName,
												"-visitor");
		assertTrue(success);
		writeFile(tmpdir, "input", input);
		return rawExecRecognizer(parserName,
								 lexerName,
								 startRuleName,
								 debug,profile);
	}
|
||||
|
||||
	/** Return true if all is well */
	protected boolean rawGenerateRecognizer(String grammarFileName,
											String grammarStr,
											String parserName,
											String lexerName,
											String... extraOptions)
	{
		return rawGenerateRecognizer(grammarFileName, grammarStr, parserName, lexerName, false, extraOptions);
	}

	/**
	 * Runs ANTLR on {@code grammarStr} and, on success, registers every
	 * generated .swift file (lexer/parser, their ATN companions, and
	 * listener/visitor classes depending on {@code extraOptions}) with the
	 * source-file set used by the compile step.
	 *
	 * @return true if all is well (no generation errors)
	 */
	protected boolean rawGenerateRecognizer(String grammarFileName,
											String grammarStr,
											String parserName,
											String lexerName,
											boolean defaultListener,
											String... extraOptions)
	{
		ErrorQueue equeue = antlr(grammarFileName, grammarStr, defaultListener, extraOptions);
		if (!equeue.errors.isEmpty()) {
			return false;
		}

		List<String> files = new ArrayList<String>();
		if ( lexerName!=null ) {
			files.add(lexerName+".swift");
			files.add(lexerName+"ATN.swift");
		}
		if ( parserName!=null ) {
			files.add(parserName+".swift");
			files.add(parserName+"ATN.swift");
			Set<String> optionsSet = new HashSet<String>(Arrays.asList(extraOptions));
			String grammarName = grammarFileName.substring(0, grammarFileName.lastIndexOf('.'));
			// Listener classes are generated unless explicitly suppressed;
			// visitor classes only when requested.
			if (!optionsSet.contains("-no-listener")) {
				files.add(grammarName+"Listener.swift");
				files.add(grammarName+"BaseListener.swift");
			}
			if (optionsSet.contains("-visitor")) {
				files.add(grammarName+"Visitor.swift");
				files.add(grammarName+"BaseVisitor.swift");
			}
		}
		addSourceFiles(files.toArray(new String[files.size()]));
		return true;
	}
|
||||
|
||||
	/**
	 * Writes the Swift driver (main.swift) for either a lexer-only or a full
	 * parser test, then compiles and runs it. Clears
	 * {@link #stderrDuringParse} before the run.
	 */
	protected String rawExecRecognizer(String parserName,
									   String lexerName,
									   String parserStartRuleName,
									   boolean debug,
									   boolean profile)
	{
		this.stderrDuringParse = null;
		if ( parserName==null ) {
			writeLexerTestFile(lexerName, false);
		}
		else {
			writeParserTestFile(parserName,
								lexerName,
								parserStartRuleName,
								debug,
								profile);
		}

		addSourceFiles("main.swift");
//		addSourceFiles("Antlr4.swift");
		return execRecognizer();
	}

	/** Compiles the registered sources and runs the resulting test binary,
	 *  returning its stdout. Note: the compile() result is intentionally not
	 *  checked here; a failed build surfaces as a failed execTest(). */
	public String execRecognizer() {
		compile();
		return execTest();
	}
|
||||
|
||||
public boolean compile() {
|
||||
try {
|
||||
|
||||
if(!buildProject())
|
||||
return false;
|
||||
return true;
|
||||
} catch(Exception e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean buildProject() throws Exception {
|
||||
String fileList = sourceFiles.toString().replace("[", "").replace("]", "")
|
||||
.replace(", ", " ");
|
||||
|
||||
String argsString = "xcrun -sdk macosx swiftc " + fileList + " -o " + EXEC_NAME + " -I "+ ANTLR_FRAMEWORK_DIR + " -L "+ ANTLR_FRAMEWORK_DIR + " -module-link-name Antlr4" ;
|
||||
return runProcess(argsString,tmpdir);
|
||||
}
|
||||
|
||||
	/**
	 * Runs a whitespace-separated command line in {@code execPath}, draining
	 * stdout/stderr on background threads; build stderr is echoed but not
	 * stored (it is toolchain noise, not parse output).
	 *
	 * @return true if the process exited with status 0
	 */
	private static boolean runProcess(String argsString, String execPath) throws IOException, InterruptedException {
		// NOTE(review): split(" ") breaks if any path contains spaces — the
		// callers currently only pass space-free temp paths.
		String[] args = argsString.split(" ");
		System.err.println("Starting build "+ argsString);//Utils.join(args, " "))
		Process process = Runtime.getRuntime().exec(args, null, new File(execPath));
		StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
		StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
		stdoutVacuum.start();
		stderrVacuum.start();
		process.waitFor();
		stdoutVacuum.join();
		stderrVacuum.join();
		if ( stderrVacuum.toString().length()>0 ) {
			//this.stderrDuringParse = stderrVacuum.toString();
			System.err.println("buildProject stderrVacuum: "+ stderrVacuum);
		}
		return process.exitValue()==0;
	}

	/**
	 * Runs the compiled test binary from tmpdir with "input" as its argument
	 * and returns its stdout; stderr (recognition errors) is captured into
	 * {@link #stderrDuringParse}. Returns null if the binary cannot be run.
	 */
	public String execTest() {
		try {
			String exec = tmpdir + "/" + EXEC_NAME ;
			String[] args =
				new String[] { exec,"input"};//new File(tmpdir, "input").getAbsolutePath()
			ProcessBuilder pb = new ProcessBuilder(args);
			pb.directory(new File(tmpdir));
			Process p = pb.start();
			StreamVacuum stdoutVacuum = new StreamVacuum(p.getInputStream());
			StreamVacuum stderrVacuum = new StreamVacuum(p.getErrorStream());
			stdoutVacuum.start();
			stderrVacuum.start();
			p.waitFor();
			stdoutVacuum.join();
			stderrVacuum.join();
			String output = stdoutVacuum.toString();
			if ( stderrVacuum.toString().length()>0 ) {
				this.stderrDuringParse = stderrVacuum.toString();
				System.err.println("execTest stderrVacuum: "+ stderrVacuum);
			}
			return output;
		}
		catch (Exception e) {
			System.err.println("can't exec recognizer");
			e.printStackTrace(System.err);
		}
		return null;
	}
|
||||
|
||||
public void testErrors(String[] pairs, boolean printTree) {
|
||||
for (int i = 0; i < pairs.length; i+=2) {
|
||||
String input = pairs[i];
|
||||
String expect = pairs[i+1];
|
||||
|
||||
String[] lines = input.split("\n");
|
||||
String fileName = getFilenameFromFirstLineOfGrammar(lines[0]);
|
||||
ErrorQueue equeue = antlr(fileName, input, false);
|
||||
|
||||
String actual = equeue.toString(true);
|
||||
actual = actual.replace(tmpdir + File.separator, "");
|
||||
System.err.println(actual);
|
||||
String msg = input;
|
||||
msg = msg.replace("\n","\\n");
|
||||
msg = msg.replace("\r","\\r");
|
||||
msg = msg.replace("\t","\\t");
|
||||
|
||||
org.junit.Assert.assertEquals("error in: "+msg,expect,actual);
|
||||
}
|
||||
}
|
||||
|
||||
public String getFilenameFromFirstLineOfGrammar(String line) {
|
||||
String fileName = "A" + Tool.GRAMMAR_EXTENSION;
|
||||
int grIndex = line.lastIndexOf("grammar");
|
||||
int semi = line.lastIndexOf(';');
|
||||
if ( grIndex>=0 && semi>=0 ) {
|
||||
int space = line.indexOf(' ', grIndex);
|
||||
fileName = line.substring(space+1, semi)+Tool.GRAMMAR_EXTENSION;
|
||||
}
|
||||
if ( fileName.length()==Tool.GRAMMAR_EXTENSION.length() ) fileName = "A" + Tool.GRAMMAR_EXTENSION;
|
||||
return fileName;
|
||||
}
|
||||
|
||||
|
||||
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class<? extends ANTLRMessage> c) {
|
||||
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
|
||||
for (ANTLRMessage m : msgs) {
|
||||
if ( m.getClass() == c ) filtered.add(m);
|
||||
}
|
||||
return filtered;
|
||||
}
|
||||
|
||||
|
||||
public static class StreamVacuum implements Runnable {
|
||||
StringBuilder buf = new StringBuilder();
|
||||
BufferedReader in;
|
||||
Thread sucker;
|
||||
public StreamVacuum(InputStream in) {
|
||||
this.in = new BufferedReader( new InputStreamReader(in) );
|
||||
}
|
||||
public void start() {
|
||||
sucker = new Thread(this);
|
||||
sucker.start();
|
||||
}
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
String line = in.readLine();
|
||||
while (line!=null) {
|
||||
buf.append(line);
|
||||
buf.append('\n');
|
||||
line = in.readLine();
|
||||
}
|
||||
}
|
||||
catch (IOException ioe) {
|
||||
System.err.println("can't read output from process");
|
||||
}
|
||||
}
|
||||
/** wait for the thread to finish */
|
||||
public void join() throws InterruptedException {
|
||||
sucker.join();
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return buf.toString();
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Asserts that {@code equeue} contains an error of the same type and with the
 * same arguments as {@code expectedMessage}. If more than one message was
 * queued, the whole queue is dumped to stderr as a debugging aid.
 */
protected void checkGrammarSemanticsError(ErrorQueue equeue,
                                          GrammarSemanticsMessage expectedMessage)
    throws Exception
{
    ANTLRMessage foundMsg = null;
    // Scan all queued errors; the LAST one matching the expected type wins.
    for (int i = 0; i < equeue.errors.size(); i++) {
        ANTLRMessage m = equeue.errors.get(i);
        if (m.getErrorType()==expectedMessage.getErrorType() ) {
            foundMsg = m;
        }
    }
    assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
    assertTrue("error is not a GrammarSemanticsMessage",
               foundMsg instanceof GrammarSemanticsMessage);
    // Compare the message arguments via their string renderings.
    assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
    if ( equeue.size()!=1 ) {
        System.err.println(equeue);
    }
}
|
||||
|
||||
|
||||
/**
 * A CommonTokenStream that moves tokens of registered types onto the hidden
 * channel as they are fetched, effectively filtering them out for the parser.
 */
public static class FilteringTokenStream extends CommonTokenStream {
    public FilteringTokenStream(TokenSource src) { super(src); }
    // Token types to hide from the parser.
    Set<Integer> hide = new HashSet<Integer>();
    @Override
    protected boolean sync(int i) {
        if (!super.sync(i)) {
            return false;
        }

        Token t = get(i);
        if ( hide.contains(t.getType()) ) {
            ((WritableToken)t).setChannel(Token.HIDDEN_CHANNEL);
        }

        return true;
    }
    public void setTokenTypeChannel(int ttype, int channel) {
        // NOTE(review): the channel argument is ignored — hidden tokens always
        // go to Token.HIDDEN_CHANNEL (see sync above).
        hide.add(ttype);
    }
}
|
||||
|
||||
public static void writeFile(String dir, String fileName, String content) {
|
||||
try {
|
||||
Utils.writeFile(dir+"/"+fileName, content, "UTF-8");
|
||||
}
|
||||
catch (IOException ioe) {
|
||||
System.err.println("can't write file");
|
||||
ioe.printStackTrace(System.err);
|
||||
}
|
||||
}
|
||||
|
||||
protected static void mkdir(String dir) {
|
||||
File f = new File(dir);
|
||||
f.mkdirs();
|
||||
}
|
||||
|
||||
/**
 * Generates main.swift for a parser test: tokenizes the file named in argv[1],
 * parses from the given start rule, and walks the resulting tree with a
 * listener that validates parent/child links (parse-tree shape).
 *
 * @param parserName          generated Swift parser class name
 * @param lexerName           generated Swift lexer class name
 * @param parserStartRuleName rule to invoke as the entry point
 * @param debug               when true, attach a DiagnosticErrorListener
 * @param profile             when true, install a ProfilingATNSimulator and print decision info
 */
protected void writeParserTestFile(String parserName,
                                   String lexerName,
                                   String parserStartRuleName,
                                   boolean debug,
                                   boolean profile)
{

    // Template for the Swift driver. "0..\\<" escapes ST's '<' delimiter to
    // produce Swift's half-open range operator "0..<".
    ST outputFileST = new ST(
            "import Antlr4\n" +
            "import Foundation\n" +
            // Unbuffered stdout so output is not lost if the process aborts.
            "setbuf(__stdoutp, nil)\n" +
            "class TreeShapeListener: ParseTreeListener{\n" +
            " func visitTerminal(_ node: TerminalNode){ }\n" +
            " func visitErrorNode(_ node: ErrorNode){ }\n" +
            " func enterEveryRule(_ ctx: ParserRuleContext) throws { }\n" +
            " func exitEveryRule(_ ctx: ParserRuleContext) throws {\n" +
            " for i in 0..\\<ctx.getChildCount() {\n" +
            " let parent = ctx.getChild(i)?.getParent()\n" +
            " if (!(parent is RuleNode) || (parent as! RuleNode ).getRuleContext() !== ctx) {\n" +
            " throw ANTLRError.illegalState(msg: \"Invalid parse tree shape detected.\")\n" +
            " }\n" +
            " }\n" +
            " }\n" +
            "}\n" +
            "\n" +
            "do {\n" +
            "let args = CommandLine.arguments\n" +
            "let input = ANTLRFileStream(args[1])\n" +
            "let lex = <lexerName>(input)\n" +
            "let tokens = CommonTokenStream(lex)\n" +
            "<createParser>\n" +
            "parser.setBuildParseTree(true)\n" +
            "<profile>\n" +
            "let tree = try parser.<parserStartRuleName>()\n" +
            "<if(profile)>print(profiler.getDecisionInfo().description)<endif>\n" +
            "try ParseTreeWalker.DEFAULT.walk(TreeShapeListener(), tree)\n" +
            "}catch ANTLRException.cannotInvokeStartRule {\n" +
            " print(\"error occur: cannotInvokeStartRule\")\n" +
            "}catch ANTLRException.recognition(let e ) {\n" +
            " print(\"error occur\\(e)\")\n" +
            "}catch {\n" +
            " print(\"error occur\")\n" +
            "}\n"
    );
    // Parser construction snippet; debug mode additionally wires up the
    // diagnostic error listener.
    ST createParserST = new ST(" let parser = try <parserName>(tokens)\n");
    if ( debug ) {
        createParserST =
            new ST(
                " let parser = try <parserName>(tokens)\n" +
                " parser.addErrorListener(DiagnosticErrorListener())\n");
    }
    if ( profile ) {
        outputFileST.add("profile",
                "let profiler = ProfilingATNSimulator(parser)\n" +
                "parser.setInterpreter(profiler)");
    }
    else {
        // An empty list renders as nothing for the <profile> placeholder.
        outputFileST.add("profile", new ArrayList<Object>());
    }
    outputFileST.add("createParser", createParserST);
    outputFileST.add("parserName", parserName);
    outputFileST.add("lexerName", lexerName);
    outputFileST.add("parserStartRuleName", parserStartRuleName);
    writeFile(tmpdir, "main.swift", outputFileST.render());
}
|
||||
|
||||
/**
 * Generates main.swift for a lexer-only test: tokenizes the file named in
 * argv[1], prints every token, and optionally dumps the DEFAULT_MODE DFA.
 *
 * @param lexerName generated Swift lexer class name
 * @param showDFA   when true, append the lexer DFA dump after the token list
 */
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
    ST outputFileST = new ST(
            "import Antlr4\n" +
            "import Foundation\n" +
            // Unbuffered stdout so output is not lost if the process aborts.
            "setbuf(__stdoutp, nil)\n" +
            "let args = CommandLine.arguments\n" +
            "let input = ANTLRFileStream(args[1])\n" +
            "let lex = <lexerName>(input)\n" +
            "let tokens = CommonTokenStream(lex)\n" +
            "do {\n" +
            " try tokens.fill()\n" +
            "}catch ANTLRException.cannotInvokeStartRule {\n" +
            " print(\"error occur: cannotInvokeStartRule\")\n" +
            "}catch ANTLRException.recognition(let e ) {\n" +
            " print(\"error occur\\(e)\")\n" +
            "}catch {\n" +
            " print(\"error occur\")\n" +
            "}\n" +
            "for t in tokens.getTokens() {\n" +
            " print(t)\n" +
            "}\n" +
            // The DFA dump line is spliced into the template only when requested.
            (showDFA?"print(lex.getInterpreter().getDFA(Lexer.DEFAULT_MODE).toLexerString(), terminator: \"\" )\n":"") );

    outputFileST.add("lexerName", lexerName);
    writeFile(tmpdir, "main.swift", outputFileST.render());
}
|
||||
|
||||
public void writeRecognizerAndCompile(String parserName, String lexerName,
|
||||
String parserStartRuleName,
|
||||
boolean debug,
|
||||
boolean profile) {
|
||||
if ( parserName==null ) {
|
||||
writeLexerTestFile(lexerName, debug);
|
||||
}
|
||||
else {
|
||||
writeParserTestFile(parserName,
|
||||
lexerName,
|
||||
parserStartRuleName,
|
||||
debug,
|
||||
profile);
|
||||
}
|
||||
|
||||
addSourceFiles("main.swift");
|
||||
// addSourceFiles("Antlr4.swift");
|
||||
}
|
||||
|
||||
|
||||
protected void eraseFiles(final String filesEndingWith) {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
String[] files = tmpdirF.list();
|
||||
for(int i = 0; files!=null && i < files.length; i++) {
|
||||
if ( files[i].endsWith(filesEndingWith) ) {
|
||||
new File(tmpdir+"/"+files[i]).delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
protected void eraseTempDir() {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
if ( tmpdirF.exists() ) {
|
||||
eraseFilesIn(tmpdir);
|
||||
tmpdirF.delete();
|
||||
}
|
||||
}
|
||||
protected static void eraseFilesIn(String dirName) {
|
||||
if (dirName == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
File dir = new File(dirName);
|
||||
String[] files = dir.list();
|
||||
if(files!=null) for(String file : files) {
|
||||
new File(dirName+"/"+file).delete();
|
||||
}
|
||||
}
|
||||
/** Deletes the compiled Antlr4 framework directory (contents first, then the directory itself). */
protected static void eraseAntlrFrameWorkDir() {
    File frameworkdir = new File(ANTLR_FRAMEWORK_DIR);
    if ( frameworkdir.exists() ) {
        eraseFilesIn(ANTLR_FRAMEWORK_DIR);
        frameworkdir.delete();
    }
}
|
||||
|
||||
public String getFirstLineOfException() {
|
||||
if ( this.stderrDuringParse ==null ) {
|
||||
return null;
|
||||
}
|
||||
String[] lines = this.stderrDuringParse.split("\n");
|
||||
String prefix="Exception in thread \"main\" ";
|
||||
return lines[0].substring(prefix.length(),lines[0].length());
|
||||
}
|
||||
|
||||
/**
 * Returns a sublist view that skips the reserved slots below
 * Token.MIN_USER_TOKEN_TYPE, leaving only user-defined token entries.
 */
public List<String> realElements(List<String> elements) {
    return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
}
|
||||
|
||||
/** Asserts that {@code text} is neither null nor empty, reporting {@code message} on failure. */
public void assertNotNullOrEmpty(String message, String text) {
    assertNotNull(message, text);
    assertFalse(message, text.isEmpty());
}
|
||||
|
||||
/** Asserts that {@code text} is neither null nor empty. */
public void assertNotNullOrEmpty(String text) {
    assertNotNull(text);
    assertFalse(text.isEmpty());
}
|
||||
|
||||
|
||||
/** Return map sorted by key */
|
||||
public <K extends Comparable<? super K>,V> LinkedHashMap<K,V> sort(Map<K,V> data) {
|
||||
LinkedHashMap<K,V> dup = new LinkedHashMap<K, V>();
|
||||
List<K> keys = new ArrayList<K>();
|
||||
keys.addAll(data.keySet());
|
||||
Collections.sort(keys);
|
||||
for (K k : keys) {
|
||||
dup.put(k, data.get(k));
|
||||
}
|
||||
return dup;
|
||||
}
|
||||
|
||||
/** Delegates directly to JUnit's int-valued assertEquals. */
protected static void assertEquals(String msg, int a, int b) {
    org.junit.Assert.assertEquals(msg, a, b);
}
|
||||
|
||||
/**
 * Compares two strings after normalizing both through the absorb* helpers
 * (currently: empty strings are treated as null).
 */
protected static void assertEquals(String a, String b) {
    a = absorbExpectedDifferences(a);
    b = absorbActualDifferences(b);
    org.junit.Assert.assertEquals(a, b);
}
|
||||
|
||||
/** Asserts the normalized string is null; an empty string also passes. */
protected static void assertNull(String a) {
    a = absorbActualDifferences(a);
    org.junit.Assert.assertNull(a);
}
|
||||
|
||||
private static String absorbExpectedDifferences(String a) {
|
||||
if(a==null)
|
||||
return a;
|
||||
// work around the lack of requiresFullContext field in DFAState
|
||||
//if(a.startsWith("Decision"))
|
||||
//a = a.replaceAll("\\^", "");
|
||||
// work around the algo difference for full context
|
||||
//a = stripOutUnwantedLinesWith(a, "reportAttemptingFullContext","reportContextSensitivity", "reportAmbiguity");
|
||||
if(a.isEmpty())
|
||||
a = null;
|
||||
return a;
|
||||
}
|
||||
|
||||
private static String absorbActualDifferences(String a) {
|
||||
if(a==null)
|
||||
return a;
|
||||
// work around the algo difference for full context
|
||||
// work around the algo difference for semantic predicates
|
||||
//a = stripOutUnwantedLinesWith(a, "reportContextSensitivity","eval=false");
|
||||
if(a.isEmpty())
|
||||
a = null;
|
||||
return a;
|
||||
}
|
||||
|
||||
private static String stripOutUnwantedLinesWith(String a, String ... unwanteds) {
|
||||
String[] lines = a.split("\n");
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for(String line : lines) {
|
||||
boolean wanted = true;
|
||||
for(String unwanted : unwanteds) {
|
||||
if(line.contains(unwanted) ) {
|
||||
wanted = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(!wanted)
|
||||
continue;
|
||||
sb.append(line);
|
||||
sb.append("\n");
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
}
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,521 @@
|
|||
package org.antlr.v4.codegen.target;
|
||||
|
||||
import org.antlr.v4.codegen.CodeGenerator;
|
||||
import org.antlr.v4.codegen.Target;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.atn.*;
|
||||
import org.antlr.v4.runtime.misc.IntegerList;
|
||||
import org.antlr.v4.runtime.misc.Interval;
|
||||
import org.antlr.v4.runtime.misc.IntervalSet;
|
||||
import org.antlr.v4.tool.ErrorType;
|
||||
import org.antlr.v4.tool.Grammar;
|
||||
import org.antlr.v4.tool.LexerGrammar;
|
||||
import org.antlr.v4.tool.ast.GrammarAST;
|
||||
import org.stringtemplate.v4.*;
|
||||
|
||||
import javax.json.Json;
|
||||
import javax.json.JsonArrayBuilder;
|
||||
import javax.json.JsonObject;
|
||||
import javax.json.JsonObjectBuilder;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* Created by janyou on 15/9/15.
|
||||
*/
|
||||
public class SwiftTarget extends Target {
|
||||
|
||||
/**
|
||||
* The Swift target can cache the code generation templates.
|
||||
*/
|
||||
private static final ThreadLocal<STGroup> targetTemplates = new ThreadLocal<STGroup>();
|
||||
|
||||
protected static final String[] swiftKeywords = {
|
||||
"associatedtype", "class", "deinit", "enum", "extension", "func", "import", "init", "inout", "internal",
|
||||
"let", "operator", "private", "protocol", "public", "static", "struct", "subscript", "typealias", "var",
|
||||
"break", "case", "continue", "default", "defer", "do", "else", "fallthrough", "for", "guard", "if",
|
||||
"in", "repeat", "return", "switch", "where", "while",
|
||||
"as", "catch", "dynamicType", "false", "is", "nil", "rethrows", "super", "self", "Self", "throw", "throws",
|
||||
"true", "try", "__COLUMN__", "__FILE__", "__FUNCTION__","__LINE__", "#column", "#file", "#function", "#line", "_" , "#available", "#else", "#elseif", "#endif", "#if", "#selector",
|
||||
"associativity", "convenience", "dynamic", "didSet", "final", "get", "infix", "indirect", "lazy",
|
||||
"left", "mutating", "none", "nonmutating", "optional", "override", "postfix", "precedence",
|
||||
"prefix", "Protocol", "required", "right", "set", "Type", "unowned", "weak", "willSet"
|
||||
};
|
||||
|
||||
/** Avoid grammar symbols in this set to prevent conflicts in gen'd code. */
|
||||
protected final Set<String> badWords = new HashSet<String>();
|
||||
|
||||
public String lexerAtnJSON = null;
|
||||
public String parserAtnJSON = null;
|
||||
public SwiftTarget(CodeGenerator gen) {
|
||||
super(gen, "Swift");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getVersion() {
|
||||
return "4.5.1"; // Java and tool versions move in lock step
|
||||
}
|
||||
|
||||
public Set<String> getBadWords() {
|
||||
if (badWords.isEmpty()) {
|
||||
addBadWords();
|
||||
}
|
||||
|
||||
return badWords;
|
||||
}
|
||||
|
||||
protected void addBadWords() {
|
||||
badWords.addAll(Arrays.asList(swiftKeywords));
|
||||
badWords.add("rule");
|
||||
badWords.add("parserRule");
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getSerializedATNSegmentLimit() {
|
||||
// 65535 is the class file format byte limit for a UTF-8 encoded string literal
|
||||
// 3 is the maximum number of bytes it takes to encode a value in the range 0-0xFFFF
|
||||
return 65535 / 3;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean visibleGrammarSymbolCausesIssueInGeneratedCode(GrammarAST idNode) {
|
||||
return getBadWords().contains(idNode.getText());
|
||||
}
|
||||
@Override
|
||||
protected void genFile(Grammar g,
|
||||
ST outputFileST,
|
||||
String fileName)
|
||||
{
|
||||
super.genFile(g,outputFileST,fileName);
|
||||
|
||||
if (g.isLexer() && lexerAtnJSON == null) {
|
||||
lexerAtnJSON = getLexerOrParserATNJson(g, fileName);
|
||||
} else if (!g.isLexer() && parserAtnJSON == null && g.atn != null) {
|
||||
parserAtnJSON = getLexerOrParserATNJson(g, fileName);
|
||||
}
|
||||
|
||||
if (fileName.endsWith(CodeGenerator.VOCAB_FILE_EXTENSION)) {
|
||||
String jsonFileName = fileName.substring(0,fileName.lastIndexOf(CodeGenerator.VOCAB_FILE_EXTENSION));
|
||||
if (lexerAtnJSON != null) {
|
||||
jsonFileName = jsonFileName + "ATN.swift";
|
||||
// System.out.println(jsonFileName);
|
||||
//System.out.println(lexerAtnJSON);
|
||||
writeFile(lexerAtnJSON,g,jsonFileName);
|
||||
}
|
||||
|
||||
if (parserAtnJSON != null) {
|
||||
jsonFileName = jsonFileName + "ParserATN.swift";
|
||||
// System.out.println(jsonFileName);
|
||||
//System.out.println(parserAtnJSON);
|
||||
writeFile(parserAtnJSON,g,jsonFileName);
|
||||
}
|
||||
}
|
||||
|
||||
// else if (g instanceof ParseR) {
|
||||
// System.out.println("parserGrammar");
|
||||
// }
|
||||
//
|
||||
//getCodeGenerator().write(outputFileST, fileName);
|
||||
}
|
||||
|
||||
private String getLexerOrParserATNJson(Grammar g, String fileName) {
|
||||
ST extST = getTemplates().getInstanceOf("codeFileExtension");
|
||||
String className = fileName.substring(0,fileName.lastIndexOf(extST.render()));
|
||||
|
||||
String JSON = "class " + className + "ATN {\n" +
|
||||
" let jsonString: String = \"" +
|
||||
serializeTojson(g.atn).replaceAll("\"","\\\\\"") +"\"\n}" ; //.replaceAll("\"", "\\\\\"");
|
||||
return JSON;
|
||||
}
|
||||
|
||||
private void writeFile(String content,Grammar g,String fileName) {
|
||||
|
||||
try {
|
||||
Writer w = this.getCodeGenerator().tool.getOutputFileWriter(g, fileName);
|
||||
w.write(content);
|
||||
w.close();
|
||||
}
|
||||
catch (IOException ioe) {
|
||||
this.getCodeGenerator().tool.errMgr.toolError(ErrorType.CANNOT_WRITE_FILE,
|
||||
ioe,
|
||||
fileName);
|
||||
}
|
||||
}
|
||||
@Override
|
||||
protected STGroup loadTemplates() {
|
||||
STGroup result = targetTemplates.get();
|
||||
if (result == null) {
|
||||
result = super.loadTemplates();
|
||||
result.registerRenderer(String.class, new SwiftStringRenderer(), true);
|
||||
targetTemplates.set(result);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
//added by janyou -->
|
||||
public String serializeTojson(ATN atn) {
|
||||
JsonObjectBuilder builder = Json.createObjectBuilder();
|
||||
builder.add("version", ATNDeserializer.SERIALIZED_VERSION);
|
||||
builder.add("uuid", ATNDeserializer.SERIALIZED_UUID.toString());
|
||||
|
||||
// convert grammar type to ATN const to avoid dependence on ANTLRParser
|
||||
builder.add("grammarType",atn.grammarType.ordinal());
|
||||
builder.add("maxTokenType",atn.maxTokenType);
|
||||
|
||||
//states
|
||||
int nedges = 0;
|
||||
|
||||
Map<IntervalSet, Integer> setIndices = new HashMap<IntervalSet, Integer>();
|
||||
List<IntervalSet> sets = new ArrayList<IntervalSet>();
|
||||
JsonArrayBuilder statesBuilder = Json.createArrayBuilder() ;
|
||||
IntegerList nonGreedyStates = new IntegerList();
|
||||
IntegerList precedenceStates = new IntegerList();
|
||||
for (ATNState s : atn.states) {
|
||||
JsonObjectBuilder stateBuilder = Json.createObjectBuilder();
|
||||
if ( s==null ) { // might be optimized away
|
||||
statesBuilder.addNull();
|
||||
continue;
|
||||
}
|
||||
|
||||
int stateType = s.getStateType();
|
||||
|
||||
stateBuilder.add("stateType",stateType);
|
||||
//stateBuilder.add("stateNumber",s.stateNumber);
|
||||
stateBuilder.add("ruleIndex",s.ruleIndex);
|
||||
|
||||
if (s instanceof DecisionState && ((DecisionState)s).nonGreedy) {
|
||||
nonGreedyStates.add(s.stateNumber);
|
||||
}
|
||||
|
||||
if (s instanceof RuleStartState && ((RuleStartState)s).isLeftRecursiveRule) {
|
||||
precedenceStates.add(s.stateNumber);
|
||||
}
|
||||
|
||||
|
||||
if ( s.getStateType() == ATNState.LOOP_END ) {
|
||||
stateBuilder.add("detailStateNumber",((LoopEndState)s).loopBackState.stateNumber);
|
||||
}
|
||||
else if ( s instanceof BlockStartState ) {
|
||||
stateBuilder.add("detailStateNumber",((BlockStartState)s).endState.stateNumber);
|
||||
}
|
||||
|
||||
if (s.getStateType() != ATNState.RULE_STOP) {
|
||||
// the deserializer can trivially derive these edges, so there's no need to serialize them
|
||||
nedges += s.getNumberOfTransitions();
|
||||
}
|
||||
for (int i=0; i<s.getNumberOfTransitions(); i++) {
|
||||
Transition t = s.transition(i);
|
||||
int edgeType = Transition.serializationTypes.get(t.getClass());
|
||||
if ( edgeType == Transition.SET || edgeType == Transition.NOT_SET ) {
|
||||
SetTransition st = (SetTransition)t;
|
||||
if (!setIndices.containsKey(st.set)) {
|
||||
sets.add(st.set);
|
||||
setIndices.put(st.set, sets.size() - 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
statesBuilder.add(stateBuilder);
|
||||
}
|
||||
builder.add("states",statesBuilder);
|
||||
|
||||
|
||||
// non-greedy states
|
||||
JsonArrayBuilder nonGreedyStatesBuilder = Json.createArrayBuilder() ;
|
||||
for (int i = 0; i < nonGreedyStates.size(); i++) {
|
||||
nonGreedyStatesBuilder.add(nonGreedyStates.get(i));
|
||||
}
|
||||
builder.add("nonGreedyStates",nonGreedyStatesBuilder);
|
||||
|
||||
|
||||
// precedence states
|
||||
JsonArrayBuilder precedenceStatesBuilder = Json.createArrayBuilder() ;
|
||||
for (int i = 0; i < precedenceStates.size(); i++) {
|
||||
precedenceStatesBuilder.add(precedenceStates.get(i));
|
||||
}
|
||||
builder.add("precedenceStates",precedenceStatesBuilder);
|
||||
|
||||
JsonArrayBuilder ruleToStartStateBuilder = Json.createArrayBuilder() ;
|
||||
int nrules = atn.ruleToStartState.length;
|
||||
|
||||
for (int r=0; r<nrules; r++) {
|
||||
JsonObjectBuilder stateBuilder = Json.createObjectBuilder();
|
||||
ATNState ruleStartState = atn.ruleToStartState[r];
|
||||
|
||||
stateBuilder.add("stateNumber",ruleStartState.stateNumber);
|
||||
if (atn.grammarType == ATNType.LEXER) {
|
||||
// if (atn.ruleToTokenType[r] == Token.EOF) {
|
||||
// //data.add(Character.MAX_VALUE);
|
||||
// stateBuilder.add("ruleToTokenType",-1);
|
||||
// }
|
||||
// else {
|
||||
// //data.add(atn.ruleToTokenType[r]);
|
||||
stateBuilder.add("ruleToTokenType",atn.ruleToTokenType[r]);
|
||||
// }
|
||||
}
|
||||
ruleToStartStateBuilder.add(stateBuilder);
|
||||
}
|
||||
builder.add("ruleToStartState",ruleToStartStateBuilder);
|
||||
|
||||
|
||||
JsonArrayBuilder modeToStartStateBuilder = Json.createArrayBuilder() ;
|
||||
int nmodes = atn.modeToStartState.size();
|
||||
if ( nmodes>0 ) {
|
||||
for (ATNState modeStartState : atn.modeToStartState) {
|
||||
|
||||
modeToStartStateBuilder.add(modeStartState.stateNumber);
|
||||
}
|
||||
}
|
||||
builder.add("modeToStartState",modeToStartStateBuilder);
|
||||
|
||||
|
||||
JsonArrayBuilder nsetsBuilder = Json.createArrayBuilder() ;
|
||||
int nsets = sets.size();
|
||||
//data.add(nsets);
|
||||
builder.add("nsets",nsets);
|
||||
|
||||
for (IntervalSet set : sets) {
|
||||
JsonObjectBuilder setBuilder = Json.createObjectBuilder();
|
||||
boolean containsEof = set.contains(Token.EOF);
|
||||
if (containsEof && set.getIntervals().get(0).b == Token.EOF) {
|
||||
//data.add(set.getIntervals().size() - 1);
|
||||
|
||||
setBuilder.add("size",set.getIntervals().size() - 1);
|
||||
}
|
||||
else {
|
||||
//data.add(set.getIntervals().size());
|
||||
|
||||
setBuilder.add("size",set.getIntervals().size());
|
||||
}
|
||||
setBuilder.add("containsEof",containsEof ? 1 : 0);
|
||||
JsonArrayBuilder IntervalsBuilder = Json.createArrayBuilder() ;
|
||||
for (Interval I : set.getIntervals()) {
|
||||
JsonObjectBuilder IntervalBuilder = Json.createObjectBuilder();
|
||||
if (I.a == Token.EOF) {
|
||||
if (I.b == Token.EOF) {
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
IntervalBuilder.add("a",0);
|
||||
//data.add(0);
|
||||
}
|
||||
}
|
||||
else {
|
||||
IntervalBuilder.add("a",I.a);
|
||||
|
||||
//data.add(I.a);
|
||||
}
|
||||
IntervalBuilder.add("b",I.b);
|
||||
IntervalsBuilder.add(IntervalBuilder);
|
||||
}
|
||||
setBuilder.add("Intervals",IntervalsBuilder);
|
||||
nsetsBuilder.add(setBuilder);
|
||||
}
|
||||
|
||||
builder.add("IntervalSet",nsetsBuilder);
|
||||
//builder.add("nedges",nedges);
|
||||
JsonArrayBuilder allTransitionsBuilder = Json.createArrayBuilder() ;
|
||||
|
||||
for (ATNState s : atn.states) {
|
||||
|
||||
if ( s==null ) {
|
||||
// might be optimized away
|
||||
continue;
|
||||
}
|
||||
|
||||
if (s.getStateType() == ATNState.RULE_STOP) {
|
||||
continue;
|
||||
}
|
||||
JsonArrayBuilder transitionsBuilder = Json.createArrayBuilder() ;
|
||||
|
||||
for (int i=0; i<s.getNumberOfTransitions(); i++) {
|
||||
JsonObjectBuilder transitionBuilder = Json.createObjectBuilder();
|
||||
Transition t = s.transition(i);
|
||||
|
||||
if (atn.states.get(t.target.stateNumber) == null) {
|
||||
throw new IllegalStateException("Cannot serialize a transition to a removed state.");
|
||||
}
|
||||
|
||||
int src = s.stateNumber;
|
||||
int trg = t.target.stateNumber;
|
||||
int edgeType = Transition.serializationTypes.get(t.getClass());
|
||||
int arg1 = 0;
|
||||
int arg2 = 0;
|
||||
int arg3 = 0;
|
||||
switch ( edgeType ) {
|
||||
case Transition.RULE :
|
||||
trg = ((RuleTransition)t).followState.stateNumber;
|
||||
arg1 = ((RuleTransition)t).target.stateNumber;
|
||||
arg2 = ((RuleTransition)t).ruleIndex;
|
||||
arg3 = ((RuleTransition)t).precedence;
|
||||
break;
|
||||
case Transition.PRECEDENCE:
|
||||
PrecedencePredicateTransition ppt = (PrecedencePredicateTransition)t;
|
||||
arg1 = ppt.precedence;
|
||||
break;
|
||||
case Transition.PREDICATE :
|
||||
PredicateTransition pt = (PredicateTransition)t;
|
||||
arg1 = pt.ruleIndex;
|
||||
arg2 = pt.predIndex;
|
||||
arg3 = pt.isCtxDependent ? 1 : 0 ;
|
||||
break;
|
||||
case Transition.RANGE :
|
||||
arg1 = ((RangeTransition)t).from;
|
||||
arg2 = ((RangeTransition)t).to;
|
||||
if (arg1 == Token.EOF) {
|
||||
arg1 = 0;
|
||||
arg3 = 1;
|
||||
}
|
||||
|
||||
break;
|
||||
case Transition.ATOM :
|
||||
arg1 = ((AtomTransition)t).label;
|
||||
if (arg1 == Token.EOF) {
|
||||
arg1 = 0;
|
||||
arg3 = 1;
|
||||
}
|
||||
|
||||
break;
|
||||
case Transition.ACTION :
|
||||
ActionTransition at = (ActionTransition)t;
|
||||
arg1 = at.ruleIndex;
|
||||
arg2 = at.actionIndex;
|
||||
// if (arg2 == -1) {
|
||||
// arg2 = 0xFFFF;
|
||||
// }
|
||||
|
||||
arg3 = at.isCtxDependent ? 1 : 0 ;
|
||||
break;
|
||||
case Transition.SET :
|
||||
arg1 = setIndices.get(((SetTransition)t).set);
|
||||
break;
|
||||
case Transition.NOT_SET :
|
||||
arg1 = setIndices.get(((SetTransition)t).set);
|
||||
break;
|
||||
case Transition.WILDCARD :
|
||||
break;
|
||||
}
|
||||
transitionBuilder.add("src",src);
|
||||
transitionBuilder.add("trg",trg);
|
||||
transitionBuilder.add("edgeType",edgeType);
|
||||
transitionBuilder.add("arg1",arg1);
|
||||
transitionBuilder.add("arg2",arg2);
|
||||
transitionBuilder.add("arg3",arg3);
|
||||
transitionsBuilder.add(transitionBuilder);
|
||||
}
|
||||
allTransitionsBuilder.add(transitionsBuilder);
|
||||
}
|
||||
|
||||
builder.add("allTransitionsBuilder",allTransitionsBuilder);
|
||||
int ndecisions = atn.decisionToState.size();
|
||||
//data.add(ndecisions);
|
||||
JsonArrayBuilder decisionToStateBuilder = Json.createArrayBuilder() ;
|
||||
|
||||
for (DecisionState decStartState : atn.decisionToState) {
|
||||
//data.add(decStartState.stateNumber);
|
||||
decisionToStateBuilder.add(decStartState.stateNumber);
|
||||
}
|
||||
builder.add("decisionToState",decisionToStateBuilder);
|
||||
//
|
||||
// LEXER ACTIONS
|
||||
//
|
||||
JsonArrayBuilder lexerActionsBuilder = Json.createArrayBuilder() ;
|
||||
|
||||
if (atn.grammarType == ATNType.LEXER) {
|
||||
//data.add(atn.lexerActions.length);
|
||||
for (LexerAction action : atn.lexerActions) {
|
||||
JsonObjectBuilder lexerActionBuilder = Json.createObjectBuilder();
|
||||
|
||||
lexerActionBuilder.add("actionType",action.getActionType().ordinal());
|
||||
//data.add(action.getActionType().ordinal());
|
||||
switch (action.getActionType()) {
|
||||
case CHANNEL:
|
||||
int channel = ((LexerChannelAction)action).getChannel();
|
||||
|
||||
lexerActionBuilder.add("a",channel);
|
||||
lexerActionBuilder.add("b",0);
|
||||
break;
|
||||
|
||||
case CUSTOM:
|
||||
int ruleIndex = ((LexerCustomAction)action).getRuleIndex();
|
||||
int actionIndex = ((LexerCustomAction)action).getActionIndex();
|
||||
|
||||
lexerActionBuilder.add("a",ruleIndex);
|
||||
lexerActionBuilder.add("b",actionIndex);
|
||||
break;
|
||||
|
||||
case MODE:
|
||||
int mode = ((LexerModeAction)action).getMode();
|
||||
|
||||
lexerActionBuilder.add("a",mode);
|
||||
lexerActionBuilder.add("b",0);
|
||||
break;
|
||||
|
||||
|
||||
case MORE:
|
||||
|
||||
lexerActionBuilder.add("a",0);
|
||||
lexerActionBuilder.add("b",0);
|
||||
break;
|
||||
|
||||
case POP_MODE:
|
||||
lexerActionBuilder.add("a",0);
|
||||
lexerActionBuilder.add("b",0);
|
||||
break;
|
||||
|
||||
case PUSH_MODE:
|
||||
mode = ((LexerPushModeAction)action).getMode();
|
||||
|
||||
lexerActionBuilder.add("a",mode);
|
||||
lexerActionBuilder.add("b",0);
|
||||
break;
|
||||
|
||||
case SKIP:
|
||||
lexerActionBuilder.add("a",0);
|
||||
lexerActionBuilder.add("b",0);
|
||||
break;
|
||||
|
||||
case TYPE:
|
||||
int type = ((LexerTypeAction)action).getType();
|
||||
|
||||
lexerActionBuilder.add("a",type);
|
||||
lexerActionBuilder.add("b",0);
|
||||
break;
|
||||
|
||||
default:
|
||||
String message = String.format(Locale.getDefault(), "The specified lexer action type %s is not valid.", action.getActionType());
|
||||
throw new IllegalArgumentException(message);
|
||||
}
|
||||
lexerActionsBuilder.add(lexerActionBuilder);
|
||||
}
|
||||
}
|
||||
builder.add("lexerActions",lexerActionsBuilder);
|
||||
// don't adjust the first value since that's the version number
|
||||
// for (int i = 1; i < data.size(); i++) {
|
||||
// if (data.get(i) < Character.MIN_VALUE || data.get(i) > Character.MAX_VALUE) {
|
||||
// throw new UnsupportedOperationException("Serialized ATN data element out of range.");
|
||||
// }
|
||||
//
|
||||
// int value = (data.get(i) + 2) & 0xFFFF;
|
||||
// data.set(i, value);
|
||||
// }
|
||||
JsonObject data = builder.build();
|
||||
// System.out.print(data.toString());
|
||||
return data.toString();
|
||||
}
|
||||
|
||||
//<--
|
||||
protected static class SwiftStringRenderer extends StringRenderer {
|
||||
|
||||
@Override
|
||||
public String toString(Object o, String formatString, Locale locale) {
|
||||
if ("java-escape".equals(formatString)) {
|
||||
// 5C is the hex code for the \ itself
|
||||
return ((String)o).replace("\\u", "\\u005Cu");
|
||||
}
|
||||
|
||||
return super.toString(o, formatString, locale);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue