forked from jasder/antlr
Adding TokenStreamRewriterTests.
This commit is contained in:
parent 6d2b5b6790
commit cefd2c6528
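For context: every test added below follows the same TokenStreamRewriter pattern — edits are recorded against token indices with insertBefore/insertAfter/replace/delete, and getText() renders the rewritten text while the underlying token stream stays unchanged. A minimal sketch of that pattern, assuming the LexerA generated from the grammar in this commit is on the test target; the demo() wrapper and the rewriter variable name are only illustrative:

import Antlr4

func demo() throws {
    // Lex "abc" with LexerA, then record two edits and render the result.
    let stream = CommonTokenStream(LexerA(ANTLRInputStream("abc")))
    try stream.fill()
    let rewriter = TokenStreamRewriter(stream)
    rewriter.insertBefore(0, "0")    // insert "0" before token 0 ('a')
    try rewriter.replace(1, "!")     // replace token 1 ('b') with "!"
    print(try rewriter.getText())    // prints "0a!c"; the original stream still reads "abc"
}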
@@ -334,6 +334,13 @@
DBF000AF1E7F521E006DB586 /* SwiftTestListener.swift in Sources */ = {isa = PBXBuildFile; fileRef = DBF000A71E7F521E006DB586 /* SwiftTestListener.swift */; };
DBF000B01E7F521E006DB586 /* SwiftTestParser.swift in Sources */ = {isa = PBXBuildFile; fileRef = DBF000A81E7F521E006DB586 /* SwiftTestParser.swift */; };
DBF000B11E7F521E006DB586 /* SwiftTestParserATN.swift in Sources */ = {isa = PBXBuildFile; fileRef = DBF000A91E7F521E006DB586 /* SwiftTestParserATN.swift */; };
DBF000B31E7F59B3006DB586 /* TokenStreamRewriterTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = DBF000B21E7F59B3006DB586 /* TokenStreamRewriterTests.swift */; };
DBF000B91E7F627D006DB586 /* LexerA.swift in Sources */ = {isa = PBXBuildFile; fileRef = DBF000B61E7F627D006DB586 /* LexerA.swift */; };
DBF000BA1E7F627D006DB586 /* LexerA.tokens in Resources */ = {isa = PBXBuildFile; fileRef = DBF000B71E7F627D006DB586 /* LexerA.tokens */; };
DBF000BB1E7F627D006DB586 /* LexerAATN.swift in Sources */ = {isa = PBXBuildFile; fileRef = DBF000B81E7F627D006DB586 /* LexerAATN.swift */; };
DBF000C11E7F6546006DB586 /* LexerB.swift in Sources */ = {isa = PBXBuildFile; fileRef = DBF000BE1E7F6546006DB586 /* LexerB.swift */; };
DBF000C21E7F6546006DB586 /* LexerB.tokens in Resources */ = {isa = PBXBuildFile; fileRef = DBF000BF1E7F6546006DB586 /* LexerB.tokens */; };
DBF000C31E7F6546006DB586 /* LexerBATN.swift in Sources */ = {isa = PBXBuildFile; fileRef = DBF000C01E7F6546006DB586 /* LexerBATN.swift */; };
F9A424AD1BCD348C00A9CD35 /* Antlr4.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = F9A4211C1BCCC35000A9CD35 /* Antlr4.framework */; };
/* End PBXBuildFile section */
@@ -518,6 +525,15 @@
DBF000A71E7F521E006DB586 /* SwiftTestListener.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SwiftTestListener.swift; path = ../../Intermediates/Antlr4.build/Debug/Antlr4Tests.build/DerivedSources/Tests/Antlr4Tests/SwiftTestListener.swift; sourceTree = BUILT_PRODUCTS_DIR; };
DBF000A81E7F521E006DB586 /* SwiftTestParser.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SwiftTestParser.swift; path = ../../Intermediates/Antlr4.build/Debug/Antlr4Tests.build/DerivedSources/Tests/Antlr4Tests/SwiftTestParser.swift; sourceTree = BUILT_PRODUCTS_DIR; };
DBF000A91E7F521E006DB586 /* SwiftTestParserATN.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SwiftTestParserATN.swift; path = ../../Intermediates/Antlr4.build/Debug/Antlr4Tests.build/DerivedSources/Tests/Antlr4Tests/SwiftTestParserATN.swift; sourceTree = BUILT_PRODUCTS_DIR; };
DBF000B21E7F59B3006DB586 /* TokenStreamRewriterTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = TokenStreamRewriterTests.swift; sourceTree = "<group>"; };
DBF000B51E7F614D006DB586 /* LexerA.g4 */ = {isa = PBXFileReference; lastKnownFileType = text; path = LexerA.g4; sourceTree = "<group>"; };
DBF000B61E7F627D006DB586 /* LexerA.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = LexerA.swift; path = ../../Intermediates/Antlr4.build/Debug/Antlr4Tests.build/DerivedSources/Tests/Antlr4Tests/LexerA.swift; sourceTree = BUILT_PRODUCTS_DIR; };
DBF000B71E7F627D006DB586 /* LexerA.tokens */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file; name = LexerA.tokens; path = ../../Intermediates/Antlr4.build/Debug/Antlr4Tests.build/DerivedSources/Tests/Antlr4Tests/LexerA.tokens; sourceTree = BUILT_PRODUCTS_DIR; };
DBF000B81E7F627D006DB586 /* LexerAATN.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = LexerAATN.swift; path = ../../Intermediates/Antlr4.build/Debug/Antlr4Tests.build/DerivedSources/Tests/Antlr4Tests/LexerAATN.swift; sourceTree = BUILT_PRODUCTS_DIR; };
DBF000BD1E7F64DE006DB586 /* LexerB.g4 */ = {isa = PBXFileReference; lastKnownFileType = text; path = LexerB.g4; sourceTree = "<group>"; };
DBF000BE1E7F6546006DB586 /* LexerB.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = LexerB.swift; path = ../../Intermediates/Antlr4.build/Debug/Antlr4Tests.build/DerivedSources/Tests/Antlr4Tests/LexerB.swift; sourceTree = BUILT_PRODUCTS_DIR; };
DBF000BF1E7F6546006DB586 /* LexerB.tokens */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file; name = LexerB.tokens; path = ../../Intermediates/Antlr4.build/Debug/Antlr4Tests.build/DerivedSources/Tests/Antlr4Tests/LexerB.tokens; sourceTree = BUILT_PRODUCTS_DIR; };
DBF000C01E7F6546006DB586 /* LexerBATN.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = LexerBATN.swift; path = ../../Intermediates/Antlr4.build/Debug/Antlr4Tests.build/DerivedSources/Tests/Antlr4Tests/LexerBATN.swift; sourceTree = BUILT_PRODUCTS_DIR; };
F9A4211C1BCCC35000A9CD35 /* Antlr4.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Antlr4.framework; sourceTree = BUILT_PRODUCTS_DIR; };
F9A422F81BCCD47300A9CD35 /* Antlr4.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Antlr4.framework; sourceTree = BUILT_PRODUCTS_DIR; };
F9A424A81BCD348C00A9CD35 /* Antlr4Tests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = Antlr4Tests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
@@ -563,7 +579,10 @@
children = (
DB4EB8B21E2CBA240095F402 /* gen */,
DB0991951E22DE6000FE71AA /* TokenStreamTests.swift */,
DBF000B21E7F59B3006DB586 /* TokenStreamRewriterTests.swift */,
DB4EB8A41E2CB1B60095F402 /* SwiftTest.g4 */,
DBF000B51E7F614D006DB586 /* LexerA.g4 */,
DBF000BD1E7F64DE006DB586 /* LexerB.g4 */,
);
path = Antlr4Tests;
sourceTree = "<group>";
@@ -571,6 +590,12 @@
DB4EB8B21E2CBA240095F402 /* gen */ = {
isa = PBXGroup;
children = (
DBF000B61E7F627D006DB586 /* LexerA.swift */,
DBF000B71E7F627D006DB586 /* LexerA.tokens */,
DBF000B81E7F627D006DB586 /* LexerAATN.swift */,
DBF000BE1E7F6546006DB586 /* LexerB.swift */,
DBF000BF1E7F6546006DB586 /* LexerB.tokens */,
DBF000C01E7F6546006DB586 /* LexerBATN.swift */,
DBF000A21E7F521E006DB586 /* SwiftTest.tokens */,
DBF000A31E7F521E006DB586 /* SwiftTestBaseListener.swift */,
DBF000A41E7F521E006DB586 /* SwiftTestLexer.swift */,
@@ -981,6 +1006,8 @@
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
DBF000C21E7F6546006DB586 /* LexerB.tokens in Resources */,
DBF000BA1E7F627D006DB586 /* LexerA.tokens in Resources */,
DB0991A11E22DF3B00FE71AA /* Info.plist in Resources */,
DBF000AD1E7F521E006DB586 /* SwiftTestLexer.tokens in Resources */,
DBF000AA1E7F521E006DB586 /* SwiftTest.tokens in Resources */,
@@ -1004,10 +1031,13 @@
"$(DERIVED_FILE_DIR)/SwiftTestListener.swift",
"$(DERIVED_FILE_DIR)/SwiftTestParser.swift",
"$(DERIVED_FILE_DIR)/SwiftTestParserATN.swift",
"$(DERIVED_FILE_DIR)/LexerA.swift",
"$(DERIVED_FILE_DIR)/LexerA.tokens",
"$(DERIVED_FILE_DIR)/LexerAATN.swift",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "echo \"Generating parser in $DERIVED_FILE_DIR\"\njava -jar ~/.m2/repository/org/antlr/antlr4/4.7-SNAPSHOT/antlr4-4.7-SNAPSHOT-complete.jar -Dlanguage=Swift Tests/Antlr4Tests/SwiftTest.g4 -o $DERIVED_FILE_DIR";
shellScript = "echo \"Generating parser in $DERIVED_FILE_DIR\"\njava -jar ~/.m2/repository/org/antlr/antlr4/4.7-SNAPSHOT/antlr4-4.7-SNAPSHOT-complete.jar -Dlanguage=Swift Tests/Antlr4Tests/SwiftTest.g4 -o $DERIVED_FILE_DIR\njava -jar ~/.m2/repository/org/antlr/antlr4/4.7-SNAPSHOT/antlr4-4.7-SNAPSHOT-complete.jar -Dlanguage=Swift Tests/Antlr4Tests/LexerA.g4 -o $DERIVED_FILE_DIR\njava -jar ~/.m2/repository/org/antlr/antlr4/4.7-SNAPSHOT/antlr4-4.7-SNAPSHOT-complete.jar -Dlanguage=Swift Tests/Antlr4Tests/LexerB.g4 -o $DERIVED_FILE_DIR";
};
/* End PBXShellScriptBuildPhase section */
@@ -1344,11 +1374,16 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
DBF000B91E7F627D006DB586 /* LexerA.swift in Sources */,
DB09919D1E22DE9B00FE71AA /* TokenStreamTests.swift in Sources */,
DBF000BB1E7F627D006DB586 /* LexerAATN.swift in Sources */,
DBF000C31E7F6546006DB586 /* LexerBATN.swift in Sources */,
DBF000AC1E7F521E006DB586 /* SwiftTestLexer.swift in Sources */,
DBF000C11E7F6546006DB586 /* LexerB.swift in Sources */,
DBF000B11E7F521E006DB586 /* SwiftTestParserATN.swift in Sources */,
DBF000B01E7F521E006DB586 /* SwiftTestParser.swift in Sources */,
DBF000AE1E7F521E006DB586 /* SwiftTestLexerATN.swift in Sources */,
DBF000B31E7F59B3006DB586 /* TokenStreamRewriterTests.swift in Sources */,
DBF000AF1E7F521E006DB586 /* SwiftTestListener.swift in Sources */,
DBF000AB1E7F521E006DB586 /* SwiftTestBaseListener.swift in Sources */,
);
@@ -0,0 +1,4 @@
lexer grammar LexerA;
A : 'a';
B : 'b';
C : 'c';
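LexerA tokenizes each input character as its own token, which is why the tests can address positions purely by token index. Worked out by hand from the rules above for "abc", the input used by most of the tests:

token 0 = 'a' (A), token 1 = 'b' (B), token 2 = 'c' (C), token 3 = <EOF>

so insertAfter(2, "x") in testInsertAfterLastIndex appends after 'c' and yields "abcx", and replace(0, 6, "x") in testReplaceAll collapses the seven tokens of "abcccba" to "x".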
@@ -0,0 +1,8 @@
lexer grammar LexerB;
ID : 'a'..'z'+;
INT : '0'..'9'+;
SEMI : ';';
MUL : '*';
PLUS : '+';
ASSIGN : '=';
WS : ' '+;
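Note that WS is an ordinary on-channel token here (there is no -> skip or -> channel(HIDDEN)), so whitespace occupies token slots. That is what the "Tokens: 0123456789" comment in testToStringStartStop assumes; worked out by hand for its input:

"x = 3 * 0;"  ->  0:'x' ID, 1:' ' WS, 2:'=' ASSIGN, 3:' ' WS, 4:'3' INT, 5:' ' WS, 6:'*' MUL, 7:' ' WS, 8:'0' INT, 9:';' SEMI

so replace(4, 8, "0") covers exactly the span "3 * 0", and getText() then renders "x = 0;".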
@@ -0,0 +1,680 @@
/// Copyright (c) 2012-2016 The ANTLR Project. All rights reserved.
/// Use of this file is governed by the BSD 3-clause license that
/// can be found in the LICENSE.txt file in the project root.

import XCTest
import Antlr4

class TokenStreamRewriterTests: XCTestCase {

    func testInsertBeforeIndex0() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(0, "0")
        let result = try tokens.getText()
        let expecting = "0abc"
        XCTAssertEqual(expecting, result)
    }

    func testInsertAfterLastIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertAfter(2, "x")
        let result = try tokens.getText()
        let expecting = "abcx"
        XCTAssertEqual(expecting, result)
    }

    func test2InsertBeforeAfterMiddleIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(1, "x")
        tokens.insertAfter(1, "x")
        let result = try tokens.getText()
        let expecting = "axbxc"
        XCTAssertEqual(expecting, result)
    }

    func testReplaceIndex0() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(0, "x")
        let result = try tokens.getText()
        let expecting = "xbc"
        XCTAssertEqual(expecting, result)
    }

    func testReplaceLastIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, "x")
        let result = try tokens.getText()
        let expecting = "abx"
        XCTAssertEqual(expecting, result)
    }

    func testReplaceMiddleIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(1, "x")
        let result = try tokens.getText()
        let expecting = "axc"
        XCTAssertEqual(expecting, result)
    }

    func testToStringStartStop() throws {
        // Tokens: 0123456789
        // Input: x = 3 * 0
        let input = ANTLRInputStream("x = 3 * 0;")
        let lexer = LexerB(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)

        // replace 3 * 0 with 0
        try tokens.replace(4, 8, "0")
        try stream.fill()

        var result = try tokens.getTokenStream().getText()
        var expecting = "x = 3 * 0;"
        XCTAssertEqual(expecting, result)

        result = try tokens.getText()
        expecting = "x = 0;"
        XCTAssertEqual(expecting, result)

        result = try tokens.getText(Interval.of(0, 9))
        expecting = "x = 0;"
        XCTAssertEqual(expecting, result)

        result = try tokens.getText(Interval.of(4, 8))
        expecting = "0"
        XCTAssertEqual(expecting, result)
    }

    func testToStringStartStop2() throws {
        // Tokens: 012345678901234567
        // Input: x = 3 * 0 + 2 * 0;
        let input = ANTLRInputStream("x = 3 * 0 + 2 * 0;")
        let lexer = LexerB(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)

        var result = try tokens.getTokenStream().getText()
        var expecting = "x = 3 * 0 + 2 * 0;"
        XCTAssertEqual(expecting, result)

        // replace 3 * 0 with 0
        try tokens.replace(4, 8, "0")
        try stream.fill()

        result = try tokens.getText()
        expecting = "x = 0 + 2 * 0;"
        XCTAssertEqual(expecting, result)

        result = try tokens.getText(Interval.of(0, 17))
        expecting = "x = 0 + 2 * 0;"
        XCTAssertEqual(expecting, result)

        result = try tokens.getText(Interval.of(4, 8))
        expecting = "0"
        XCTAssertEqual(expecting, result)

        result = try tokens.getText(Interval.of(0, 8))
        expecting = "x = 0"
        XCTAssertEqual(expecting, result)

        result = try tokens.getText(Interval.of(12, 16))
        expecting = "2 * 0"
        XCTAssertEqual(expecting, result)

        tokens.insertAfter(17, "// comment")
        result = try tokens.getText(Interval.of(12, 18))
        expecting = "2 * 0;// comment"
        XCTAssertEqual(expecting, result)

        result = try tokens.getText(Interval.of(0, 8))
        try stream.fill()
        // try again after insert at end
        expecting = "x = 0"
        XCTAssertEqual(expecting, result)
    }
    func test2ReplaceMiddleIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(1, "x")
        try tokens.replace(1, "y")
        let result = try tokens.getText()
        let expecting = "ayc"
        XCTAssertEqual(expecting, result)
    }

    func test2ReplaceMiddleIndex1InsertBefore() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(0, "_")
        try tokens.replace(1, "x")
        try tokens.replace(1, "y")
        let result = try tokens.getText()
        let expecting = "_ayc"
        XCTAssertEqual(expecting, result)
    }

    func testReplaceThenDeleteMiddleIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(1, "x")
        try tokens.delete(1)
        let result = try tokens.getText()
        let expecting = "ac"
        XCTAssertEqual(expecting, result)
    }

    // TODO: tweak printing
    func testInsertInPriorReplace() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(0, 2, "x")
        tokens.insertBefore(1, "0")

        var err: Error?
        do {
            _ = try tokens.getText()
        } catch ANTLRError.illegalArgument(let msg) {
            let expecting = "insert op <InsertBeforeOp@[@1,1:1='b',<2>,1:1]:\"0\"> within boundaries of previous <ReplaceOp@[@0,0:0='a',<1>,1:0]..[@2,2:2='c',<3>,1:2]:\"x\">"

            XCTAssertEqual(expecting, msg)
        }
    }

    func testInsertThenReplaceSameIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(0, "0")
        try tokens.replace(0, "x")
        let result = try tokens.getText()
        let expecting = "0xbc"
        XCTAssertEqual(expecting, result)
    }

    func test2InsertMiddleIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(1, "x")
        tokens.insertBefore(1, "y")
        let result = try tokens.getText()
        let expecting = "ayxbc"
        XCTAssertEqual(expecting, result)
    }

    func test2InsertThenReplaceIndex0() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(0, "x")
        tokens.insertBefore(0, "y")
        try tokens.replace(0, "z")
        let result = try tokens.getText()
        let expecting = "yxzbc"
        XCTAssertEqual(expecting, result)
    }

    func testReplaceThenInsertBeforeLastIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, "x")
        tokens.insertBefore(2, "y")
        let result = try tokens.getText()
        let expecting = "abyx"
        XCTAssertEqual(expecting, result)
    }

    func testInsertThenReplaceLastIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(2, "y")
        try tokens.replace(2, "x")
        let result = try tokens.getText()
        let expecting = "abyx"
        XCTAssertEqual(expecting, result)
    }

    func testReplaceThenInsertAfterLastIndex() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, "x")
        tokens.insertAfter(2, "y")
        let result = try tokens.getText()
        let expecting = "abxy"
        XCTAssertEqual(expecting, result)
    }

    func testReplaceThenInsertAtLeftEdge() throws {
        let input = ANTLRInputStream("abcccba")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, 4, "x")
        tokens.insertBefore(2, "y")
        let result = try tokens.getText()
        let expecting = "abyxba"
        XCTAssertEqual(expecting, result)
    }

    // TODO: tweak printing
    func testReplaceRangeThenInsertAtRightEdge() throws {
        let input = ANTLRInputStream("abcccba")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, 4, "x")
        tokens.insertBefore(4, "y")

        var err: Error?
        do {
            _ = try tokens.getText()
        } catch ANTLRError.illegalArgument(let msg) {
            let expecting = "insert op <InsertBeforeOp@[@4,4:4='c',<3>,1:4]:\"y\"> within boundaries of previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:\"x\">"

            XCTAssertEqual(expecting, msg)
        }
    }
    func testReplaceRangeThenInsertAfterRightEdge() throws {
        let input = ANTLRInputStream("abcccba")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, 4, "x")
        tokens.insertAfter(4, "y")
        let result = try tokens.getText()
        let expecting = "abxyba"
        XCTAssertEqual(expecting, result)
    }

    func testReplaceAll() throws {
        let input = ANTLRInputStream("abcccba")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(0, 6, "x")
        let result = try tokens.getText()
        let expecting = "x"
        XCTAssertEqual(expecting, result)
    }

    func testReplaceSubsetThenFetch() throws {
        let input = ANTLRInputStream("abcccba")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, 4, "xyz")
        let result = try tokens.getText(Interval.of(0, 6))
        let expecting = "abxyzba"
        XCTAssertEqual(expecting, result)
    }

    // TODO: tweak printing
    func testReplaceThenReplaceSuperset() throws {
        let input = ANTLRInputStream("abcccba")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, 4, "xyz")
        try tokens.replace(3, 5, "foo")

        var err: Error?
        do {
            _ = try tokens.getText()
        } catch ANTLRError.illegalArgument(let msg) {
            let expecting = "replace op boundaries of <ReplaceOp@[@3,3:3='c',<3>,1:3]..[@5,5:5='b',<2>,1:5]:\"foo\"> overlap with previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:\"xyz\">"
            XCTAssertEqual(expecting, msg)
        }
    }

    // TODO: tweak printing
    func testReplaceThenReplaceLowerIndexedSuperset() throws {
        let input = ANTLRInputStream("abcccba")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, 4, "xyz")
        try tokens.replace(1, 3, "foo")

        var err: Error?
        do {
            _ = try tokens.getText()
        } catch ANTLRError.illegalArgument(let msg) {
            let expecting = "replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@3,3:3='c',<3>,1:3]:\"foo\"> overlap with previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:\"xyz\">"
            XCTAssertEqual(expecting, msg)
        }
    }

    func testReplaceSingleMiddleThenOverlappingSuperset() throws {
        let input = ANTLRInputStream("abcba")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, 2, "xyz")
        try tokens.replace(0, 3, "foo")
        let result = try tokens.getText()
        let expecting = "fooa"
        XCTAssertEqual(expecting, result)
    }

    func testCombineInserts() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(0, "x")
        tokens.insertBefore(0, "y")
        let result = try tokens.getText()
        let expecting = "yxabc"
        XCTAssertEqual(expecting, result)
    }

    func testCombine3Inserts() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(1, "x")
        tokens.insertBefore(0, "y")
        tokens.insertBefore(1, "z")
        let result = try tokens.getText()
        let expecting = "yazxbc"
        XCTAssertEqual(expecting, result)
    }

    func testCombineInsertOnLeftWithReplace() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        // combine with left edge of rewrite
        try tokens.replace(0, 2, "foo")
        tokens.insertBefore(0, "z")
        try stream.fill()
        let result = try tokens.getText()
        let expecting = "zfoo"
        XCTAssertEqual(expecting, result)
    }

    func testCombineInsertOnLeftWithDelete() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        // combine with left edge of rewrite
        try tokens.delete(0, 2)
        tokens.insertBefore(0, "z")
        try stream.fill()
        let result = try tokens.getText()
        let expecting = "z"
        // make sure combo is not znull
        try stream.fill()
        XCTAssertEqual(expecting, result)
    }

    func testDisjointInserts() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(1, "x")
        tokens.insertBefore(2, "y")
        tokens.insertBefore(0, "z")
        try stream.fill()
        let result = try tokens.getText()
        let expecting = "zaxbyc"
        XCTAssertEqual(expecting, result)
    }
    func testOverlappingReplace() throws {
        let input = ANTLRInputStream("abcc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(1, 2, "foo")
        try tokens.replace(0, 3, "bar")
        try stream.fill()
        // wipes prior nested replace
        let result = try tokens.getText()
        let expecting = "bar"
        XCTAssertEqual(expecting, result)
    }

    func testOverlappingReplace2() throws {
        let input = ANTLRInputStream("abcc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(0, 3, "bar")
        try tokens.replace(1, 2, "foo")
        try stream.fill()
        // cannot split earlier replace

        var err: Error?
        do {
            _ = try tokens.getText()
        } catch ANTLRError.illegalArgument(let msg) {
            let expecting = "replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@2,2:2='c',<3>,1:2]:\"foo\"> overlap with previous <ReplaceOp@[@0,0:0='a',<1>,1:0]..[@3,3:3='c',<3>,1:3]:\"bar\">"
            XCTAssertEqual(expecting, msg)
        }
    }

    func testOverlappingReplace3() throws {
        let input = ANTLRInputStream("abcc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(1, 2, "foo")
        try tokens.replace(0, 2, "bar")
        try stream.fill()
        // wipes prior nested replace
        let result = try tokens.getText()
        let expecting = "barc"
        XCTAssertEqual(expecting, result)
    }

    func testOverlappingReplace4() throws {
        let input = ANTLRInputStream("abcc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(1, 2, "foo")
        try tokens.replace(1, 3, "bar")
        try stream.fill()
        // wipes prior nested replace
        let result = try tokens.getText()
        let expecting = "abar"
        XCTAssertEqual(expecting, result)
    }

    func testDropIdenticalReplace() throws {
        let input = ANTLRInputStream("abcc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(1, 2, "foo")
        try tokens.replace(1, 2, "foo")
        try stream.fill()
        // drop previous, identical
        let result = try tokens.getText()
        let expecting = "afooc"
        XCTAssertEqual(expecting, result)
    }

    func testDropPrevCoveredInsert() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(1, "foo")
        try tokens.replace(1, 2, "foo")
        try stream.fill()
        // kill prev insert
        let result = try tokens.getText()
        let expecting = "afoofoo"
        XCTAssertEqual(expecting, result)
    }

    func testLeaveAloneDisjointInsert() throws {
        let input = ANTLRInputStream("abcc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(1, "x")
        try tokens.replace(2, 3, "foo")
        let result = try tokens.getText()
        let expecting = "axbfoo"
        XCTAssertEqual(expecting, result)
    }

    func testLeaveAloneDisjointInsert2() throws {
        let input = ANTLRInputStream("abcc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        try tokens.replace(2, 3, "foo")
        tokens.insertBefore(1, "x")
        let result = try tokens.getText()
        let expecting = "axbfoo"
        XCTAssertEqual(expecting, result)
    }

    func testInsertBeforeTokenThenDeleteThatToken() throws {
        let input = ANTLRInputStream("abc")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(2, "y")
        try tokens.delete(2)
        let result = try tokens.getText()
        let expecting = "aby"
        XCTAssertEqual(expecting, result)
    }

    // TODO: Test Fix for https://github.com/antlr/antlr4/issues/550
    func testDistinguishBetweenInsertAfterAndInsertBeforeToPreserverOrder() throws {
        let input = ANTLRInputStream("aa")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(0, "<b>")
        tokens.insertAfter(0, "</b>")
        tokens.insertBefore(1, "<b>")
        tokens.insertAfter(1, "</b>")
        let result = try tokens.getText()
        let expecting = "<b>a</b><b>a</b>" // fails with <b>a<b></b>a</b>"
        XCTAssertEqual(expecting, result)
    }

    func testDistinguishBetweenInsertAfterAndInsertBeforeToPreserverOrder2() throws {
        let input = ANTLRInputStream("aa")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(0, "<p>")
        tokens.insertBefore(0, "<b>")
        tokens.insertAfter(0, "</p>")
        tokens.insertAfter(0, "</b>")
        tokens.insertBefore(1, "<b>")
        tokens.insertAfter(1, "</b>")
        let result = try tokens.getText()
        let expecting = "<b><p>a</p></b><b>a</b>"
        XCTAssertEqual(expecting, result)
    }

    func testPreservesOrderOfContiguousInserts() throws {
        let input = ANTLRInputStream("ab")
        let lexer = LexerA(input)
        let stream = CommonTokenStream(lexer)
        try stream.fill()
        let tokens = TokenStreamRewriter(stream)
        tokens.insertBefore(0, "<p>")
        tokens.insertBefore(0, "<b>")
        tokens.insertBefore(0, "<div>")
        tokens.insertAfter(0, "</p>")
        tokens.insertAfter(0, "</b>")
        tokens.insertAfter(0, "</div>")
        tokens.insertBefore(1, "!")
        let result = try tokens.getText()
        let expecting = "<div><b><p>a</p></b></div>!b"
        XCTAssertEqual(expecting, result)
    }
}
@@ -11,12 +11,12 @@ class TokenStreamTests: XCTestCase {
    func testBufferedTokenStreamClearFetchEOFWithNewSource() throws {
        let inputStream1 = ANTLRInputStream("A")
        let tokenStream = CommonTokenStream(SwiftTestLexer(inputStream1))

        try tokenStream.fill();
        XCTAssertEqual(2, tokenStream.size())
        XCTAssertEqual(SwiftTestLexer.T__0, try tokenStream.get(0).getType())
        XCTAssertEqual(Lexer.EOF, try tokenStream.get(1).getType())

        let inputStream2 = ANTLRInputStream("AA");
        tokenStream.setTokenSource(SwiftTestLexer(inputStream2));
        try tokenStream.fill();
@@ -25,5 +25,5 @@ class TokenStreamTests: XCTestCase {
        XCTAssertEqual(SwiftTestLexer.T__0, try tokenStream.get(1).getType())
        XCTAssertEqual(Lexer.EOF, try tokenStream.get(2).getType())
    }

}