commit 1b11cbed18
Merge remote-tracking branch 'upstream/master'
@@ -395,7 +395,7 @@ public class Antlr4Mojo extends AbstractMojo {
                 String tokensFileName = grammarFile.getName().split("\\.")[0] + ".tokens";
                 File outputFile = new File(outputDirectory, tokensFileName);
                 if ( (! outputFile.exists()) ||
-                        outputFile.lastModified() < grammarFile.lastModified() ||
+                        outputFile.lastModified() <= grammarFile.lastModified() ||
                         dependencies.isDependencyChanged(grammarFile)) {
                     grammarFilesToProcess.add(grammarFile);
                 }
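The hunk above replaces the strict "<" with "<=" when comparing the generated .tokens file against its grammar, so a grammar saved within the same timestamp resolution as the previous build is still treated as stale. A minimal sketch of that decision, using a hypothetical helper name rather than the plugin's actual method:

import java.io.File;

final class StalenessCheck {
    // Returns true when the generated output must be rebuilt from the grammar.
    // Using <= means "same timestamp" counts as stale, which avoids missing an edit
    // made within the file system's timestamp granularity.
    static boolean isStale(File grammarFile, File outputFile) {
        return !outputFile.exists()
            || outputFile.lastModified() <= grammarFile.lastModified();
    }
}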
@@ -412,10 +412,7 @@ public class Antlr4Mojo extends AbstractMojo {
         // Iterate each grammar file we were given and add it into the tool's list of
         // grammars to process.
         for (File grammarFile : grammarFiles) {
-            if (!buildContext.hasDelta(grammarFile)) {
-                continue;
-            }
             buildContext.refresh(grammarFile);
             buildContext.removeMessages(grammarFile);

             getLog().debug("Grammar file '" + grammarFile.getPath() + "' detected.");
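The second hunk drops the early hasDelta() guard, so every grammar handed to the mojo is refreshed and has stale messages cleared; whether a grammar is actually regenerated is now decided by the timestamp/dependency check above. A rough sketch of the resulting flow, with a minimal stand-in interface exposing only the three methods visible in this diff (an assumption, not the plugin's real type):

import java.io.File;

interface IncrementalBuildContext {
    boolean hasDelta(File file);
    void refresh(File file);
    void removeMessages(File file);
}

final class GrammarScan {
    // Before this change, grammars without a workspace delta were skipped entirely;
    // afterwards every grammar is refreshed and cleared, and the separate
    // staleness/dependency check decides what the ANTLR tool actually reprocesses.
    static void prepare(Iterable<File> grammarFiles, IncrementalBuildContext buildContext) {
        for (File grammarFile : grammarFiles) {
            buildContext.refresh(grammarFile);
            buildContext.removeMessages(grammarFile);
        }
    }
}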
@@ -216,14 +216,14 @@ class GrammarDependencies {
             return;

         for (GrammarAST importDecl : grammar.getAllChildrenWithType(ANTLRParser.IMPORT)) {
-            Tree id = importDecl.getFirstChildWithType(ANTLRParser.ID);
-
-            // missing id is not valid, but we don't want to prevent the root cause from
-            // being reported by the ANTLR tool
-            if (id != null) {
-                String grammarPath = getRelativePath(grammarFile);
-
-                graph.addEdge(id.getText() + ".g4", grammarPath);
-            }
+            for (Tree id: importDecl.getAllChildrenWithType(ANTLRParser.ID)) {
+                // missing id is not valid, but we don't want to prevent the root cause from
+                // being reported by the ANTLR tool
+                if (id != null) {
+                    String grammarPath = getRelativePath(grammarFile);
+
+                    graph.addEdge(id.getText() + ".g4", grammarPath);
+                }
+            }
         }

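Iterating getAllChildrenWithType(ANTLRParser.ID) instead of taking only the first ID matters for grammars that import several others in one statement (for example "import TestBaseLexer, TestBaseLexer2;"): each imported name must become an edge in the dependency graph. A simplified sketch of that bookkeeping with a plain map standing in for the plugin's graph class:

import java.util.*;

final class ImportGraphSketch {
    // edges: imported grammar file -> grammars that depend on it
    private final Map<String, Set<String>> dependents = new HashMap<>();

    void addEdge(String importedGrammar, String importingGrammar) {
        dependents.computeIfAbsent(importedGrammar, k -> new HashSet<>()).add(importingGrammar);
    }

    // Record one edge per imported name; "import A, B;" contributes two edges.
    void recordImports(String grammarPath, List<String> importedNames) {
        for (String name : importedNames) {
            addEdge(name + ".g4", grammarPath);
        }
    }

    Set<String> dependentsOf(String grammarFileName) {
        return dependents.getOrDefault(grammarFileName, Collections.emptySet());
    }
}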
@@ -202,6 +202,7 @@ public class Antlr4MojoTest {
         Path genHello = generatedSources.resolve("test/HelloParser.java");

         Path baseGrammar = antlrDir.resolve("imports/TestBaseLexer.g4");
+        Path baseGrammar2 = antlrDir.resolve("imports/TestBaseLexer2.g4");
         Path lexerGrammar = antlrDir.resolve("test/TestLexer.g4");
         Path parserGrammar = antlrDir.resolve("test/TestParser.g4");

@@ -222,21 +223,20 @@
         assertTrue(Files.exists(genHello));
         assertTrue(Files.exists(genTestParser));
         assertTrue(Files.exists(genTestLexer));
+        byte[] origTestLexerSum = checksum(genTestLexer);
+        byte[] origTestParserSum = checksum(genTestParser);
+        byte[] origHelloSum = checksum(genHello);

         ////////////////////////////////////////////////////////////////////////
         // 2nd - nothing has been modified, no grammars have to be processed
         ////////////////////////////////////////////////////////////////////////

         {
-            byte[] testLexerSum = checksum(genTestLexer);
-            byte[] testParserSum = checksum(genTestParser);
-            byte[] helloSum = checksum(genHello);
-
             maven.executeMojo(session, project, exec);

-            assertTrue(Arrays.equals(testLexerSum, checksum(genTestLexer)));
-            assertTrue(Arrays.equals(testParserSum, checksum(genTestParser)));
-            assertTrue(Arrays.equals(helloSum, checksum(genHello)));
+            assertTrue(Arrays.equals(origTestLexerSum, checksum(genTestLexer)));
+            assertTrue(Arrays.equals(origTestParserSum, checksum(genTestParser)));
+            assertTrue(Arrays.equals(origHelloSum, checksum(genHello)));
         }

         ////////////////////////////////////////////////////////////////////////
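The test now captures one set of baseline checksums up front and compares every later run against them by content rather than by timestamp. The checksum(Path) helper is not part of this diff; a plausible implementation simply hashes the file bytes, for example (an assumption, not the test's actual code):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

final class Checksums {
    // Hash the whole file so "regenerated with identical content" and
    // "not regenerated at all" both compare equal to the baseline.
    static byte[] checksum(Path file) throws IOException, NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-1");
        return digest.digest(Files.readAllBytes(file));
    }
}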
@@ -245,50 +245,71 @@

         // modify the grammar to make checksum comparison detect a change
         try(Change change = Change.of(baseGrammar, "DOT: '.' ;")) {
-            byte[] testLexerSum = checksum(genTestLexer);
-            byte[] testParserSum = checksum(genTestParser);
-            byte[] helloSum = checksum(genHello);
-
             maven.executeMojo(session, project, exec);

-            assertFalse(Arrays.equals(testLexerSum, checksum(genTestLexer)));
-            assertFalse(Arrays.equals(testParserSum, checksum(genTestParser)));
-            assertTrue(Arrays.equals(helloSum, checksum(genHello)));
+            assertFalse(Arrays.equals(origTestLexerSum, checksum(genTestLexer)));
+            assertFalse(Arrays.equals(origTestParserSum, checksum(genTestParser)));
+            assertTrue(Arrays.equals(origHelloSum, checksum(genHello)));
         }
+        // Restore file and confirm it was restored.
+        maven.executeMojo(session, project, exec);
+        assertTrue(Arrays.equals(origTestLexerSum, checksum(genTestLexer)));
+        assertTrue(Arrays.equals(origTestParserSum, checksum(genTestParser)));
+        assertTrue(Arrays.equals(origHelloSum, checksum(genHello)));

         ////////////////////////////////////////////////////////////////////////
-        // 4th - the lexer grammar changed, the parser grammar has to be processed as well
+        // 4th - the second imported grammar changed, every dependency has to be processed
         ////////////////////////////////////////////////////////////////////////

         // modify the grammar to make checksum comparison detect a change
-        try(Change change = Change.of(lexerGrammar)) {
-            byte[] testLexerSum = checksum(genTestLexer);
-            byte[] testParserSum = checksum(genTestParser);
-            byte[] helloSum = checksum(genHello);
-
+        try(Change change = Change.of(baseGrammar2, "BANG: '!' ;")) {
             maven.executeMojo(session, project, exec);

-            assertFalse(Arrays.equals(testLexerSum, checksum(genTestLexer)));
-            assertFalse(Arrays.equals(testParserSum, checksum(genTestParser)));
-            assertTrue(Arrays.equals(helloSum, checksum(genHello)));
+            assertFalse(Arrays.equals(origTestLexerSum, checksum(genTestLexer)));
+            assertFalse(Arrays.equals(origTestParserSum, checksum(genTestParser)));
+            assertTrue(Arrays.equals(origHelloSum, checksum(genHello)));
         }
+        // Restore file and confirm it was restored.
+        maven.executeMojo(session, project, exec);
+        assertTrue(Arrays.equals(origTestLexerSum, checksum(genTestLexer)));
+        assertTrue(Arrays.equals(origTestParserSum, checksum(genTestParser)));
+        assertTrue(Arrays.equals(origHelloSum, checksum(genHello)));

         ////////////////////////////////////////////////////////////////////////
-        // 5th - the parser grammar changed, no other grammars have to be processed
+        // 5th - the lexer grammar changed, the parser grammar has to be processed as well
         ////////////////////////////////////////////////////////////////////////

         // modify the grammar to make checksum comparison detect a change
+        try(Change change = Change.of(lexerGrammar, "FOO: 'foo' ;")) {
+            maven.executeMojo(session, project, exec);
+
+            assertFalse(Arrays.equals(origTestLexerSum, checksum(genTestLexer)));
+            assertFalse(Arrays.equals(origTestParserSum, checksum(genTestParser)));
+            assertTrue(Arrays.equals(origHelloSum, checksum(genHello)));
+        }
+        // Restore file and confirm it was restored.
+        maven.executeMojo(session, project, exec);
+        assertTrue(Arrays.equals(origTestLexerSum, checksum(genTestLexer)));
+        assertTrue(Arrays.equals(origTestParserSum, checksum(genTestParser)));
+        assertTrue(Arrays.equals(origHelloSum, checksum(genHello)));
+
+        ////////////////////////////////////////////////////////////////////////
+        // 6th - the parser grammar changed, no other grammars have to be processed
+        ////////////////////////////////////////////////////////////////////////
+
+        // modify the grammar to make checksum comparison detect a change
         try(Change change = Change.of(parserGrammar, " t : WS* ;")) {
-            byte[] testLexerSum = checksum(genTestLexer);
-            byte[] testParserSum = checksum(genTestParser);
-            byte[] helloSum = checksum(genHello);
-
             maven.executeMojo(session, project, exec);

-            assertTrue(Arrays.equals(testLexerSum, checksum(genTestLexer)));
-            assertFalse(Arrays.equals(testParserSum, checksum(genTestParser)));
-            assertTrue(Arrays.equals(helloSum, checksum(genHello)));
+            assertTrue(Arrays.equals(origTestLexerSum, checksum(genTestLexer)));
+            assertFalse(Arrays.equals(origTestParserSum, checksum(genTestParser)));
+            assertTrue(Arrays.equals(origHelloSum, checksum(genHello)));
         }
+        // Restore file and confirm it was restored.
+        maven.executeMojo(session, project, exec);
+        assertTrue(Arrays.equals(origTestLexerSum, checksum(genTestLexer)));
+        assertTrue(Arrays.equals(origTestParserSum, checksum(genTestParser)));
+        assertTrue(Arrays.equals(origHelloSum, checksum(genHello)));
     }

     @Test

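Each scenario appends a small rule to one grammar inside a try-with-resources block, runs the mojo, and then relies on the change being undone so the following "Restore file and confirm it was restored" step can assert the original checksums again. The Change helper itself is outside this diff; a sketch of how such an AutoCloseable could work (names and behavior are assumptions):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

final class GrammarChange implements AutoCloseable {
    private final Path file;
    private final byte[] original;

    private GrammarChange(Path file, byte[] original) {
        this.file = file;
        this.original = original;
    }

    // Append a line to the grammar so its checksum (and timestamp) changes.
    static GrammarChange of(Path grammarFile, String addedText) throws IOException {
        byte[] original = Files.readAllBytes(grammarFile);
        Files.write(grammarFile, (System.lineSeparator() + addedText).getBytes(),
                StandardOpenOption.APPEND);
        return new GrammarChange(grammarFile, original);
    }

    @Override
    public void close() throws IOException {
        Files.write(file, original); // restore the untouched grammar
    }
}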
@@ -10,7 +10,4 @@ fragment
 Whitespace : ' ' | '\n' | '\t' | '\r' ;

 fragment
-Hexdigit : [a-fA-F0-9] ;
-
-fragment
-Digit : [0-9] ;
+Hexdigit : [a-fA-F0-9] ;
@@ -0,0 +1,4 @@
+lexer grammar TestBaseLexer2;
+
+fragment
+Digit : [0-9] ;
@@ -1,6 +1,6 @@
 lexer grammar TestLexer;

-import TestBaseLexer;
+import TestBaseLexer, TestBaseLexer2;

 WS : Whitespace+ -> skip;
-TEXT : ~[<&]+ ; // match any 16 bit char other than < and &
+TEXT : ~[<&]+ ; // match any 16 bit char other than < and &
@@ -151,4 +151,5 @@ YYYY/MM/DD, github id, Full name, email
 2017/06/11, erikbra, Erik A. Brandstadmoen, erik@brandstadmoen.net
 2017/06/10, jm-mikkelsen, Jan Martin Mikkelsen, janm@transactionware.com
 2017/06/25, alimg, Alim Gökkaya, alim.gokkaya@gmail.com
-2017/07/07, shirou, WAKAYAMA Shirou, shirou.faw@gmail.com
+2017/07/11, dhalperi, Daniel Halperin, daniel@halper.in
+2017/07/27, shirou, WAKAYAMA Shirou, shirou.faw@gmail.com
@@ -91,7 +91,7 @@ using namespace antlr4::misc;

 - (void)testANTLRInputStreamUse {
   std::string text(u8"🚧Lorem ipsum dolor sit amet🕶");
-  std::u32string wtext = utfConverter.from_bytes(text); // Convert to UTF-32.
+  std::u32string wtext = utf8_to_utf32(text.c_str(), text.c_str() + text.size()); // Convert to UTF-32.
   ANTLRInputStream stream(text);
   XCTAssertEqual(stream.index(), 0U);
   XCTAssertEqual(stream.size(), wtext.size());
@@ -116,8 +116,8 @@ using namespace antlr4::misc;

   XCTAssertEqual(stream.LA(0), 0ULL);
   for (size_t i = 1; i < wtext.size(); ++i) {
-    XCTAssertEqual(stream.LA((ssize_t)i), wtext[i - 1]); // LA(1) means: current char.
-    XCTAssertEqual(stream.LT((ssize_t)i), wtext[i - 1]); // LT is mapped to LA.
+    XCTAssertEqual(stream.LA(static_cast<ssize_t>(i)), wtext[i - 1]); // LA(1) means: current char.
+    XCTAssertEqual(stream.LT(static_cast<ssize_t>(i)), wtext[i - 1]); // LT is mapped to LA.
     XCTAssertEqual(stream.index(), 0U); // No consumption when looking ahead.
   }

@@ -128,7 +128,7 @@ using namespace antlr4::misc;
   XCTAssertEqual(stream.index(), wtext.size() / 2);

   stream.seek(wtext.size() - 1);
-  for (ssize_t i = 1; i < (ssize_t)wtext.size() - 1; ++i) {
+  for (ssize_t i = 1; i < static_cast<ssize_t>(wtext.size()) - 1; ++i) {
     XCTAssertEqual(stream.LA(-i), wtext[wtext.size() - i - 1]); // LA(-1) means: previous char.
     XCTAssertEqual(stream.LT(-i), wtext[wtext.size() - i - 1]); // LT is mapped to LA.
     XCTAssertEqual(stream.index(), wtext.size() - 1); // No consumption when looking ahead.
@@ -150,7 +150,7 @@ using namespace antlr4::misc;

   misc::Interval interval1(2, 10UL); // From - to, inclusive.
   std::string output = stream.getText(interval1);
-  std::string sub = utfConverter.to_bytes(wtext.substr(2, 9));
+  std::string sub = utf32_to_utf8(wtext.substr(2, 9));
   XCTAssertEqual(output, sub);

   misc::Interval interval2(200, 10UL); // Start beyond bounds.
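These runtime tests moved from a std::wstring_convert-based utfConverter to the runtime's own utf8_to_utf32/utf32_to_utf8 helpers, so sizes, lookups, and getText() slices are expressed in Unicode code points rather than bytes or UTF-16 units. As a point of comparison only, the equivalent round trip in Java, used here to illustrate the code-point view of a string:

final class CodePointsDemo {
    public static void main(String[] args) {
        String text = "\uD83D\uDEA7Lorem ipsum dolor sit amet\uD83D\uDD76"; // 🚧 ... 🕶
        int[] codePoints = text.codePoints().toArray();        // the "UTF-32" view
        String roundTripped = new String(codePoints, 0, codePoints.length);

        // The code-point count is what stream.size() reports after this change,
        // and it differs from the UTF-16 length whenever surrogate pairs are present.
        System.out.println(codePoints.length + " code points, "
                + text.length() + " UTF-16 units, round trip equal: "
                + text.equals(roundTripped));
    }
}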
@@ -92,7 +92,7 @@ using namespace antlrcpp;
   // in a deterministic and a random sequence of 100K values each.
   std::set<size_t> hashs;
   for (size_t i = 0; i < 100000; ++i) {
-    std::vector<size_t> data = { i, (size_t)(i * M_PI), arc4random()};
+    std::vector<size_t> data = { i, static_cast<size_t>(i * M_PI), arc4random() };
     size_t hash = 0;
     for (auto value : data)
       hash = MurmurHash::update(hash, value);
@@ -103,7 +103,7 @@ using namespace antlrcpp;

   hashs.clear();
   for (size_t i = 0; i < 100000; ++i) {
-    std::vector<size_t> data = { i, (size_t)(i * M_PI)};
+    std::vector<size_t> data = { i, static_cast<size_t>(i * M_PI) };
     size_t hash = 0;
     for (auto value : data)
       hash = MurmurHash::update(hash, value);
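MurmurHash::update/finish folds a sequence of values into one hash; the test feeds roughly 100K short vectors through it and checks for collisions. A compact Java sketch of the same update/finish pattern, using MurmurHash3-style 32-bit mixing (a simplified illustration, not the C++ runtime's exact implementation):

final class Murmur3Sketch {
    static int update(int hash, int value) {
        int k = value * 0xcc9e2d51;
        k = Integer.rotateLeft(k, 15);
        k *= 0x1b873593;
        hash ^= k;
        hash = Integer.rotateLeft(hash, 13);
        return hash * 5 + 0xe6546b64;
    }

    static int finish(int hash, int count) {
        hash ^= count * 4;          // input length in bytes
        hash ^= hash >>> 16;
        hash *= 0x85ebca6b;
        hash ^= hash >>> 13;
        hash *= 0xc2b2ae35;
        return hash ^ (hash >>> 16);
    }

    public static void main(String[] args) {
        int hash = 0;
        for (int value : new int[] { 1, 3, 7 }) {
            hash = update(hash, value);   // same accumulate-then-finish pattern as the test
        }
        System.out.printf("combined hash: 0x%08x%n", finish(hash, 3));
    }
}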
@@ -232,19 +232,25 @@ using namespace antlrcpp;
     { 78, Interval(1000, 1000UL), Interval(20, 100UL), { false, false, true, true, false, true, false, false } },

     // It's possible to add more tests with borders that touch each other (e.g. first starts before/on/after second
-    // and first ends directly before/after second. However, such cases are not handled differently in the Interval class
+    // and first ends directly before/after second. However, such cases are not handled differently in the Interval
+    // class
     // (only adjacent intervals, where first ends directly before second starts and vice versa. So I ommitted them here.
   };

   for (auto &entry : testData) {
-    XCTAssert(entry.interval1.startsBeforeDisjoint(entry.interval2) == entry.results[0], @"entry: %zu", entry.runningNumber);
-    XCTAssert(entry.interval1.startsBeforeNonDisjoint(entry.interval2) == entry.results[1], @"entry: %zu", entry.runningNumber);
+    XCTAssert(entry.interval1.startsBeforeDisjoint(entry.interval2) == entry.results[0], @"entry: %zu",
+              entry.runningNumber);
+    XCTAssert(entry.interval1.startsBeforeNonDisjoint(entry.interval2) == entry.results[1], @"entry: %zu",
+              entry.runningNumber);
     XCTAssert(entry.interval1.startsAfter(entry.interval2) == entry.results[2], @"entry: %zu", entry.runningNumber);
-    XCTAssert(entry.interval1.startsAfterDisjoint(entry.interval2) == entry.results[3], @"entry: %zu", entry.runningNumber);
-    XCTAssert(entry.interval1.startsAfterNonDisjoint(entry.interval2) == entry.results[4], @"entry: %zu", entry.runningNumber);
+    XCTAssert(entry.interval1.startsAfterDisjoint(entry.interval2) == entry.results[3], @"entry: %zu",
+              entry.runningNumber);
+    XCTAssert(entry.interval1.startsAfterNonDisjoint(entry.interval2) == entry.results[4], @"entry: %zu",
+              entry.runningNumber);
     XCTAssert(entry.interval1.disjoint(entry.interval2) == entry.results[5], @"entry: %zu", entry.runningNumber);
     XCTAssert(entry.interval1.adjacent(entry.interval2) == entry.results[6], @"entry: %zu", entry.runningNumber);
-    XCTAssert(entry.interval1.properlyContains(entry.interval2) == entry.results[7], @"entry: %zu", entry.runningNumber);
+    XCTAssert(entry.interval1.properlyContains(entry.interval2) == entry.results[7], @"entry: %zu",
+              entry.runningNumber);
   }

   XCTAssert(Interval().Union(Interval(10, 100UL)) == Interval(-1L, 100));
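The table drives eight predicates per interval pair (startsBeforeDisjoint, adjacent, properlyContains, and so on) over inclusive from/to intervals. A small Java sketch of a few of those relations, to make the expected booleans easier to read; this is my own helper mirroring, but not copying, the runtime's Interval class (requires Java 16+ for records):

record Span(long a, long b) { // inclusive bounds, like misc::Interval

    // this lies entirely before other, with no shared positions
    boolean startsBeforeDisjoint(Span other) { return a < other.a && b < other.a; }

    // this ends exactly one position before other starts, or vice versa
    boolean adjacent(Span other) { return a == other.b + 1 || b == other.a - 1; }

    boolean disjoint(Span other) { return startsBeforeDisjoint(other) || other.startsBeforeDisjoint(this); }

    boolean properlyContains(Span other) { return other.a >= a && other.b <= b; }
}

class SpanDemo {
    public static void main(String[] args) {
        Span first = new Span(1000, 1000), second = new Span(20, 100);
        // matches the runningNumber 78 row above: disjoint=true, adjacent=false, properlyContains=false
        System.out.println(first.disjoint(second) + " " + first.adjacent(second)
                + " " + first.properlyContains(second));
    }
}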
@@ -327,30 +333,34 @@ using namespace antlrcpp;
   try {
     set4.clear();
     XCTFail(@"Expected exception");
-  }
-  catch (IllegalStateException &e) {
+  } catch (IllegalStateException &e) {
   }

   try {
     set4.setReadOnly(false);
     XCTFail(@"Expected exception");
+  } catch (IllegalStateException &e) {
   }
-  catch (IllegalStateException &e) {
-  }

-  set4 = IntervalSet::of(12345);
-  XCTAssertEqual(set4.getSingleElement(), 12345);
-  XCTAssertEqual(set4.getMinElement(), 12345);
-  XCTAssertEqual(set4.getMaxElement(), 12345);
-
-  IntervalSet set5(10, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50);
-  XCTAssertEqual(set5.getMinElement(), 5);
-  XCTAssertEqual(set5.getMaxElement(), 50);
-  XCTAssertEqual(set5.size(), 10U);
-  set5.add(12, 18);
-  XCTAssertEqual(set5.size(), 16U); // (15, 15) replaced by (12, 18)
-  set5.add(9, 33);
-  XCTAssertEqual(set5.size(), 30U); // (10, 10), (12, 18), (20, 20), (25, 25) and (30, 30) replaced by (9, 33)
+  try {
+    set4 = IntervalSet::of(12345);
+    XCTFail(@"Expected exception");
+  } catch (IllegalStateException &e) {
+  }
+
+  IntervalSet set5 = IntervalSet::of(12345);
+  XCTAssertEqual(set5.getSingleElement(), 12345);
+  XCTAssertEqual(set5.getMinElement(), 12345);
+  XCTAssertEqual(set5.getMaxElement(), 12345);
+
+  IntervalSet set6(10, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50);
+  XCTAssertEqual(set6.getMinElement(), 5);
+  XCTAssertEqual(set6.getMaxElement(), 50);
+  XCTAssertEqual(set6.size(), 10U);
+  set6.add(12, 18);
+  XCTAssertEqual(set6.size(), 16U); // (15, 15) replaced by (12, 18)
+  set6.add(9, 33);
+  XCTAssertEqual(set6.size(), 30U); // (10, 10), (12, 18), (20, 20), (25, 25) and (30, 30) replaced by (9, 33)

   XCTAssert(IntervalSet(3, 1, 2, 10).Or(IntervalSet(3, 1, 2, 5)) == IntervalSet(4, 1, 2, 5, 10));
   XCTAssert(IntervalSet({ Interval(2, 10UL) }).Or(IntervalSet({ Interval(5, 8UL) })) == IntervalSet({ Interval(2, 10UL) }));
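The size() assertions encode two properties of IntervalSet: size() counts contained values, and add() merges overlapping or adjacent ranges. Starting from the ten singletons 5, 10, ..., 50 (size 10), adding [12,18] absorbs the singleton 15 (size 16), and adding [9,33] absorbs 10, [12,18], 20, 25 and 30 (size 1 + 25 + 4 = 30). A tiny Java illustration of that counting, using a BitSet rather than the runtime's interval representation:

import java.util.BitSet;

class IntervalSetCounting {
    public static void main(String[] args) {
        BitSet values = new BitSet();
        for (int v = 5; v <= 50; v += 5) values.set(v);   // ten singletons
        System.out.println(values.cardinality());          // 10

        values.set(12, 19);                                 // add [12,18]; 15 is absorbed
        System.out.println(values.cardinality());          // 16

        values.set(9, 34);                                  // add [9,33]; 10, 20, 25, 30 absorbed
        System.out.println(values.cardinality());          // 30
    }
}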
@@ -358,8 +368,10 @@ using namespace antlrcpp;
   XCTAssert(IntervalSet::of(1, 10).complement(IntervalSet::of(7, 55)) == IntervalSet::of(11, 55));
   XCTAssert(IntervalSet::of(1, 10).complement(IntervalSet::of(20, 55)) == IntervalSet::of(20, 55));
   XCTAssert(IntervalSet::of(1, 10).complement(IntervalSet::of(5, 6)) == IntervalSet::EMPTY_SET);
-  XCTAssert(IntervalSet::of(15, 20).complement(IntervalSet::of(7, 55)) == IntervalSet({ Interval(7, 14UL), Interval(21, 55UL) }));
-  XCTAssert(IntervalSet({ Interval(1, 10UL), Interval(30, 35UL) }).complement(IntervalSet::of(7, 55)) == IntervalSet({ Interval(11, 29UL), Interval(36, 55UL) }));
+  XCTAssert(IntervalSet::of(15, 20).complement(IntervalSet::of(7, 55)) ==
+            IntervalSet({ Interval(7, 14UL), Interval(21, 55UL) }));
+  XCTAssert(IntervalSet({ Interval(1, 10UL), Interval(30, 35UL) }).complement(IntervalSet::of(7, 55)) ==
+            IntervalSet({ Interval(11, 29UL), Interval(36, 55UL) }));

   XCTAssert(IntervalSet::of(1, 10).And(IntervalSet::of(7, 55)) == IntervalSet::of(7, 10));
   XCTAssert(IntervalSet::of(1, 10).And(IntervalSet::of(20, 55)) == IntervalSet::EMPTY_SET);
@@ -368,7 +380,8 @@ using namespace antlrcpp;

   XCTAssert(IntervalSet::of(1, 10).subtract(IntervalSet::of(7, 55)) == IntervalSet::of(1, 6));
   XCTAssert(IntervalSet::of(1, 10).subtract(IntervalSet::of(20, 55)) == IntervalSet::of(1, 10));
-  XCTAssert(IntervalSet::of(1, 10).subtract(IntervalSet::of(5, 6)) == IntervalSet({ Interval(1, 4UL), Interval(7, 10UL) }));
+  XCTAssert(IntervalSet::of(1, 10).subtract(IntervalSet::of(5, 6)) ==
+            IntervalSet({ Interval(1, 4UL), Interval(7, 10UL) }));
   XCTAssert(IntervalSet::of(15, 20).subtract(IntervalSet::of(7, 55)) == IntervalSet::EMPTY_SET);
 }

@@ -12,7 +12,8 @@ rem Headers
 xcopy runtime\src\*.h antlr4-runtime\ /s

 rem Binaries
-if exist "C:\Program Files (x86)\Microsoft Visual Studio 12.0\Common7\Tools\VsDevCmd.bat" (
+rem VS 2013 disabled by default. Change the X to a C to enable it.
+if exist "X:\Program Files (x86)\Microsoft Visual Studio 12.0\Common7\Tools\VsDevCmd.bat" (
   call "C:\Program Files (x86)\Microsoft Visual Studio 12.0\Common7\Tools\VsDevCmd.bat"

   pushd runtime
@@ -321,6 +321,8 @@
     </Link>
   </ItemDefinitionGroup>
   <ItemGroup>
+    <ClCompile Include="src\ANTLRErrorListener.cpp" />
+    <ClCompile Include="src\ANTLRErrorStrategy.cpp" />
     <ClCompile Include="src\ANTLRFileStream.cpp" />
     <ClCompile Include="src\ANTLRInputStream.cpp" />
     <ClCompile Include="src\atn\AbstractPredicateTransition.cpp" />
@@ -339,6 +341,7 @@
     <ClCompile Include="src\atn\BasicBlockStartState.cpp" />
     <ClCompile Include="src\atn\BasicState.cpp" />
     <ClCompile Include="src\atn\BlockEndState.cpp" />
+    <ClCompile Include="src\atn\BlockStartState.cpp" />
     <ClCompile Include="src\atn\ContextSensitivityInfo.cpp" />
     <ClCompile Include="src\atn\DecisionEventInfo.cpp" />
     <ClCompile Include="src\atn\DecisionInfo.cpp" />
@@ -346,6 +349,7 @@
     <ClCompile Include="src\atn\EmptyPredictionContext.cpp" />
     <ClCompile Include="src\atn\EpsilonTransition.cpp" />
     <ClCompile Include="src\atn\ErrorInfo.cpp" />
+    <ClCompile Include="src\atn\LexerAction.cpp" />
     <ClCompile Include="src\atn\LexerActionExecutor.cpp" />
     <ClCompile Include="src\atn\LexerATNConfig.cpp" />
     <ClCompile Include="src\atn\LexerATNSimulator.cpp" />
@@ -412,6 +416,7 @@
     <ClCompile Include="src\misc\Interval.cpp" />
     <ClCompile Include="src\misc\IntervalSet.cpp" />
     <ClCompile Include="src\misc\MurmurHash.cpp" />
+    <ClCompile Include="src\misc\Predicate.cpp" />
     <ClCompile Include="src\NoViableAltException.cpp" />
     <ClCompile Include="src\Parser.cpp" />
     <ClCompile Include="src\ParserInterpreter.cpp" />
@@ -422,16 +427,23 @@
     <ClCompile Include="src\RuleContext.cpp" />
     <ClCompile Include="src\RuleContextWithAltNum.cpp" />
     <ClCompile Include="src\RuntimeMetaData.cpp" />
+    <ClCompile Include="src\support\Any.cpp" />
     <ClCompile Include="src\support\Arrays.cpp" />
     <ClCompile Include="src\support\CPPUtils.cpp" />
     <ClCompile Include="src\support\guid.cpp" />
     <ClCompile Include="src\support\StringUtils.cpp" />
+    <ClCompile Include="src\Token.cpp" />
+    <ClCompile Include="src\TokenSource.cpp" />
     <ClCompile Include="src\TokenStream.cpp" />
     <ClCompile Include="src\TokenStreamRewriter.cpp" />
+    <ClCompile Include="src\tree\ErrorNode.cpp" />
     <ClCompile Include="src\tree\ErrorNodeImpl.cpp" />
     <ClCompile Include="src\tree\IterativeParseTreeWalker.cpp" />
     <ClCompile Include="src\tree\ParseTree.cpp" />
+    <ClCompile Include="src\tree\ParseTreeListener.cpp" />
+    <ClCompile Include="src\tree\ParseTreeVisitor.cpp" />
     <ClCompile Include="src\tree\ParseTreeWalker.cpp" />
+    <ClCompile Include="src\tree\pattern\Chunk.cpp" />
     <ClCompile Include="src\tree\pattern\ParseTreeMatch.cpp" />
     <ClCompile Include="src\tree\pattern\ParseTreePattern.cpp" />
     <ClCompile Include="src\tree\pattern\ParseTreePatternMatcher.cpp" />
@@ -439,6 +451,7 @@
     <ClCompile Include="src\tree\pattern\TagChunk.cpp" />
     <ClCompile Include="src\tree\pattern\TextChunk.cpp" />
     <ClCompile Include="src\tree\pattern\TokenTagToken.cpp" />
+    <ClCompile Include="src\tree\TerminalNode.cpp" />
     <ClCompile Include="src\tree\TerminalNodeImpl.cpp" />
     <ClCompile Include="src\tree\Trees.cpp" />
     <ClCompile Include="src\tree\xpath\XPath.cpp" />
@@ -454,6 +467,7 @@
     <ClCompile Include="src\UnbufferedCharStream.cpp" />
     <ClCompile Include="src\UnbufferedTokenStream.cpp" />
     <ClCompile Include="src\Vocabulary.cpp" />
+    <ClCompile Include="src\WritableToken.cpp" />
   </ItemGroup>
   <ItemGroup>
     <ClInclude Include="src\antlr4-common.h" />
@@ -620,4 +634,4 @@
   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
   <ImportGroup Label="ExtensionTargets">
   </ImportGroup>
-</Project>
+</Project>
@@ -938,5 +938,47 @@
     <ClCompile Include="src\tree\IterativeParseTreeWalker.cpp">
       <Filter>Source Files\tree</Filter>
     </ClCompile>
+    <ClCompile Include="src\ANTLRErrorListener.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\ANTLRErrorStrategy.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\Token.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\TokenSource.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\WritableToken.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\tree\ErrorNode.cpp">
+      <Filter>Source Files\tree</Filter>
+    </ClCompile>
+    <ClCompile Include="src\tree\ParseTreeListener.cpp">
+      <Filter>Source Files\tree</Filter>
+    </ClCompile>
+    <ClCompile Include="src\tree\ParseTreeVisitor.cpp">
+      <Filter>Source Files\tree</Filter>
+    </ClCompile>
+    <ClCompile Include="src\tree\TerminalNode.cpp">
+      <Filter>Source Files\tree</Filter>
+    </ClCompile>
+    <ClCompile Include="src\support\Any.cpp">
+      <Filter>Source Files\support</Filter>
+    </ClCompile>
+    <ClCompile Include="src\atn\BlockStartState.cpp">
+      <Filter>Source Files\atn</Filter>
+    </ClCompile>
+    <ClCompile Include="src\atn\LexerAction.cpp">
+      <Filter>Source Files\atn</Filter>
+    </ClCompile>
+    <ClCompile Include="src\tree\pattern\Chunk.cpp">
+      <Filter>Source Files\tree\pattern</Filter>
+    </ClCompile>
+    <ClCompile Include="src\misc\Predicate.cpp">
+      <Filter>Source Files\misc</Filter>
+    </ClCompile>
   </ItemGroup>
-</Project>
+</Project>
@@ -334,6 +334,8 @@
     </Link>
   </ItemDefinitionGroup>
   <ItemGroup>
+    <ClCompile Include="src\ANTLRErrorListener.cpp" />
+    <ClCompile Include="src\ANTLRErrorStrategy.cpp" />
     <ClCompile Include="src\ANTLRFileStream.cpp" />
     <ClCompile Include="src\ANTLRInputStream.cpp" />
     <ClCompile Include="src\atn\AbstractPredicateTransition.cpp" />
@@ -352,6 +354,7 @@
     <ClCompile Include="src\atn\BasicBlockStartState.cpp" />
     <ClCompile Include="src\atn\BasicState.cpp" />
     <ClCompile Include="src\atn\BlockEndState.cpp" />
+    <ClCompile Include="src\atn\BlockStartState.cpp" />
     <ClCompile Include="src\atn\ContextSensitivityInfo.cpp" />
     <ClCompile Include="src\atn\DecisionEventInfo.cpp" />
     <ClCompile Include="src\atn\DecisionInfo.cpp" />
@@ -359,6 +362,7 @@
     <ClCompile Include="src\atn\EmptyPredictionContext.cpp" />
     <ClCompile Include="src\atn\EpsilonTransition.cpp" />
     <ClCompile Include="src\atn\ErrorInfo.cpp" />
+    <ClCompile Include="src\atn\LexerAction.cpp" />
     <ClCompile Include="src\atn\LexerActionExecutor.cpp" />
     <ClCompile Include="src\atn\LexerATNConfig.cpp" />
     <ClCompile Include="src\atn\LexerATNSimulator.cpp" />
@@ -425,6 +429,7 @@
     <ClCompile Include="src\misc\Interval.cpp" />
     <ClCompile Include="src\misc\IntervalSet.cpp" />
     <ClCompile Include="src\misc\MurmurHash.cpp" />
+    <ClCompile Include="src\misc\Predicate.cpp" />
     <ClCompile Include="src\NoViableAltException.cpp" />
     <ClCompile Include="src\Parser.cpp" />
     <ClCompile Include="src\ParserInterpreter.cpp" />
@@ -435,16 +440,23 @@
     <ClCompile Include="src\RuleContext.cpp" />
     <ClCompile Include="src\RuleContextWithAltNum.cpp" />
     <ClCompile Include="src\RuntimeMetaData.cpp" />
+    <ClCompile Include="src\support\Any.cpp" />
     <ClCompile Include="src\support\Arrays.cpp" />
     <ClCompile Include="src\support\CPPUtils.cpp" />
     <ClCompile Include="src\support\guid.cpp" />
     <ClCompile Include="src\support\StringUtils.cpp" />
+    <ClCompile Include="src\Token.cpp" />
+    <ClCompile Include="src\TokenSource.cpp" />
     <ClCompile Include="src\TokenStream.cpp" />
     <ClCompile Include="src\TokenStreamRewriter.cpp" />
+    <ClCompile Include="src\tree\ErrorNode.cpp" />
     <ClCompile Include="src\tree\ErrorNodeImpl.cpp" />
     <ClCompile Include="src\tree\IterativeParseTreeWalker.cpp" />
     <ClCompile Include="src\tree\ParseTree.cpp" />
+    <ClCompile Include="src\tree\ParseTreeListener.cpp" />
+    <ClCompile Include="src\tree\ParseTreeVisitor.cpp" />
     <ClCompile Include="src\tree\ParseTreeWalker.cpp" />
+    <ClCompile Include="src\tree\pattern\Chunk.cpp" />
     <ClCompile Include="src\tree\pattern\ParseTreeMatch.cpp" />
     <ClCompile Include="src\tree\pattern\ParseTreePattern.cpp" />
     <ClCompile Include="src\tree\pattern\ParseTreePatternMatcher.cpp" />
@@ -452,6 +464,7 @@
     <ClCompile Include="src\tree\pattern\TagChunk.cpp" />
     <ClCompile Include="src\tree\pattern\TextChunk.cpp" />
     <ClCompile Include="src\tree\pattern\TokenTagToken.cpp" />
+    <ClCompile Include="src\tree\TerminalNode.cpp" />
     <ClCompile Include="src\tree\TerminalNodeImpl.cpp" />
     <ClCompile Include="src\tree\Trees.cpp" />
     <ClCompile Include="src\tree\xpath\XPath.cpp" />
@@ -467,6 +480,7 @@
     <ClCompile Include="src\UnbufferedCharStream.cpp" />
     <ClCompile Include="src\UnbufferedTokenStream.cpp" />
     <ClCompile Include="src\Vocabulary.cpp" />
+    <ClCompile Include="src\WritableToken.cpp" />
   </ItemGroup>
   <ItemGroup>
     <ClInclude Include="src\antlr4-common.h" />
@@ -633,4 +647,4 @@
   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
   <ImportGroup Label="ExtensionTargets">
   </ImportGroup>
-</Project>
+</Project>
@@ -938,5 +938,47 @@
     <ClCompile Include="src\tree\IterativeParseTreeWalker.cpp">
       <Filter>Source Files\tree</Filter>
     </ClCompile>
+    <ClCompile Include="src\ANTLRErrorListener.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\ANTLRErrorStrategy.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\atn\BlockStartState.cpp">
+      <Filter>Source Files\atn</Filter>
+    </ClCompile>
+    <ClCompile Include="src\atn\LexerAction.cpp">
+      <Filter>Source Files\atn</Filter>
+    </ClCompile>
+    <ClCompile Include="src\misc\Predicate.cpp">
+      <Filter>Source Files\misc</Filter>
+    </ClCompile>
+    <ClCompile Include="src\Token.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\TokenSource.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\WritableToken.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\support\Any.cpp">
+      <Filter>Source Files\support</Filter>
+    </ClCompile>
+    <ClCompile Include="src\tree\ErrorNode.cpp">
+      <Filter>Source Files\tree</Filter>
+    </ClCompile>
+    <ClCompile Include="src\tree\ParseTreeListener.cpp">
+      <Filter>Source Files\tree</Filter>
+    </ClCompile>
+    <ClCompile Include="src\tree\ParseTreeVisitor.cpp">
+      <Filter>Source Files\tree</Filter>
+    </ClCompile>
+    <ClCompile Include="src\tree\TerminalNode.cpp">
+      <Filter>Source Files\tree</Filter>
+    </ClCompile>
+    <ClCompile Include="src\tree\pattern\Chunk.cpp">
+      <Filter>Source Files\tree\pattern</Filter>
+    </ClCompile>
   </ItemGroup>
-</Project>
+</Project>
@ -534,9 +534,6 @@
|
|||
276E5F411CDB57AA003FF4B4 /* IntStream.h in Headers */ = {isa = PBXBuildFile; fileRef = 276E5CBF1CDB57AA003FF4B4 /* IntStream.h */; };
|
||||
276E5F421CDB57AA003FF4B4 /* IntStream.h in Headers */ = {isa = PBXBuildFile; fileRef = 276E5CBF1CDB57AA003FF4B4 /* IntStream.h */; };
|
||||
276E5F431CDB57AA003FF4B4 /* IntStream.h in Headers */ = {isa = PBXBuildFile; fileRef = 276E5CBF1CDB57AA003FF4B4 /* IntStream.h */; settings = {ATTRIBUTES = (Public, ); }; };
|
||||
276E5F441CDB57AA003FF4B4 /* IRecognizer.h in Headers */ = {isa = PBXBuildFile; fileRef = 276E5CC01CDB57AA003FF4B4 /* IRecognizer.h */; };
|
||||
276E5F451CDB57AA003FF4B4 /* IRecognizer.h in Headers */ = {isa = PBXBuildFile; fileRef = 276E5CC01CDB57AA003FF4B4 /* IRecognizer.h */; };
|
||||
276E5F461CDB57AA003FF4B4 /* IRecognizer.h in Headers */ = {isa = PBXBuildFile; fileRef = 276E5CC01CDB57AA003FF4B4 /* IRecognizer.h */; settings = {ATTRIBUTES = (Public, ); }; };
|
||||
276E5F471CDB57AA003FF4B4 /* Lexer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 276E5CC11CDB57AA003FF4B4 /* Lexer.cpp */; };
|
||||
276E5F481CDB57AA003FF4B4 /* Lexer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 276E5CC11CDB57AA003FF4B4 /* Lexer.cpp */; };
|
||||
276E5F491CDB57AA003FF4B4 /* Lexer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 276E5CC11CDB57AA003FF4B4 /* Lexer.cpp */; };
|
||||
|
@ -800,6 +797,45 @@
|
|||
27745F081CE49C000067C6A3 /* RuntimeMetaData.h in Headers */ = {isa = PBXBuildFile; fileRef = 27745EFC1CE49C000067C6A3 /* RuntimeMetaData.h */; };
|
||||
27874F1E1CCB7A0700AF1C53 /* CoreFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 27874F1D1CCB7A0700AF1C53 /* CoreFoundation.framework */; };
|
||||
27874F211CCB7B1700AF1C53 /* CoreFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 27874F1D1CCB7A0700AF1C53 /* CoreFoundation.framework */; };
|
||||
2793DC851F08083F00A84290 /* TokenSource.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC841F08083F00A84290 /* TokenSource.cpp */; };
|
||||
2793DC861F08083F00A84290 /* TokenSource.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC841F08083F00A84290 /* TokenSource.cpp */; };
|
||||
2793DC871F08083F00A84290 /* TokenSource.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC841F08083F00A84290 /* TokenSource.cpp */; };
|
||||
2793DC891F08087500A84290 /* Chunk.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC881F08087500A84290 /* Chunk.cpp */; };
|
||||
2793DC8A1F08087500A84290 /* Chunk.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC881F08087500A84290 /* Chunk.cpp */; };
|
||||
2793DC8B1F08087500A84290 /* Chunk.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC881F08087500A84290 /* Chunk.cpp */; };
|
||||
2793DC8D1F08088F00A84290 /* ParseTreeListener.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC8C1F08088F00A84290 /* ParseTreeListener.cpp */; };
|
||||
2793DC8E1F08088F00A84290 /* ParseTreeListener.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC8C1F08088F00A84290 /* ParseTreeListener.cpp */; };
|
||||
2793DC8F1F08088F00A84290 /* ParseTreeListener.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC8C1F08088F00A84290 /* ParseTreeListener.cpp */; };
|
||||
2793DC911F0808A200A84290 /* TerminalNode.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC901F0808A200A84290 /* TerminalNode.cpp */; };
|
||||
2793DC921F0808A200A84290 /* TerminalNode.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC901F0808A200A84290 /* TerminalNode.cpp */; };
|
||||
2793DC931F0808A200A84290 /* TerminalNode.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC901F0808A200A84290 /* TerminalNode.cpp */; };
|
||||
2793DC961F0808E100A84290 /* ErrorNode.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC941F0808E100A84290 /* ErrorNode.cpp */; };
|
||||
2793DC971F0808E100A84290 /* ErrorNode.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC941F0808E100A84290 /* ErrorNode.cpp */; };
|
||||
2793DC981F0808E100A84290 /* ErrorNode.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC941F0808E100A84290 /* ErrorNode.cpp */; };
|
||||
2793DC991F0808E100A84290 /* ParseTreeVisitor.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC951F0808E100A84290 /* ParseTreeVisitor.cpp */; };
|
||||
2793DC9A1F0808E100A84290 /* ParseTreeVisitor.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC951F0808E100A84290 /* ParseTreeVisitor.cpp */; };
|
||||
2793DC9B1F0808E100A84290 /* ParseTreeVisitor.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC951F0808E100A84290 /* ParseTreeVisitor.cpp */; };
|
||||
2793DC9D1F08090D00A84290 /* Any.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC9C1F08090D00A84290 /* Any.cpp */; };
|
||||
2793DC9E1F08090D00A84290 /* Any.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC9C1F08090D00A84290 /* Any.cpp */; };
|
||||
2793DC9F1F08090D00A84290 /* Any.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DC9C1F08090D00A84290 /* Any.cpp */; };
|
||||
2793DCA41F08095F00A84290 /* ANTLRErrorListener.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA01F08095F00A84290 /* ANTLRErrorListener.cpp */; };
|
||||
2793DCA51F08095F00A84290 /* ANTLRErrorListener.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA01F08095F00A84290 /* ANTLRErrorListener.cpp */; };
|
||||
2793DCA61F08095F00A84290 /* ANTLRErrorListener.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA01F08095F00A84290 /* ANTLRErrorListener.cpp */; };
|
||||
2793DCA71F08095F00A84290 /* ANTLRErrorStrategy.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA11F08095F00A84290 /* ANTLRErrorStrategy.cpp */; };
|
||||
2793DCA81F08095F00A84290 /* ANTLRErrorStrategy.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA11F08095F00A84290 /* ANTLRErrorStrategy.cpp */; };
|
||||
2793DCA91F08095F00A84290 /* ANTLRErrorStrategy.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA11F08095F00A84290 /* ANTLRErrorStrategy.cpp */; };
|
||||
2793DCAA1F08095F00A84290 /* Token.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA21F08095F00A84290 /* Token.cpp */; };
|
||||
2793DCAB1F08095F00A84290 /* Token.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA21F08095F00A84290 /* Token.cpp */; };
|
||||
2793DCAC1F08095F00A84290 /* Token.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA21F08095F00A84290 /* Token.cpp */; };
|
||||
2793DCAD1F08095F00A84290 /* WritableToken.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA31F08095F00A84290 /* WritableToken.cpp */; };
|
||||
2793DCAE1F08095F00A84290 /* WritableToken.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA31F08095F00A84290 /* WritableToken.cpp */; };
|
||||
2793DCAF1F08095F00A84290 /* WritableToken.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCA31F08095F00A84290 /* WritableToken.cpp */; };
|
||||
2793DCB31F08099C00A84290 /* BlockStartState.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCB01F08099C00A84290 /* BlockStartState.cpp */; };
|
||||
2793DCB41F08099C00A84290 /* BlockStartState.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCB01F08099C00A84290 /* BlockStartState.cpp */; };
|
||||
2793DCB51F08099C00A84290 /* BlockStartState.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCB01F08099C00A84290 /* BlockStartState.cpp */; };
|
||||
2793DCB61F08099C00A84290 /* LexerAction.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCB11F08099C00A84290 /* LexerAction.cpp */; };
|
||||
2793DCB71F08099C00A84290 /* LexerAction.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCB11F08099C00A84290 /* LexerAction.cpp */; };
|
||||
2793DCB81F08099C00A84290 /* LexerAction.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 2793DCB11F08099C00A84290 /* LexerAction.cpp */; };
|
||||
2794D8561CE7821B00FADD0F /* antlr4-common.h in Headers */ = {isa = PBXBuildFile; fileRef = 2794D8551CE7821B00FADD0F /* antlr4-common.h */; };
|
||||
2794D8571CE7821B00FADD0F /* antlr4-common.h in Headers */ = {isa = PBXBuildFile; fileRef = 2794D8551CE7821B00FADD0F /* antlr4-common.h */; };
|
||||
2794D8581CE7821B00FADD0F /* antlr4-common.h in Headers */ = {isa = PBXBuildFile; fileRef = 2794D8551CE7821B00FADD0F /* antlr4-common.h */; };
|
||||
|
@ -1061,7 +1097,6 @@
|
|||
276E5CBD1CDB57AA003FF4B4 /* InterpreterRuleContext.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = InterpreterRuleContext.h; sourceTree = "<group>"; wrapsLines = 0; };
|
||||
276E5CBE1CDB57AA003FF4B4 /* IntStream.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = IntStream.cpp; sourceTree = "<group>"; };
|
||||
276E5CBF1CDB57AA003FF4B4 /* IntStream.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = IntStream.h; sourceTree = "<group>"; };
|
||||
276E5CC01CDB57AA003FF4B4 /* IRecognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = IRecognizer.h; sourceTree = "<group>"; };
|
||||
276E5CC11CDB57AA003FF4B4 /* Lexer.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = Lexer.cpp; sourceTree = "<group>"; wrapsLines = 0; };
|
||||
276E5CC21CDB57AA003FF4B4 /* Lexer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Lexer.h; sourceTree = "<group>"; };
|
||||
276E5CC31CDB57AA003FF4B4 /* LexerInterpreter.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = LexerInterpreter.cpp; sourceTree = "<group>"; wrapsLines = 0; };
|
||||
|
@ -1152,6 +1187,19 @@
|
|||
27874F1D1CCB7A0700AF1C53 /* CoreFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreFoundation.framework; path = System/Library/Frameworks/CoreFoundation.framework; sourceTree = SDKROOT; };
|
||||
278E313E1D9D6534001C28F9 /* Tests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Tests.m; sourceTree = "<group>"; };
|
||||
278E31401D9D6534001C28F9 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
|
||||
2793DC841F08083F00A84290 /* TokenSource.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = TokenSource.cpp; sourceTree = "<group>"; };
|
||||
2793DC881F08087500A84290 /* Chunk.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = Chunk.cpp; sourceTree = "<group>"; };
|
||||
2793DC8C1F08088F00A84290 /* ParseTreeListener.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ParseTreeListener.cpp; sourceTree = "<group>"; };
|
||||
2793DC901F0808A200A84290 /* TerminalNode.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = TerminalNode.cpp; sourceTree = "<group>"; };
|
||||
2793DC941F0808E100A84290 /* ErrorNode.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ErrorNode.cpp; sourceTree = "<group>"; };
|
||||
2793DC951F0808E100A84290 /* ParseTreeVisitor.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ParseTreeVisitor.cpp; sourceTree = "<group>"; };
|
||||
2793DC9C1F08090D00A84290 /* Any.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = Any.cpp; sourceTree = "<group>"; };
|
||||
2793DCA01F08095F00A84290 /* ANTLRErrorListener.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ANTLRErrorListener.cpp; sourceTree = "<group>"; };
|
||||
2793DCA11F08095F00A84290 /* ANTLRErrorStrategy.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ANTLRErrorStrategy.cpp; sourceTree = "<group>"; };
|
||||
2793DCA21F08095F00A84290 /* Token.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = Token.cpp; sourceTree = "<group>"; };
|
||||
2793DCA31F08095F00A84290 /* WritableToken.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WritableToken.cpp; sourceTree = "<group>"; };
|
||||
2793DCB01F08099C00A84290 /* BlockStartState.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = BlockStartState.cpp; sourceTree = "<group>"; };
|
||||
2793DCB11F08099C00A84290 /* LexerAction.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = LexerAction.cpp; sourceTree = "<group>"; };
|
||||
2794D8551CE7821B00FADD0F /* antlr4-common.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "antlr4-common.h"; sourceTree = "<group>"; };
|
||||
27AC52CF1CE773A80093AAAB /* antlr4-runtime.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "antlr4-runtime.h"; sourceTree = "<group>"; };
|
||||
27B36AC41DACE7AF0069C868 /* RuleContextWithAltNum.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = RuleContextWithAltNum.cpp; sourceTree = "<group>"; };
|
||||
|
@ -1230,7 +1278,9 @@
|
|||
276E5CF91CDB57AA003FF4B4 /* tree */,
|
||||
2794D8551CE7821B00FADD0F /* antlr4-common.h */,
|
||||
27AC52CF1CE773A80093AAAB /* antlr4-runtime.h */,
|
||||
2793DCA01F08095F00A84290 /* ANTLRErrorListener.cpp */,
|
||||
276E5C0C1CDB57AA003FF4B4 /* ANTLRErrorListener.h */,
|
||||
2793DCA11F08095F00A84290 /* ANTLRErrorStrategy.cpp */,
|
||||
276E5C0D1CDB57AA003FF4B4 /* ANTLRErrorStrategy.h */,
|
||||
276E5C0E1CDB57AA003FF4B4 /* ANTLRFileStream.cpp */,
|
||||
276E5C0F1CDB57AA003FF4B4 /* ANTLRFileStream.h */,
|
||||
|
@ -1266,7 +1316,6 @@
|
|||
276E5CBD1CDB57AA003FF4B4 /* InterpreterRuleContext.h */,
|
||||
276E5CBE1CDB57AA003FF4B4 /* IntStream.cpp */,
|
||||
276E5CBF1CDB57AA003FF4B4 /* IntStream.h */,
|
||||
276E5CC01CDB57AA003FF4B4 /* IRecognizer.h */,
|
||||
276E5CC11CDB57AA003FF4B4 /* Lexer.cpp */,
|
||||
276E5CC21CDB57AA003FF4B4 /* Lexer.h */,
|
||||
276E5CC31CDB57AA003FF4B4 /* LexerInterpreter.cpp */,
|
||||
|
@ -1295,8 +1344,10 @@
|
|||
27B36AC51DACE7AF0069C868 /* RuleContextWithAltNum.h */,
|
||||
27745EFB1CE49C000067C6A3 /* RuntimeMetaData.cpp */,
|
||||
27745EFC1CE49C000067C6A3 /* RuntimeMetaData.h */,
|
||||
2793DCA21F08095F00A84290 /* Token.cpp */,
|
||||
276E5CF01CDB57AA003FF4B4 /* Token.h */,
|
||||
276E5CF21CDB57AA003FF4B4 /* TokenFactory.h */,
|
||||
2793DC841F08083F00A84290 /* TokenSource.cpp */,
|
||||
276E5CF41CDB57AA003FF4B4 /* TokenSource.h */,
|
||||
276E5CF51CDB57AA003FF4B4 /* TokenStream.cpp */,
|
||||
276E5CF61CDB57AA003FF4B4 /* TokenStream.h */,
|
||||
|
@ -1308,6 +1359,7 @@
|
|||
276E5D251CDB57AA003FF4B4 /* UnbufferedTokenStream.h */,
|
||||
276E5D271CDB57AA003FF4B4 /* Vocabulary.cpp */,
|
||||
276E5D281CDB57AA003FF4B4 /* Vocabulary.h */,
|
||||
2793DCA31F08095F00A84290 /* WritableToken.cpp */,
|
||||
276E5D2A1CDB57AA003FF4B4 /* WritableToken.h */,
|
||||
);
|
||||
name = runtime;
|
||||
|
@ -1350,6 +1402,7 @@
|
|||
276E5C321CDB57AA003FF4B4 /* BasicState.h */,
|
||||
276E5C331CDB57AA003FF4B4 /* BlockEndState.cpp */,
|
||||
276E5C341CDB57AA003FF4B4 /* BlockEndState.h */,
|
||||
2793DCB01F08099C00A84290 /* BlockStartState.cpp */,
|
||||
276E5C351CDB57AA003FF4B4 /* BlockStartState.h */,
|
||||
276E5C371CDB57AA003FF4B4 /* ContextSensitivityInfo.cpp */,
|
||||
276E5C381CDB57AA003FF4B4 /* ContextSensitivityInfo.h */,
|
||||
|
@ -1365,6 +1418,7 @@
|
|||
276E5C421CDB57AA003FF4B4 /* EpsilonTransition.h */,
|
||||
276E5C431CDB57AA003FF4B4 /* ErrorInfo.cpp */,
|
||||
276E5C441CDB57AA003FF4B4 /* ErrorInfo.h */,
|
||||
2793DCB11F08099C00A84290 /* LexerAction.cpp */,
|
||||
276E5C451CDB57AA003FF4B4 /* LexerAction.h */,
|
||||
276E5C461CDB57AA003FF4B4 /* LexerActionExecutor.cpp */,
|
||||
276E5C471CDB57AA003FF4B4 /* LexerActionExecutor.h */,
|
||||
|
@ -1483,6 +1537,7 @@
|
|||
276E5CE41CDB57AA003FF4B4 /* support */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
2793DC9C1F08090D00A84290 /* Any.cpp */,
|
||||
27F4A8551D4CEB2A00E067EE /* Any.h */,
|
||||
276E5CE51CDB57AA003FF4B4 /* Arrays.cpp */,
|
||||
276E5CE61CDB57AA003FF4B4 /* Arrays.h */,
|
||||
|
@ -1504,6 +1559,7 @@
|
|||
276E5D061CDB57AA003FF4B4 /* pattern */,
|
||||
27DB448A1D045537007E790B /* xpath */,
|
||||
276E5CFA1CDB57AA003FF4B4 /* AbstractParseTreeVisitor.h */,
|
||||
2793DC941F0808E100A84290 /* ErrorNode.cpp */,
|
||||
276E5CFB1CDB57AA003FF4B4 /* ErrorNode.h */,
|
||||
276E5CFC1CDB57AA003FF4B4 /* ErrorNodeImpl.cpp */,
|
||||
276E5CFD1CDB57AA003FF4B4 /* ErrorNodeImpl.h */,
|
||||
|
@ -1511,11 +1567,14 @@
|
|||
27D414511DEB0D3D00D0F3F9 /* IterativeParseTreeWalker.h */,
|
||||
276566DF1DA93BFB000869BE /* ParseTree.cpp */,
|
||||
276E5CFE1CDB57AA003FF4B4 /* ParseTree.h */,
|
||||
2793DC8C1F08088F00A84290 /* ParseTreeListener.cpp */,
|
||||
276E5D001CDB57AA003FF4B4 /* ParseTreeListener.h */,
|
||||
276E5D021CDB57AA003FF4B4 /* ParseTreeProperty.h */,
|
||||
2793DC951F0808E100A84290 /* ParseTreeVisitor.cpp */,
|
||||
276E5D031CDB57AA003FF4B4 /* ParseTreeVisitor.h */,
|
||||
276E5D041CDB57AA003FF4B4 /* ParseTreeWalker.cpp */,
|
||||
276E5D051CDB57AA003FF4B4 /* ParseTreeWalker.h */,
|
||||
2793DC901F0808A200A84290 /* TerminalNode.cpp */,
|
||||
276E5D181CDB57AA003FF4B4 /* TerminalNode.h */,
|
||||
276E5D191CDB57AA003FF4B4 /* TerminalNodeImpl.cpp */,
|
||||
276E5D1A1CDB57AA003FF4B4 /* TerminalNodeImpl.h */,
|
||||
|
@ -1529,6 +1588,7 @@
|
|||
isa = PBXGroup;
|
||||
children = (
|
||||
276E5D071CDB57AA003FF4B4 /* Chunk.h */,
|
||||
2793DC881F08087500A84290 /* Chunk.cpp */,
|
||||
276E5D081CDB57AA003FF4B4 /* ParseTreeMatch.cpp */,
|
||||
276E5D091CDB57AA003FF4B4 /* ParseTreeMatch.h */,
|
||||
276E5D0A1CDB57AA003FF4B4 /* ParseTreePattern.cpp */,
|
||||
|
@ -1707,7 +1767,6 @@
|
|||
27DB44CC1D0463DB007E790B /* XPathElement.h in Headers */,
|
||||
276E5F581CDB57AA003FF4B4 /* LexerNoViableAltException.h in Headers */,
|
||||
276E5D811CDB57AA003FF4B4 /* ATNSimulator.h in Headers */,
|
||||
276E5F461CDB57AA003FF4B4 /* IRecognizer.h in Headers */,
|
||||
27DB44B61D0463CC007E790B /* XPathLexer.h in Headers */,
|
||||
276E5FC41CDB57AA003FF4B4 /* guid.h in Headers */,
|
||||
276E602D1CDB57AA003FF4B4 /* TagChunk.h in Headers */,
|
||||
|
@ -1875,7 +1934,6 @@
|
|||
276E60141CDB57AA003FF4B4 /* ParseTreeMatch.h in Headers */,
|
||||
276E5F571CDB57AA003FF4B4 /* LexerNoViableAltException.h in Headers */,
|
||||
276E5D801CDB57AA003FF4B4 /* ATNSimulator.h in Headers */,
|
||||
276E5F451CDB57AA003FF4B4 /* IRecognizer.h in Headers */,
|
||||
276E5FC31CDB57AA003FF4B4 /* guid.h in Headers */,
|
||||
276E602C1CDB57AA003FF4B4 /* TagChunk.h in Headers */,
|
||||
276E5E941CDB57AA003FF4B4 /* RuleStopState.h in Headers */,
|
||||
|
@ -2033,7 +2091,6 @@
|
|||
276E60131CDB57AA003FF4B4 /* ParseTreeMatch.h in Headers */,
|
||||
276E5F561CDB57AA003FF4B4 /* LexerNoViableAltException.h in Headers */,
|
||||
276E5D7F1CDB57AA003FF4B4 /* ATNSimulator.h in Headers */,
|
||||
276E5F441CDB57AA003FF4B4 /* IRecognizer.h in Headers */,
|
||||
276E5FC21CDB57AA003FF4B4 /* guid.h in Headers */,
|
||||
276E602B1CDB57AA003FF4B4 /* TagChunk.h in Headers */,
|
||||
276E5E931CDB57AA003FF4B4 /* RuleStopState.h in Headers */,
|
||||
|
@ -2225,10 +2282,12 @@
|
|||
276E60451CDB57AA003FF4B4 /* TerminalNodeImpl.cpp in Sources */,
|
||||
276E5DD21CDB57AA003FF4B4 /* ErrorInfo.cpp in Sources */,
|
||||
276E5F551CDB57AA003FF4B4 /* LexerNoViableAltException.cpp in Sources */,
|
||||
2793DCB81F08099C00A84290 /* LexerAction.cpp in Sources */,
|
||||
276E5E561CDB57AA003FF4B4 /* PlusBlockStartState.cpp in Sources */,
|
||||
276E5E1D1CDB57AA003FF4B4 /* LexerSkipAction.cpp in Sources */,
|
||||
276E5EBC1CDB57AA003FF4B4 /* StarLoopEntryState.cpp in Sources */,
|
||||
276E5D721CDB57AA003FF4B4 /* ATNDeserializer.cpp in Sources */,
|
||||
2793DC8B1F08087500A84290 /* Chunk.cpp in Sources */,
|
||||
276E5E2F1CDB57AA003FF4B4 /* LookaheadEventInfo.cpp in Sources */,
|
||||
276E5DFF1CDB57AA003FF4B4 /* LexerIndexedCustomAction.cpp in Sources */,
|
||||
276E60511CDB57AA003FF4B4 /* Trees.cpp in Sources */,
|
||||
|
@ -2256,6 +2315,8 @@
|
|||
276E5E921CDB57AA003FF4B4 /* RuleStopState.cpp in Sources */,
|
||||
276E60631CDB57AA003FF4B4 /* UnbufferedTokenStream.cpp in Sources */,
|
||||
276E5DDB1CDB57AA003FF4B4 /* LexerActionExecutor.cpp in Sources */,
|
||||
2793DC981F0808E100A84290 /* ErrorNode.cpp in Sources */,
|
||||
2793DCAF1F08095F00A84290 /* WritableToken.cpp in Sources */,
|
||||
276E5E9E1CDB57AA003FF4B4 /* SemanticContext.cpp in Sources */,
|
||||
276E5EC81CDB57AA003FF4B4 /* Transition.cpp in Sources */,
|
||||
276E601E1CDB57AA003FF4B4 /* ParseTreePatternMatcher.cpp in Sources */,
|
||||
|
@ -2263,12 +2324,15 @@
|
|||
276E5D481CDB57AA003FF4B4 /* ActionTransition.cpp in Sources */,
|
||||
276E5DC61CDB57AA003FF4B4 /* EmptyPredictionContext.cpp in Sources */,
|
||||
276E5ED41CDB57AA003FF4B4 /* BailErrorStrategy.cpp in Sources */,
|
||||
2793DC9B1F0808E100A84290 /* ParseTreeVisitor.cpp in Sources */,
|
||||
2793DCAC1F08095F00A84290 /* Token.cpp in Sources */,
|
||||
276E5FA31CDB57AA003FF4B4 /* Recognizer.cpp in Sources */,
|
||||
276E5D6C1CDB57AA003FF4B4 /* ATNDeserializationOptions.cpp in Sources */,
|
||||
276E60361CDB57AA003FF4B4 /* TokenTagToken.cpp in Sources */,
|
||||
27DB44D51D0463DB007E790B /* XPathTokenElement.cpp in Sources */,
|
||||
27DB44D11D0463DB007E790B /* XPathRuleElement.cpp in Sources */,
|
||||
276E5DED1CDB57AA003FF4B4 /* LexerATNSimulator.cpp in Sources */,
|
||||
2793DCB51F08099C00A84290 /* BlockStartState.cpp in Sources */,
|
||||
276E606C1CDB57AA003FF4B4 /* Vocabulary.cpp in Sources */,
|
||||
276E5F1C1CDB57AA003FF4B4 /* LexerDFASerializer.cpp in Sources */,
|
||||
276E60181CDB57AA003FF4B4 /* ParseTreePattern.cpp in Sources */,
|
||||
|
@ -2293,7 +2357,9 @@
|
|||
276E5D781CDB57AA003FF4B4 /* ATNSerializer.cpp in Sources */,
|
||||
27745F051CE49C000067C6A3 /* RuntimeMetaData.cpp in Sources */,
|
||||
276E5DAE1CDB57AA003FF4B4 /* ContextSensitivityInfo.cpp in Sources */,
|
||||
2793DCA61F08095F00A84290 /* ANTLRErrorListener.cpp in Sources */,
|
||||
276E5D661CDB57AA003FF4B4 /* ATNConfigSet.cpp in Sources */,
|
||||
2793DC9F1F08090D00A84290 /* Any.cpp in Sources */,
|
||||
276E5FAF1CDB57AA003FF4B4 /* Arrays.cpp in Sources */,
|
||||
276E5ECE1CDB57AA003FF4B4 /* WildcardTransition.cpp in Sources */,
|
||||
276E5E861CDB57AA003FF4B4 /* RangeTransition.cpp in Sources */,
|
||||
|
@ -2301,6 +2367,7 @@
|
|||
276E5D9C1CDB57AA003FF4B4 /* BasicState.cpp in Sources */,
|
||||
276E5FC11CDB57AA003FF4B4 /* guid.cpp in Sources */,
|
||||
276E5E801CDB57AA003FF4B4 /* ProfilingATNSimulator.cpp in Sources */,
|
||||
2793DCA91F08095F00A84290 /* ANTLRErrorStrategy.cpp in Sources */,
|
||||
276E5F401CDB57AA003FF4B4 /* IntStream.cpp in Sources */,
|
||||
276E5F5B1CDB57AA003FF4B4 /* ListTokenSource.cpp in Sources */,
|
||||
276E5F6D1CDB57AA003FF4B4 /* MurmurHash.cpp in Sources */,
|
||||
|
@ -2315,6 +2382,7 @@
|
|||
27DB44CF1D0463DB007E790B /* XPathRuleAnywhereElement.cpp in Sources */,
|
||||
276E5E441CDB57AA003FF4B4 /* OrderedATNConfigSet.cpp in Sources */,
|
||||
276E5DCC1CDB57AA003FF4B4 /* EpsilonTransition.cpp in Sources */,
|
||||
2793DC8F1F08088F00A84290 /* ParseTreeListener.cpp in Sources */,
|
||||
276E5D5A1CDB57AA003FF4B4 /* ATN.cpp in Sources */,
|
||||
276E5EE61CDB57AA003FF4B4 /* CharStream.cpp in Sources */,
|
||||
276E5EE01CDB57AA003FF4B4 /* BufferedTokenStream.cpp in Sources */,
|
||||
|
@ -2333,6 +2401,8 @@
|
|||
276E5DC01CDB57AA003FF4B4 /* DecisionState.cpp in Sources */,
|
||||
276E5E981CDB57AA003FF4B4 /* RuleTransition.cpp in Sources */,
|
||||
276E5EF81CDB57AA003FF4B4 /* CommonTokenStream.cpp in Sources */,
|
||||
2793DC871F08083F00A84290 /* TokenSource.cpp in Sources */,
|
||||
2793DC931F0808A200A84290 /* TerminalNode.cpp in Sources */,
|
||||
276E60121CDB57AA003FF4B4 /* ParseTreeMatch.cpp in Sources */,
|
||||
276566E21DA93BFB000869BE /* ParseTree.cpp in Sources */,
|
||||
276E5EEC1CDB57AA003FF4B4 /* CommonToken.cpp in Sources */,
|
||||
|
@ -2365,10 +2435,12 @@
|
|||
276E60441CDB57AA003FF4B4 /* TerminalNodeImpl.cpp in Sources */,
|
||||
276E5DD11CDB57AA003FF4B4 /* ErrorInfo.cpp in Sources */,
|
||||
276E5F541CDB57AA003FF4B4 /* LexerNoViableAltException.cpp in Sources */,
|
||||
2793DCB71F08099C00A84290 /* LexerAction.cpp in Sources */,
|
||||
276E5E551CDB57AA003FF4B4 /* PlusBlockStartState.cpp in Sources */,
|
||||
276E5E1C1CDB57AA003FF4B4 /* LexerSkipAction.cpp in Sources */,
|
||||
276E5EBB1CDB57AA003FF4B4 /* StarLoopEntryState.cpp in Sources */,
|
||||
276E5D711CDB57AA003FF4B4 /* ATNDeserializer.cpp in Sources */,
|
||||
2793DC8A1F08087500A84290 /* Chunk.cpp in Sources */,
|
||||
276E5E2E1CDB57AA003FF4B4 /* LookaheadEventInfo.cpp in Sources */,
|
||||
276E5DFE1CDB57AA003FF4B4 /* LexerIndexedCustomAction.cpp in Sources */,
|
||||
276E60501CDB57AA003FF4B4 /* Trees.cpp in Sources */,
|
||||
|
@ -2396,6 +2468,8 @@
|
|||
276E5E911CDB57AA003FF4B4 /* RuleStopState.cpp in Sources */,
|
||||
276E60621CDB57AA003FF4B4 /* UnbufferedTokenStream.cpp in Sources */,
|
||||
276E5DDA1CDB57AA003FF4B4 /* LexerActionExecutor.cpp in Sources */,
|
||||
2793DC971F0808E100A84290 /* ErrorNode.cpp in Sources */,
|
||||
2793DCAE1F08095F00A84290 /* WritableToken.cpp in Sources */,
|
||||
276E5E9D1CDB57AA003FF4B4 /* SemanticContext.cpp in Sources */,
|
||||
276E5EC71CDB57AA003FF4B4 /* Transition.cpp in Sources */,
|
||||
276E601D1CDB57AA003FF4B4 /* ParseTreePatternMatcher.cpp in Sources */,
|
||||
|
@ -2403,12 +2477,15 @@
|
|||
276E5D471CDB57AA003FF4B4 /* ActionTransition.cpp in Sources */,
|
||||
276E5DC51CDB57AA003FF4B4 /* EmptyPredictionContext.cpp in Sources */,
|
||||
276E5ED31CDB57AA003FF4B4 /* BailErrorStrategy.cpp in Sources */,
|
||||
2793DC9A1F0808E100A84290 /* ParseTreeVisitor.cpp in Sources */,
|
||||
2793DCAB1F08095F00A84290 /* Token.cpp in Sources */,
|
||||
276E5FA21CDB57AA003FF4B4 /* Recognizer.cpp in Sources */,
|
||||
276E5D6B1CDB57AA003FF4B4 /* ATNDeserializationOptions.cpp in Sources */,
|
||||
276E60351CDB57AA003FF4B4 /* TokenTagToken.cpp in Sources */,
|
||||
27DB44C31D0463DA007E790B /* XPathTokenElement.cpp in Sources */,
|
||||
27DB44BF1D0463DA007E790B /* XPathRuleElement.cpp in Sources */,
|
||||
276E5DEC1CDB57AA003FF4B4 /* LexerATNSimulator.cpp in Sources */,
|
||||
2793DCB41F08099C00A84290 /* BlockStartState.cpp in Sources */,
|
||||
276E606B1CDB57AA003FF4B4 /* Vocabulary.cpp in Sources */,
|
||||
276E5F1B1CDB57AA003FF4B4 /* LexerDFASerializer.cpp in Sources */,
|
||||
276E60171CDB57AA003FF4B4 /* ParseTreePattern.cpp in Sources */,
|
||||
|
@ -2433,7 +2510,9 @@
|
|||
276E5D771CDB57AA003FF4B4 /* ATNSerializer.cpp in Sources */,
|
||||
27745F041CE49C000067C6A3 /* RuntimeMetaData.cpp in Sources */,
|
||||
276E5DAD1CDB57AA003FF4B4 /* ContextSensitivityInfo.cpp in Sources */,
|
||||
2793DCA51F08095F00A84290 /* ANTLRErrorListener.cpp in Sources */,
|
||||
276E5D651CDB57AA003FF4B4 /* ATNConfigSet.cpp in Sources */,
|
||||
2793DC9E1F08090D00A84290 /* Any.cpp in Sources */,
|
||||
276E5FAE1CDB57AA003FF4B4 /* Arrays.cpp in Sources */,
|
||||
276E5ECD1CDB57AA003FF4B4 /* WildcardTransition.cpp in Sources */,
|
||||
276E5E851CDB57AA003FF4B4 /* RangeTransition.cpp in Sources */,
|
||||
|
@ -2441,6 +2520,7 @@
|
|||
276E5D9B1CDB57AA003FF4B4 /* BasicState.cpp in Sources */,
|
||||
276E5FC01CDB57AA003FF4B4 /* guid.cpp in Sources */,
|
||||
276E5E7F1CDB57AA003FF4B4 /* ProfilingATNSimulator.cpp in Sources */,
|
||||
2793DCA81F08095F00A84290 /* ANTLRErrorStrategy.cpp in Sources */,
|
||||
276E5F3F1CDB57AA003FF4B4 /* IntStream.cpp in Sources */,
|
||||
276E5F5A1CDB57AA003FF4B4 /* ListTokenSource.cpp in Sources */,
|
||||
276E5F6C1CDB57AA003FF4B4 /* MurmurHash.cpp in Sources */,
|
||||
|
@ -2455,6 +2535,7 @@
|
|||
27DB44BD1D0463DA007E790B /* XPathRuleAnywhereElement.cpp in Sources */,
|
||||
276E5E431CDB57AA003FF4B4 /* OrderedATNConfigSet.cpp in Sources */,
|
||||
276E5DCB1CDB57AA003FF4B4 /* EpsilonTransition.cpp in Sources */,
|
||||
2793DC8E1F08088F00A84290 /* ParseTreeListener.cpp in Sources */,
|
||||
276E5D591CDB57AA003FF4B4 /* ATN.cpp in Sources */,
|
||||
276E5EE51CDB57AA003FF4B4 /* CharStream.cpp in Sources */,
|
||||
276E5EDF1CDB57AA003FF4B4 /* BufferedTokenStream.cpp in Sources */,
|
||||
|
@ -2473,6 +2554,8 @@
|
|||
276E5DBF1CDB57AA003FF4B4 /* DecisionState.cpp in Sources */,
|
||||
276E5E971CDB57AA003FF4B4 /* RuleTransition.cpp in Sources */,
|
||||
276E5EF71CDB57AA003FF4B4 /* CommonTokenStream.cpp in Sources */,
|
||||
2793DC861F08083F00A84290 /* TokenSource.cpp in Sources */,
|
||||
2793DC921F0808A200A84290 /* TerminalNode.cpp in Sources */,
|
||||
276E60111CDB57AA003FF4B4 /* ParseTreeMatch.cpp in Sources */,
|
||||
276566E11DA93BFB000869BE /* ParseTree.cpp in Sources */,
|
||||
276E5EEB1CDB57AA003FF4B4 /* CommonToken.cpp in Sources */,
|
||||
|
@ -2505,10 +2588,12 @@
|
|||
276E5DB21CDB57AA003FF4B4 /* DecisionEventInfo.cpp in Sources */,
|
||||
276E60431CDB57AA003FF4B4 /* TerminalNodeImpl.cpp in Sources */,
|
||||
276E5DD01CDB57AA003FF4B4 /* ErrorInfo.cpp in Sources */,
|
||||
2793DCB61F08099C00A84290 /* LexerAction.cpp in Sources */,
|
||||
276E5F531CDB57AA003FF4B4 /* LexerNoViableAltException.cpp in Sources */,
|
||||
276E5E541CDB57AA003FF4B4 /* PlusBlockStartState.cpp in Sources */,
|
||||
276E5E1B1CDB57AA003FF4B4 /* LexerSkipAction.cpp in Sources */,
|
||||
276E5EBA1CDB57AA003FF4B4 /* StarLoopEntryState.cpp in Sources */,
|
||||
2793DC891F08087500A84290 /* Chunk.cpp in Sources */,
|
||||
276E5D701CDB57AA003FF4B4 /* ATNDeserializer.cpp in Sources */,
|
||||
276E5E2D1CDB57AA003FF4B4 /* LookaheadEventInfo.cpp in Sources */,
|
||||
276E5DFD1CDB57AA003FF4B4 /* LexerIndexedCustomAction.cpp in Sources */,
|
||||
|
@ -2536,6 +2621,8 @@
|
|||
276E60611CDB57AA003FF4B4 /* UnbufferedTokenStream.cpp in Sources */,
|
||||
276E5DD91CDB57AA003FF4B4 /* LexerActionExecutor.cpp in Sources */,
|
||||
27DB449D1D045537007E790B /* XPath.cpp in Sources */,
|
||||
2793DC961F0808E100A84290 /* ErrorNode.cpp in Sources */,
|
||||
2793DCAD1F08095F00A84290 /* WritableToken.cpp in Sources */,
|
||||
276E5E9C1CDB57AA003FF4B4 /* SemanticContext.cpp in Sources */,
|
||||
27DB44AD1D045537007E790B /* XPathWildcardElement.cpp in Sources */,
|
||||
276E5EC61CDB57AA003FF4B4 /* Transition.cpp in Sources */,
|
||||
|
@ -2543,12 +2630,15 @@
|
|||
27DB44A51D045537007E790B /* XPathRuleElement.cpp in Sources */,
|
||||
276E5F201CDB57AA003FF4B4 /* DiagnosticErrorListener.cpp in Sources */,
|
||||
276E5D461CDB57AA003FF4B4 /* ActionTransition.cpp in Sources */,
|
||||
2793DC991F0808E100A84290 /* ParseTreeVisitor.cpp in Sources */,
|
||||
2793DCAA1F08095F00A84290 /* Token.cpp in Sources */,
|
||||
276E5DC41CDB57AA003FF4B4 /* EmptyPredictionContext.cpp in Sources */,
|
||||
276E5ED21CDB57AA003FF4B4 /* BailErrorStrategy.cpp in Sources */,
|
||||
276E5FA11CDB57AA003FF4B4 /* Recognizer.cpp in Sources */,
|
||||
276E5D6A1CDB57AA003FF4B4 /* ATNDeserializationOptions.cpp in Sources */,
|
||||
276E60341CDB57AA003FF4B4 /* TokenTagToken.cpp in Sources */,
|
||||
276E5DEB1CDB57AA003FF4B4 /* LexerATNSimulator.cpp in Sources */,
|
||||
2793DCB31F08099C00A84290 /* BlockStartState.cpp in Sources */,
|
||||
276E606A1CDB57AA003FF4B4 /* Vocabulary.cpp in Sources */,
|
||||
276E5F1A1CDB57AA003FF4B4 /* LexerDFASerializer.cpp in Sources */,
|
||||
276E60161CDB57AA003FF4B4 /* ParseTreePattern.cpp in Sources */,
|
||||
|
@ -2573,7 +2663,9 @@
|
|||
276E5D761CDB57AA003FF4B4 /* ATNSerializer.cpp in Sources */,
|
||||
27745F031CE49C000067C6A3 /* RuntimeMetaData.cpp in Sources */,
|
||||
276E5DAC1CDB57AA003FF4B4 /* ContextSensitivityInfo.cpp in Sources */,
|
||||
2793DCA41F08095F00A84290 /* ANTLRErrorListener.cpp in Sources */,
|
||||
276E5D641CDB57AA003FF4B4 /* ATNConfigSet.cpp in Sources */,
|
||||
2793DC9D1F08090D00A84290 /* Any.cpp in Sources */,
|
||||
276E5FAD1CDB57AA003FF4B4 /* Arrays.cpp in Sources */,
|
||||
276E5ECC1CDB57AA003FF4B4 /* WildcardTransition.cpp in Sources */,
|
||||
276E5E841CDB57AA003FF4B4 /* RangeTransition.cpp in Sources */,
|
||||
|
@ -2581,6 +2673,7 @@
|
|||
276E5D9A1CDB57AA003FF4B4 /* BasicState.cpp in Sources */,
|
||||
276E5FBF1CDB57AA003FF4B4 /* guid.cpp in Sources */,
|
||||
276E5E7E1CDB57AA003FF4B4 /* ProfilingATNSimulator.cpp in Sources */,
|
||||
2793DCA71F08095F00A84290 /* ANTLRErrorStrategy.cpp in Sources */,
|
||||
276E5F3E1CDB57AA003FF4B4 /* IntStream.cpp in Sources */,
|
||||
276E5F591CDB57AA003FF4B4 /* ListTokenSource.cpp in Sources */,
|
||||
276E5F6B1CDB57AA003FF4B4 /* MurmurHash.cpp in Sources */,
|
||||
|
@ -2595,6 +2688,7 @@
|
|||
276E5D581CDB57AA003FF4B4 /* ATN.cpp in Sources */,
|
||||
276E5EE41CDB57AA003FF4B4 /* CharStream.cpp in Sources */,
|
||||
27DB44AB1D045537007E790B /* XPathWildcardAnywhereElement.cpp in Sources */,
|
||||
2793DC8D1F08088F00A84290 /* ParseTreeListener.cpp in Sources */,
|
||||
276E5EDE1CDB57AA003FF4B4 /* BufferedTokenStream.cpp in Sources */,
|
||||
276E5F021CDB57AA003FF4B4 /* DefaultErrorStrategy.cpp in Sources */,
|
||||
276E5D401CDB57AA003FF4B4 /* AbstractPredicateTransition.cpp in Sources */,
|
||||
|
@ -2613,6 +2707,8 @@
|
|||
276E5DBE1CDB57AA003FF4B4 /* DecisionState.cpp in Sources */,
|
||||
276E5E961CDB57AA003FF4B4 /* RuleTransition.cpp in Sources */,
|
||||
276E5EF61CDB57AA003FF4B4 /* CommonTokenStream.cpp in Sources */,
|
||||
2793DC851F08083F00A84290 /* TokenSource.cpp in Sources */,
|
||||
2793DC911F0808A200A84290 /* TerminalNode.cpp in Sources */,
|
||||
276E60101CDB57AA003FF4B4 /* ParseTreeMatch.cpp in Sources */,
|
||||
276566E01DA93BFB000869BE /* ParseTree.cpp in Sources */,
|
||||
276E5EEA1CDB57AA003FF4B4 /* CommonToken.cpp in Sources */,
|
||||
|
|
|
@@ -1,3 +1,8 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "ANTLRErrorListener.h"

antlr4::ANTLRErrorListener::~ANTLRErrorListener()

@@ -1,3 +1,8 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "ANTLRErrorStrategy.h"

antlr4::ANTLRErrorStrategy::~ANTLRErrorStrategy()

@@ -27,7 +27,7 @@ namespace antlr4 {
  /// </summary>
  class ANTLR4CPP_PUBLIC IntStream {
  public:
    static const size_t EOF = std::numeric_limits<size_t>::max();
    static const size_t EOF = static_cast<size_t>(-1); // std::numeric_limits<size_t>::max(); doesn't work in VS 2013

    /// The value returned by <seealso cref="#LA LA()"/> when the end of the stream is
    /// reached.

@@ -11,7 +11,7 @@ namespace antlr4 {

  class ANTLR4CPP_PUBLIC Recognizer {
  public:
    static const size_t EOF = std::numeric_limits<size_t>::max();
    static const size_t EOF = static_cast<size_t>(-1); // std::numeric_limits<size_t>::max(); doesn't work in VS 2013.

    Recognizer();
    Recognizer(Recognizer const&) = delete;

@@ -1,3 +1,8 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "Token.h"

antlr4::Token::~Token() {

@@ -18,7 +18,7 @@ namespace antlr4 {

    /// During lookahead operations, this "token" signifies we hit rule end ATN state
    /// and did not follow it despite needing to.
    static const size_t EPSILON = std::numeric_limits<size_t>::max() - 1;
    static const size_t EPSILON = static_cast<size_t>(-2);
    static const size_t MIN_USER_TOKEN_TYPE = 1;
    static const size_t EOF = IntStream::EOF;
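The cast-based spellings above keep exactly the same sentinel values as the numeric_limits forms they replace; they only sidestep a constant-initialization quirk in older MSVC. A minimal standalone sketch (not part of this commit; the constant names are illustrative stand-ins for IntStream::EOF, Token::EPSILON and Token::MIN_USER_TOKEN_TYPE) showing the equivalence and that the sentinels stay distinct from real token types:

#include <cstddef>
#include <limits>

// Illustrative stand-ins; the values match the definitions in the hunks above.
constexpr size_t kEof              = static_cast<size_t>(-1);
constexpr size_t kEpsilon          = static_cast<size_t>(-2);
constexpr size_t kMinUserTokenType = 1;

// The cast form and the numeric_limits form denote the same value.
static_assert(kEof == std::numeric_limits<size_t>::max(), "EOF value unchanged");
static_assert(kEpsilon == std::numeric_limits<size_t>::max() - 1, "EPSILON value unchanged");

// The sentinels sit at the very top of the size_t range, far from user token
// types (which start at 1), so comparisons like `type == kEof` stay unambiguous.
static_assert(kEof != kEpsilon && kEpsilon > kMinUserTokenType, "sentinels stay distinct");

int main() { return 0; }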
@@ -1,3 +1,8 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "TokenSource.h"

antlr4::TokenSource::~TokenSource() {

@@ -52,7 +52,7 @@ void UnbufferedCharStream::sync(size_t want) {

size_t UnbufferedCharStream::fill(size_t n) {
  for (size_t i = 0; i < n; i++) {
    if (_data.size() > 0 && _data.back() == (uint32_t)EOF) {
    if (_data.size() > 0 && _data.back() == 0xFFFF) {
      return i;
    }
@ -89,23 +89,23 @@ size_t UnbufferedCharStream::LA(ssize_t i) {
|
|||
}
|
||||
|
||||
// We can look back only as many chars as we have buffered.
|
||||
ssize_t index = (ssize_t)_p + i - 1;
|
||||
ssize_t index = static_cast<ssize_t>(_p) + i - 1;
|
||||
if (index < 0) {
|
||||
throw IndexOutOfBoundsException();
|
||||
}
|
||||
|
||||
if (i > 0) {
|
||||
sync((size_t)i); // No need to sync if we look back.
|
||||
sync(static_cast<size_t>(i)); // No need to sync if we look back.
|
||||
}
|
||||
if ((size_t)index >= _data.size()) {
|
||||
if (static_cast<size_t>(index) >= _data.size()) {
|
||||
return EOF;
|
||||
}
|
||||
|
||||
if (_data[(size_t)index] == (uint32_t)EOF) {
|
||||
if (_data[static_cast<size_t>(index)] == 0xFFFF) {
|
||||
return EOF;
|
||||
}
|
||||
|
||||
return _data[(size_t)index];
|
||||
return _data[static_cast<size_t>(index)];
|
||||
}
|
||||
|
||||
ssize_t UnbufferedCharStream::mark() {
|
||||
|
@ -113,13 +113,13 @@ ssize_t UnbufferedCharStream::mark() {
|
|||
_lastCharBufferStart = _lastChar;
|
||||
}
|
||||
|
||||
ssize_t mark = -(ssize_t)_numMarkers - 1;
|
||||
ssize_t mark = -static_cast<ssize_t>(_numMarkers) - 1;
|
||||
_numMarkers++;
|
||||
return mark;
|
||||
}
|
||||
|
||||
void UnbufferedCharStream::release(ssize_t marker) {
|
||||
ssize_t expectedMark = -(ssize_t)_numMarkers;
|
||||
ssize_t expectedMark = -static_cast<ssize_t>(_numMarkers);
|
||||
if (marker != expectedMark) {
|
||||
throw IllegalStateException("release() called with an invalid marker.");
|
||||
}
|
||||
|
@ -147,16 +147,16 @@ void UnbufferedCharStream::seek(size_t index) {
|
|||
}
|
||||
|
||||
// index == to bufferStartIndex should set p to 0
|
||||
ssize_t i = (ssize_t)index - (ssize_t)getBufferStartIndex();
|
||||
ssize_t i = static_cast<ssize_t>(index) - static_cast<ssize_t>(getBufferStartIndex());
|
||||
if (i < 0) {
|
||||
throw IllegalArgumentException(std::string("cannot seek to negative index ") + std::to_string(index));
|
||||
} else if (i >= (ssize_t)_data.size()) {
|
||||
} else if (i >= static_cast<ssize_t>(_data.size())) {
|
||||
throw UnsupportedOperationException("Seek to index outside buffer: " + std::to_string(index) +
|
||||
" not in " + std::to_string(getBufferStartIndex()) + ".." +
|
||||
std::to_string(getBufferStartIndex() + _data.size()));
|
||||
}
|
||||
|
||||
_p = (size_t)i;
|
||||
_p = static_cast<size_t>(i);
|
||||
_currentCharIndex = index;
|
||||
if (_p == 0) {
|
||||
_lastChar = _lastCharBufferStart;
|
||||
|
@ -189,7 +189,7 @@ std::string UnbufferedCharStream::getText(const misc::Interval &interval) {
|
|||
}
|
||||
}
|
||||
|
||||
if (interval.a < (ssize_t)bufferStartIndex || interval.b >= ssize_t(bufferStartIndex + _data.size())) {
|
||||
if (interval.a < static_cast<ssize_t>(bufferStartIndex) || interval.b >= ssize_t(bufferStartIndex + _data.size())) {
|
||||
throw UnsupportedOperationException("interval " + interval.toString() + " outside buffer: " +
|
||||
std::to_string(bufferStartIndex) + ".." + std::to_string(bufferStartIndex + _data.size() - 1));
|
||||
}
|
||||
|
|
|
@ -46,17 +46,17 @@ Token* UnbufferedTokenStream::LT(ssize_t i)
|
|||
}
|
||||
|
||||
sync(i);
|
||||
ssize_t index = (ssize_t)_p + i - 1;
|
||||
ssize_t index = static_cast<ssize_t>(_p) + i - 1;
|
||||
if (index < 0) {
|
||||
throw IndexOutOfBoundsException(std::string("LT(") + std::to_string(i) + std::string(") gives negative index"));
|
||||
}
|
||||
|
||||
if (index >= (ssize_t)_tokens.size()) {
|
||||
if (index >= static_cast<ssize_t>(_tokens.size())) {
|
||||
assert(_tokens.size() > 0 && _tokens.back()->getType() == EOF);
|
||||
return _tokens.back().get();
|
||||
}
|
||||
|
||||
return _tokens[(size_t)index].get();
|
||||
return _tokens[static_cast<size_t>(index)].get();
|
||||
}
|
||||
|
||||
size_t UnbufferedTokenStream::LA(ssize_t i)
|
||||
|
@@ -113,9 +113,9 @@ void UnbufferedTokenStream::consume()
/// </summary>
void UnbufferedTokenStream::sync(ssize_t want)
{
  ssize_t need = ((ssize_t)_p + want - 1) - (ssize_t)_tokens.size() + 1; // how many more elements we need?
  ssize_t need = (static_cast<ssize_t>(_p) + want - 1) - static_cast<ssize_t>(_tokens.size()) + 1; // how many more elements we need?
  if (need > 0) {
    fill((size_t)need);
    fill(static_cast<size_t>(need));
  }
}
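The arithmetic in sync() asks how many tokens must still be fetched so that index _p + want - 1 is valid in the buffer. A tiny self-contained sketch (not runtime code; the helper name is hypothetical) working through that formula:

#include <cassert>
#include <cstddef>

// need = (p + want - 1) - size + 1: the highest index the caller will touch,
// minus the number of elements already buffered, plus one.
static long long elementsNeeded(size_t p, long long want, size_t size) {
  return (static_cast<long long>(p) + want - 1) - static_cast<long long>(size) + 1;
}

int main() {
  // 3 tokens buffered, cursor at 1, caller wants 4 tokens of lookahead:
  // the highest index touched is 1 + 4 - 1 = 4, so 2 more tokens are required.
  assert(elementsNeeded(1, 4, 3) == 2);
  // Everything already buffered: need <= 0, so fill() is skipped.
  assert(elementsNeeded(0, 3, 3) == 0);
  return 0;
}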
@ -177,7 +177,7 @@ void UnbufferedTokenStream::release(ssize_t marker)
|
|||
if (_p > 0) {
|
||||
// Copy tokens[p]..tokens[n-1] to tokens[0]..tokens[(n-1)-p], reset ptrs
|
||||
// p is last valid token; move nothing if p==n as we have no valid char
|
||||
_tokens.erase(_tokens.begin(), _tokens.begin() + (ssize_t)_p);
|
||||
_tokens.erase(_tokens.begin(), _tokens.begin() + static_cast<ssize_t>(_p));
|
||||
_p = 0;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,3 +1,8 @@
|
|||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
|
||||
#include "WritableToken.h"
|
||||
|
||||
antlr4::WritableToken::~WritableToken() {
|
||||
|
|
|
@ -63,6 +63,8 @@
|
|||
typedef std::basic_string<__int32> i32string;
|
||||
|
||||
typedef i32string UTF32String;
|
||||
#else
|
||||
typedef std::u32string UTF32String;
|
||||
#endif
|
||||
|
||||
#ifdef ANTLR4CPP_EXPORTS
|
||||
|
|
|
@ -752,6 +752,7 @@ Ref<LexerAction> ATNDeserializer::lexerActionFactory(LexerActionType type, int d
|
|||
return std::make_shared<LexerTypeAction>(data1);
|
||||
|
||||
default:
|
||||
throw IllegalArgumentException("The specified lexer action type " + std::to_string((size_t)type) + " is not valid.");
|
||||
throw IllegalArgumentException("The specified lexer action type " + std::to_string(static_cast<size_t>(type)) +
|
||||
" is not valid.");
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -58,7 +58,7 @@ std::vector<size_t> ATNSerializer::serialize() {
  serializeUUID(data, ATNDeserializer::SERIALIZED_UUID());

  // convert grammar type to ATN const to avoid dependence on ANTLRParser
  data.push_back((size_t)atn->grammarType);
  data.push_back(static_cast<size_t>(atn->grammarType));
  data.push_back(atn->maxTokenType);
  size_t nedges = 0;
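The idiom above is simply "enum class in, size_t out": scoped enums have no implicit integer conversion, so serialization casts explicitly in both directions. A small standalone illustration (hypothetical enum, not the runtime's ATNType):

#include <cassert>
#include <cstddef>
#include <vector>

enum class GrammarType : size_t { Lexer = 0, Parser = 1 };

int main() {
  std::vector<size_t> data;
  data.push_back(static_cast<size_t>(GrammarType::Parser)); // serialize the enum
  data.push_back(42);                                        // e.g. a maxTokenType value

  GrammarType restored = static_cast<GrammarType>(data[0]);  // deserialize it again
  assert(restored == GrammarType::Parser);
  assert(data[1] == 42);
  return 0;
}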
@ -288,7 +288,7 @@ std::vector<size_t> ATNSerializer::serialize() {
|
|||
if (atn->grammarType == ATNType::LEXER) {
|
||||
data.push_back(atn->lexerActions.size());
|
||||
for (Ref<LexerAction> &action : atn->lexerActions) {
|
||||
data.push_back((size_t)action->getActionType());
|
||||
data.push_back(static_cast<size_t>(action->getActionType()));
|
||||
switch (action->getActionType()) {
|
||||
case LexerActionType::CHANNEL:
|
||||
{
|
||||
|
@ -348,7 +348,8 @@ std::vector<size_t> ATNSerializer::serialize() {
|
|||
|
||||
default:
|
||||
throw IllegalArgumentException("The specified lexer action type " +
|
||||
std::to_string((size_t)action->getActionType()) + " is not valid.");
|
||||
std::to_string(static_cast<size_t>(action->getActionType())) +
|
||||
" is not valid.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -77,7 +77,7 @@ namespace atn {
|
|||
virtual ~ATNState();
|
||||
|
||||
static const size_t INITIAL_NUM_TRANSITIONS = 4;
|
||||
static const size_t INVALID_STATE_NUMBER = std::numeric_limits<size_t>::max();
|
||||
static const size_t INVALID_STATE_NUMBER = static_cast<size_t>(-1); // std::numeric_limits<size_t>::max();
|
||||
|
||||
enum {
|
||||
ATN_INVALID_TYPE = 0,
|
||||
|
|
|
@ -1,3 +1,8 @@
|
|||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
|
||||
#include "BlockStartState.h"
|
||||
|
||||
antlr4::atn::BlockStartState::~BlockStartState() {
|
||||
|
|
|
@@ -144,12 +144,12 @@ void LL1Analyzer::_LOOK(ATNState *s, ATNState *stopState, Ref<PredictionContext>
  } else if (t->isEpsilon()) {
    _LOOK(t->target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
  } else if (t->getSerializationType() == Transition::WILDCARD) {
    look.addAll(misc::IntervalSet::of(Token::MIN_USER_TOKEN_TYPE, (ssize_t)_atn.maxTokenType));
    look.addAll(misc::IntervalSet::of(Token::MIN_USER_TOKEN_TYPE, static_cast<ssize_t>(_atn.maxTokenType)));
  } else {
    misc::IntervalSet set = t->label();
    if (!set.isEmpty()) {
      if (is<NotSetTransition*>(t)) {
        set = set.complement(misc::IntervalSet::of(Token::MIN_USER_TOKEN_TYPE, (ssize_t)_atn.maxTokenType));
        set = set.complement(misc::IntervalSet::of(Token::MIN_USER_TOKEN_TYPE, static_cast<ssize_t>(_atn.maxTokenType)));
      }
      look.addAll(set);
    }
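The set algebra in _LOOK is worth seeing in isolation: a "not set" transition contributes the complement of its label within the full token vocabulary, and each contribution is merged into the lookahead set with addAll. A minimal sketch under stated assumptions (antlr4-runtime headers on the include path; only the IntervalSet calls visible in the hunk above plus toString are used, so verify against your runtime version):

#include <iostream>
#include "antlr4-runtime.h"

using antlr4::misc::IntervalSet;

int main() {
  const ssize_t minTokenType = 1;   // Token::MIN_USER_TOKEN_TYPE
  const ssize_t maxTokenType = 10;  // stand-in for _atn.maxTokenType

  IntervalSet vocabulary = IntervalSet::of(minTokenType, maxTokenType);
  IntervalSet label = IntervalSet::of(3, 5);        // tokens named on the edge

  // For a ~(3..5) transition the lookahead contribution is everything else.
  IntervalSet look = label.complement(vocabulary);  // {1..2, 6..10}
  look.addAll(IntervalSet::of(20, 20));             // merge another contribution

  std::cout << look.toString() << std::endl;
  return 0;
}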
@@ -1,3 +1,8 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "LexerAction.h"

antlr4::atn::LexerAction::~LexerAction() {

@@ -32,7 +32,7 @@ void LexerChannelAction::execute(Lexer *lexer) {

size_t LexerChannelAction::hashCode() const {
  size_t hash = MurmurHash::initialize();
  hash = MurmurHash::update(hash, (size_t)getActionType());
  hash = MurmurHash::update(hash, static_cast<size_t>(getActionType()));
  hash = MurmurHash::update(hash, _channel);
  return MurmurHash::finish(hash, 2);
}
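Every lexer action's hashCode() follows the same initialize/update/finish shape, folding the action type plus its fields into one value. A toy sketch of that pattern (the combiner below is a trivial FNV-style stand-in so the example stays self-contained; it is not the runtime's MurmurHash, and the enum is hypothetical):

#include <cstddef>
#include <cstdio>

namespace toy {
  // Same shape as the helper used above: seed, fold each value, finalize.
  inline size_t initialize() { return 1469598103934665603ull; }
  inline size_t update(size_t hash, size_t value) { return (hash ^ value) * 1099511628211ull; }
  inline size_t finish(size_t hash, size_t entryCount) { return hash ^ (entryCount << 1); }
}

enum class ActionType : size_t { Channel = 0, Mode = 1 };

// Mirrors LexerChannelAction::hashCode(): fold the action type and the
// channel number, then finish with the number of values folded in.
size_t channelActionHash(ActionType type, size_t channel) {
  size_t hash = toy::initialize();
  hash = toy::update(hash, static_cast<size_t>(type));
  hash = toy::update(hash, channel);
  return toy::finish(hash, 2);
}

int main() {
  std::printf("%zu\n", channelActionHash(ActionType::Channel, 3));
  return 0;
}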
@ -38,7 +38,7 @@ void LexerCustomAction::execute(Lexer *lexer) {
|
|||
|
||||
size_t LexerCustomAction::hashCode() const {
|
||||
size_t hash = MurmurHash::initialize();
|
||||
hash = MurmurHash::update(hash, (size_t)getActionType());
|
||||
hash = MurmurHash::update(hash, static_cast<size_t>(getActionType()));
|
||||
hash = MurmurHash::update(hash, _ruleIndex);
|
||||
hash = MurmurHash::update(hash, _actionIndex);
|
||||
return MurmurHash::finish(hash, 3);
|
||||
|
|
|
@ -33,7 +33,7 @@ void LexerModeAction::execute(Lexer *lexer) {
|
|||
|
||||
size_t LexerModeAction::hashCode() const {
|
||||
size_t hash = MurmurHash::initialize();
|
||||
hash = MurmurHash::update(hash, (size_t)getActionType());
|
||||
hash = MurmurHash::update(hash, static_cast<size_t>(getActionType()));
|
||||
hash = MurmurHash::update(hash, _mode);
|
||||
return MurmurHash::finish(hash, 2);
|
||||
}
|
||||
|
|
|
@ -34,7 +34,7 @@ void LexerMoreAction::execute(Lexer *lexer) {
|
|||
|
||||
size_t LexerMoreAction::hashCode() const {
|
||||
size_t hash = MurmurHash::initialize();
|
||||
hash = MurmurHash::update(hash, (size_t)getActionType());
|
||||
hash = MurmurHash::update(hash, static_cast<size_t>(getActionType()));
|
||||
return MurmurHash::finish(hash, 1);
|
||||
}
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ void LexerPopModeAction::execute(Lexer *lexer) {
|
|||
|
||||
size_t LexerPopModeAction::hashCode() const {
|
||||
size_t hash = MurmurHash::initialize();
|
||||
hash = MurmurHash::update(hash, (size_t)getActionType());
|
||||
hash = MurmurHash::update(hash, static_cast<size_t>(getActionType()));
|
||||
return MurmurHash::finish(hash, 1);
|
||||
}
|
||||
|
||||
|
|
|
@ -33,7 +33,7 @@ void LexerPushModeAction::execute(Lexer *lexer) {
|
|||
|
||||
size_t LexerPushModeAction::hashCode() const {
|
||||
size_t hash = MurmurHash::initialize();
|
||||
hash = MurmurHash::update(hash, (size_t)getActionType());
|
||||
hash = MurmurHash::update(hash, static_cast<size_t>(getActionType()));
|
||||
hash = MurmurHash::update(hash, _mode);
|
||||
return MurmurHash::finish(hash, 2);
|
||||
}
|
||||
|
|
|
@ -34,7 +34,7 @@ void LexerSkipAction::execute(Lexer *lexer) {
|
|||
|
||||
size_t LexerSkipAction::hashCode() const {
|
||||
size_t hash = MurmurHash::initialize();
|
||||
hash = MurmurHash::update(hash, (size_t)getActionType());
|
||||
hash = MurmurHash::update(hash, static_cast<size_t>(getActionType()));
|
||||
return MurmurHash::finish(hash, 1);
|
||||
}
|
||||
|
||||
|
|
|
@ -33,7 +33,7 @@ void LexerTypeAction::execute(Lexer *lexer) {
|
|||
|
||||
size_t LexerTypeAction::hashCode() const {
|
||||
size_t hash = MurmurHash::initialize();
|
||||
hash = MurmurHash::update(hash, (size_t)getActionType());
|
||||
hash = MurmurHash::update(hash, static_cast<size_t>(getActionType()));
|
||||
hash = MurmurHash::update(hash, _type);
|
||||
return MurmurHash::finish(hash, 2);
|
||||
}
|
||||
|
|
|
@ -184,7 +184,7 @@ size_t ParserATNSimulator::execATN(dfa::DFA &dfa, dfa::DFAState *s0, TokenStream
|
|||
throw e;
|
||||
}
|
||||
|
||||
if (D->requiresFullContext && mode != PredictionMode::SLL) {
|
||||
if (D->requiresFullContext && _mode != PredictionMode::SLL) {
|
||||
// IF PREDS, MIGHT RESOLVE TO SINGLE ALT => SLL (or syntax error)
|
||||
BitSet conflictingAlts;
|
||||
if (D->predicates.size() != 0) {
|
||||
|
@ -283,7 +283,7 @@ dfa::DFAState *ParserATNSimulator::computeTargetState(dfa::DFA &dfa, dfa::DFASta
|
|||
D->isAcceptState = true;
|
||||
D->configs->uniqueAlt = predictedAlt;
|
||||
D->prediction = predictedAlt;
|
||||
} else if (PredictionModeClass::hasSLLConflictTerminatingPrediction(mode, D->configs.get())) {
|
||||
} else if (PredictionModeClass::hasSLLConflictTerminatingPrediction(_mode, D->configs.get())) {
|
||||
// MORE THAN ONE VIABLE ALTERNATIVE
|
||||
D->configs->conflictingAlts = getConflictingAlts(D->configs.get());
|
||||
D->requiresFullContext = true;
|
||||
|
@ -370,7 +370,7 @@ size_t ParserATNSimulator::execATNWithFullContext(dfa::DFA &dfa, dfa::DFAState *
|
|||
predictedAlt = reach->uniqueAlt;
|
||||
break;
|
||||
}
|
||||
if (mode != PredictionMode::LL_EXACT_AMBIG_DETECTION) {
|
||||
if (_mode != PredictionMode::LL_EXACT_AMBIG_DETECTION) {
|
||||
predictedAlt = PredictionModeClass::resolvesToJustOneViableAlt(altSubSets);
|
||||
if (predictedAlt != ATN::INVALID_ALT_NUMBER) {
|
||||
break;
|
||||
|
@@ -1332,11 +1332,11 @@ void ParserATNSimulator::reportAmbiguity(dfa::DFA &dfa, dfa::DFAState * /*D*/, s
}

void ParserATNSimulator::setPredictionMode(PredictionMode newMode) {
  mode = newMode;
  _mode = newMode;
}

atn::PredictionMode ParserATNSimulator::getPredictionMode() {
  return mode;
  return _mode;
}

Parser* ParserATNSimulator::getParser() {

@@ -1352,6 +1352,6 @@ bool ParserATNSimulator::getLrLoopSetting() {
}

void ParserATNSimulator::InitializeInstanceFields() {
  mode = PredictionMode::LL;
  _mode = PredictionMode::LL;
  _startIndex = 0;
}
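The prediction mode stored in the renamed _mode field is what callers toggle through setPredictionMode()/getPredictionMode(). A minimal sketch (not from this commit) of the usual two-stage strategy those accessors enable: try fast SLL prediction with a bail-out error strategy first, and fall back to full LL only when that fails. MyLexer, MyParser and startRule are hypothetical grammar-generated names, and the runtime calls are used as I understand this era's C++ API, so treat the details as assumptions to verify:

#include <memory>
#include <string>
#include "antlr4-runtime.h"
// "MyLexer.h" / "MyParser.h" would be the grammar-generated headers (hypothetical).

using namespace antlr4;

bool parseTwoStage(const std::string &text) {
  ANTLRInputStream input(text);
  MyLexer lexer(&input);                       // hypothetical generated lexer
  CommonTokenStream tokens(&lexer);
  MyParser parser(&tokens);                    // hypothetical generated parser

  // Stage 1: fast SLL prediction, bail out on the first problem.
  parser.getInterpreter<atn::ParserATNSimulator>()->setPredictionMode(atn::PredictionMode::SLL);
  parser.setErrorHandler(std::make_shared<BailErrorStrategy>());
  try {
    parser.startRule();                        // hypothetical entry rule
    return true;
  } catch (ParseCancellationException &) {
    // Stage 2: rewind and retry with full LL prediction and normal recovery.
    tokens.seek(0);
    parser.reset();
    parser.getInterpreter<atn::ParserATNSimulator>()->setPredictionMode(atn::PredictionMode::LL);
    parser.setErrorHandler(std::make_shared<DefaultErrorStrategy>());
    parser.startRule();
    return false;                              // parsed, but only after falling back
  }
}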
@ -243,20 +243,133 @@ namespace atn {
|
|||
* the input.</p>
|
||||
*/
|
||||
class ANTLR4CPP_PUBLIC ParserATNSimulator : public ATNSimulator {
|
||||
protected:
|
||||
Parser *const parser;
|
||||
|
||||
public:
|
||||
/// Testing only!
|
||||
ParserATNSimulator(const ATN &atn, std::vector<dfa::DFA> &decisionToDFA,
|
||||
PredictionContextCache &sharedContextCache);
|
||||
|
||||
ParserATNSimulator(Parser *parser, const ATN &atn, std::vector<dfa::DFA> &decisionToDFA,
|
||||
PredictionContextCache &sharedContextCache);
|
||||
|
||||
virtual void reset() override;
|
||||
virtual void clearDFA() override;
|
||||
virtual size_t adaptivePredict(TokenStream *input, size_t decision, ParserRuleContext *outerContext);
|
||||
|
||||
static const bool TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT;
|
||||
|
||||
std::vector<dfa::DFA> &decisionToDFA;
|
||||
|
||||
/** Implements first-edge (loop entry) elimination as an optimization
|
||||
* during closure operations. See antlr/antlr4#1398.
|
||||
*
|
||||
* The optimization is to avoid adding the loop entry config when
|
||||
* the exit path can only lead back to the same
|
||||
* StarLoopEntryState after popping context at the rule end state
|
||||
* (traversing only epsilon edges, so we're still in closure, in
|
||||
* this same rule).
|
||||
*
|
||||
* We need to detect any state that can reach loop entry on
|
||||
* epsilon w/o exiting rule. We don't have to look at FOLLOW
|
||||
* links, just ensure that all stack tops for config refer to key
|
||||
* states in LR rule.
|
||||
*
|
||||
* To verify we are in the right situation we must first check
|
||||
* closure is at a StarLoopEntryState generated during LR removal.
|
||||
* Then we check that each stack top of context is a return state
|
||||
* from one of these cases:
|
||||
*
|
||||
* 1. 'not' expr, '(' type ')' expr. The return state points at loop entry state
|
||||
* 2. expr op expr. The return state is the block end of internal block of (...)*
|
||||
* 3. 'between' expr 'and' expr. The return state of 2nd expr reference.
|
||||
* That state points at block end of internal block of (...)*.
|
||||
* 4. expr '?' expr ':' expr. The return state points at block end,
|
||||
* which points at loop entry state.
|
||||
*
|
||||
* If any is true for each stack top, then closure does not add a
|
||||
* config to the current config set for edge[0], the loop entry branch.
|
||||
*
|
||||
* Conditions fail if any context for the current config is:
|
||||
*
|
||||
* a. empty (we'd fall out of expr to do a global FOLLOW which could
|
||||
* even be to some weird spot in expr) or,
|
||||
* b. lies outside of expr or,
|
||||
* c. lies within expr but at a state not the BlockEndState
|
||||
* generated during LR removal
|
||||
*
|
||||
* Do we need to evaluate predicates ever in closure for this case?
|
||||
*
|
||||
* No. Predicates, including precedence predicates, are only
|
||||
* evaluated when computing a DFA start state. I.e., only before
|
||||
* the lookahead (but not parser) consumes a token.
|
||||
*
|
||||
* There are no epsilon edges allowed in LR rule alt blocks or in
|
||||
* the "primary" part (ID here). If closure is in
|
||||
* StarLoopEntryState any lookahead operation will have consumed a
|
||||
* token as there are no epsilon-paths that lead to
|
||||
* StarLoopEntryState. We do not have to evaluate predicates
|
||||
* therefore if we are in the generated StarLoopEntryState of a LR
|
||||
* rule. Note that when making a prediction starting at that
|
||||
* decision point, decision d=2, compute-start-state performs
|
||||
* closure starting at edges[0], edges[1] emanating from
|
||||
* StarLoopEntryState. That means it is not performing closure on
|
||||
* StarLoopEntryState during compute-start-state.
|
||||
*
|
||||
* How do we know this always gives same prediction answer?
|
||||
*
|
||||
* Without predicates, loop entry and exit paths are ambiguous
|
||||
* upon remaining input +b (in, say, a+b). Either paths lead to
|
||||
* valid parses. Closure can lead to consuming + immediately or by
|
||||
* falling out of this call to expr back into expr and loop back
|
||||
* again to StarLoopEntryState to match +b. In this special case,
|
||||
* we choose the more efficient path, which is to take the bypass
|
||||
* path.
|
||||
*
|
||||
* The lookahead language has not changed because closure chooses
|
||||
* one path over the other. Both paths lead to consuming the same
|
||||
* remaining input during a lookahead operation. If the next token
|
||||
* is an operator, lookahead will enter the choice block with
|
||||
* operators. If it is not, lookahead will exit expr. Same as if
|
||||
* closure had chosen to enter the choice block immediately.
|
||||
*
|
||||
* Closure is examining one config (some loopentrystate, some alt,
|
||||
* context) which means it is considering exactly one alt. Closure
|
||||
* always copies the same alt to any derived configs.
|
||||
*
|
||||
* How do we know this optimization doesn't mess up precedence in
|
||||
* our parse trees?
|
||||
*
|
||||
* Looking through expr from left edge of stat only has to confirm
|
||||
* that an input, say, a+b+c; begins with any valid interpretation
|
||||
* of an expression. The precedence actually doesn't matter when
|
||||
* making a decision in stat seeing through expr. It is only when
|
||||
* parsing rule expr that we must use the precedence to get the
|
||||
* right interpretation and, hence, parse tree.
|
||||
*/
|
||||
bool canDropLoopEntryEdgeInLeftRecursiveRule(ATNConfig *config) const;
|
||||
virtual std::string getRuleName(size_t index);
|
||||
|
||||
virtual Ref<ATNConfig> precedenceTransition(Ref<ATNConfig> const& config, PrecedencePredicateTransition *pt,
|
||||
bool collectPredicates, bool inContext, bool fullCtx);
|
||||
|
||||
void setPredictionMode(PredictionMode newMode);
|
||||
PredictionMode getPredictionMode();
|
||||
|
||||
Parser* getParser();
|
||||
|
||||
virtual std::string getTokenName(size_t t);
|
||||
|
||||
virtual std::string getLookaheadName(TokenStream *input);
|
||||
|
||||
private:
|
||||
/// <summary>
|
||||
/// SLL, LL, or LL + exact ambig detection? </summary>
|
||||
PredictionMode mode;
|
||||
|
||||
/// Used for debugging in adaptivePredict around execATN but I cut
|
||||
/// it out for clarity now that alg. works well. We can leave this
|
||||
/// "dead" code for a bit.
|
||||
/// </summary>
|
||||
virtual void dumpDeadEndConfigs(NoViableAltException &nvae);
|
||||
|
||||
protected:
|
||||
Parser *const parser;
|
||||
|
||||
/// <summary>
|
||||
/// Each prediction operation uses a cache for merge of prediction contexts.
|
||||
/// Don't keep around as it wastes huge amounts of memory. The merge cache
|
||||
|
@ -273,20 +386,7 @@ namespace atn {
|
|||
size_t _startIndex;
|
||||
ParserRuleContext *_outerContext;
|
||||
dfa::DFA *_dfa; // Reference into the decisionToDFA vector.
|
||||
|
||||
public:
|
||||
/// Testing only!
|
||||
ParserATNSimulator(const ATN &atn, std::vector<dfa::DFA> &decisionToDFA,
|
||||
PredictionContextCache &sharedContextCache);
|
||||
|
||||
ParserATNSimulator(Parser *parser, const ATN &atn, std::vector<dfa::DFA> &decisionToDFA,
|
||||
PredictionContextCache &sharedContextCache);
|
||||
|
||||
virtual void reset() override;
|
||||
virtual void clearDFA() override;
|
||||
virtual size_t adaptivePredict(TokenStream *input, size_t decision, ParserRuleContext *outerContext);
|
||||
|
||||
protected:
|
||||
|
||||
/// <summary>
|
||||
/// Performs ATN simulation to compute a predicted alternative based
|
||||
/// upon the remaining input, but also updates the DFA cache to avoid
|
||||
|
@ -350,7 +450,7 @@ namespace atn {
|
|||
|
||||
// comes back with reach.uniqueAlt set to a valid alt
|
||||
virtual size_t execATNWithFullContext(dfa::DFA &dfa, dfa::DFAState *D, ATNConfigSet *s0,
|
||||
TokenStream *input, size_t startIndex, ParserRuleContext *outerContext); // how far we got before failing over
|
||||
TokenStream *input, size_t startIndex, ParserRuleContext *outerContext); // how far we got before failing over
|
||||
|
||||
virtual std::unique_ptr<ATNConfigSet> computeReachSet(ATNConfigSet *closure, size_t t, bool fullCtx);
|
||||
|
||||
|
@ -549,10 +649,10 @@ namespace atn {
|
|||
virtual ATNState *getReachableTarget(Transition *trans, size_t ttype);
|
||||
|
||||
virtual std::vector<Ref<SemanticContext>> getPredsForAmbigAlts(const antlrcpp::BitSet &ambigAlts,
|
||||
ATNConfigSet *configs, size_t nalts);
|
||||
ATNConfigSet *configs, size_t nalts);
|
||||
|
||||
virtual std::vector<dfa::DFAState::PredPrediction*> getPredicatePredictions(const antlrcpp::BitSet &ambigAlts,
|
||||
std::vector<Ref<SemanticContext>> altToPred);
|
||||
std::vector<Ref<SemanticContext>> altToPred);
|
||||
|
||||
/**
|
||||
* This method is used to improve the localization of error messages by
|
||||
|
@ -601,7 +701,7 @@ namespace atn {
|
|||
* identified and {@link #adaptivePredict} should report an error instead.
|
||||
*/
|
||||
size_t getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(ATNConfigSet *configs,
|
||||
ParserRuleContext *outerContext);
|
||||
ParserRuleContext *outerContext);
|
||||
|
||||
virtual size_t getAltThatFinishedDecisionEntryRule(ATNConfigSet *configs);
|
||||
|
||||
|
@ -615,7 +715,7 @@ namespace atn {
|
|||
* prediction, which is where predicates need to evaluate.
|
||||
*/
|
||||
std::pair<ATNConfigSet *, ATNConfigSet *> splitAccordingToSemanticValidity(ATNConfigSet *configs,
|
||||
ParserRuleContext *outerContext);
|
||||
ParserRuleContext *outerContext);
|
||||
|
||||
/// <summary>
|
||||
/// Look through a list of predicate/alt pairs, returning alts for the
|
||||
|
@ -627,7 +727,6 @@ namespace atn {
|
|||
virtual antlrcpp::BitSet evalSemanticContext(std::vector<dfa::DFAState::PredPrediction*> predPredictions,
|
||||
ParserRuleContext *outerContext, bool complete);
|
||||
|
||||
|
||||
/**
|
||||
* Evaluate a semantic context within a specific parser context.
|
||||
*
|
||||
|
@ -672,111 +771,15 @@ namespace atn {
|
|||
|
||||
virtual void closureCheckingStopState(Ref<ATNConfig> const& config, ATNConfigSet *configs, ATNConfig::Set &closureBusy,
|
||||
bool collectPredicates, bool fullCtx, int depth, bool treatEofAsEpsilon);
|
||||
|
||||
|
||||
/// Do the actual work of walking epsilon edges.
|
||||
virtual void closure_(Ref<ATNConfig> const& config, ATNConfigSet *configs, ATNConfig::Set &closureBusy,
|
||||
bool collectPredicates, bool fullCtx, int depth, bool treatEofAsEpsilon);
|
||||
|
||||
public:
|
||||
/** Implements first-edge (loop entry) elimination as an optimization
|
||||
* during closure operations. See antlr/antlr4#1398.
|
||||
*
|
||||
* The optimization is to avoid adding the loop entry config when
|
||||
* the exit path can only lead back to the same
|
||||
* StarLoopEntryState after popping context at the rule end state
|
||||
* (traversing only epsilon edges, so we're still in closure, in
|
||||
* this same rule).
|
||||
*
|
||||
* We need to detect any state that can reach loop entry on
|
||||
* epsilon w/o exiting rule. We don't have to look at FOLLOW
|
||||
* links, just ensure that all stack tops for config refer to key
|
||||
* states in LR rule.
|
||||
*
|
||||
* To verify we are in the right situation we must first check
|
||||
* closure is at a StarLoopEntryState generated during LR removal.
|
||||
* Then we check that each stack top of context is a return state
|
||||
* from one of these cases:
|
||||
*
|
||||
* 1. 'not' expr, '(' type ')' expr. The return state points at loop entry state
|
||||
* 2. expr op expr. The return state is the block end of internal block of (...)*
|
||||
* 3. 'between' expr 'and' expr. The return state of 2nd expr reference.
|
||||
* That state points at block end of internal block of (...)*.
|
||||
* 4. expr '?' expr ':' expr. The return state points at block end,
|
||||
* which points at loop entry state.
|
||||
*
|
||||
* If any is true for each stack top, then closure does not add a
|
||||
* config to the current config set for edge[0], the loop entry branch.
|
||||
*
|
||||
* Conditions fail if any context for the current config is:
|
||||
*
|
||||
* a. empty (we'd fall out of expr to do a global FOLLOW which could
|
||||
* even be to some weird spot in expr) or,
|
||||
* b. lies outside of expr or,
|
||||
* c. lies within expr but at a state not the BlockEndState
|
||||
* generated during LR removal
|
||||
*
|
||||
* Do we need to evaluate predicates ever in closure for this case?
|
||||
*
|
||||
* No. Predicates, including precedence predicates, are only
|
||||
* evaluated when computing a DFA start state. I.e., only before
|
||||
* the lookahead (but not parser) consumes a token.
|
||||
*
|
||||
* There are no epsilon edges allowed in LR rule alt blocks or in
|
||||
* the "primary" part (ID here). If closure is in
|
||||
* StarLoopEntryState any lookahead operation will have consumed a
|
||||
* token as there are no epsilon-paths that lead to
|
||||
* StarLoopEntryState. We do not have to evaluate predicates
|
||||
* therefore if we are in the generated StarLoopEntryState of a LR
|
||||
* rule. Note that when making a prediction starting at that
|
||||
* decision point, decision d=2, compute-start-state performs
|
||||
* closure starting at edges[0], edges[1] emanating from
|
||||
* StarLoopEntryState. That means it is not performing closure on
|
||||
* StarLoopEntryState during compute-start-state.
|
||||
*
|
||||
* How do we know this always gives same prediction answer?
|
||||
*
|
||||
* Without predicates, loop entry and exit paths are ambiguous
|
||||
* upon remaining input +b (in, say, a+b). Either paths lead to
|
||||
* valid parses. Closure can lead to consuming + immediately or by
|
||||
* falling out of this call to expr back into expr and loop back
|
||||
* again to StarLoopEntryState to match +b. In this special case,
|
||||
* we choose the more efficient path, which is to take the bypass
|
||||
* path.
|
||||
*
|
||||
* The lookahead language has not changed because closure chooses
|
||||
* one path over the other. Both paths lead to consuming the same
|
||||
* remaining input during a lookahead operation. If the next token
|
||||
* is an operator, lookahead will enter the choice block with
|
||||
* operators. If it is not, lookahead will exit expr. Same as if
|
||||
* closure had chosen to enter the choice block immediately.
|
||||
*
|
||||
* Closure is examining one config (some loopentrystate, some alt,
|
||||
* context) which means it is considering exactly one alt. Closure
|
||||
* always copies the same alt to any derived configs.
|
||||
*
|
||||
* How do we know this optimization doesn't mess up precedence in
|
||||
* our parse trees?
|
||||
*
|
||||
* Looking through expr from left edge of stat only has to confirm
|
||||
* that an input, say, a+b+c; begins with any valid interpretation
|
||||
* of an expression. The precedence actually doesn't matter when
|
||||
* making a decision in stat seeing through expr. It is only when
|
||||
* parsing rule expr that we must use the precedence to get the
|
||||
* right interpretation and, hence, parse tree.
|
||||
*/
|
||||
bool canDropLoopEntryEdgeInLeftRecursiveRule(ATNConfig *config) const;
|
||||
virtual std::string getRuleName(size_t index);
|
||||
|
||||
protected:
|
||||
|
||||
virtual Ref<ATNConfig> getEpsilonTarget(Ref<ATNConfig> const& config, Transition *t, bool collectPredicates,
|
||||
bool inContext, bool fullCtx, bool treatEofAsEpsilon);
|
||||
virtual Ref<ATNConfig> actionTransition(Ref<ATNConfig> const& config, ActionTransition *t);
|
||||
|
||||
public:
|
||||
virtual Ref<ATNConfig> precedenceTransition(Ref<ATNConfig> const& config, PrecedencePredicateTransition *pt,
|
||||
bool collectPredicates, bool inContext, bool fullCtx);
|
||||
|
||||
protected:
|
||||
virtual Ref<ATNConfig> predTransition(Ref<ATNConfig> const& config, PredicateTransition *pt, bool collectPredicates,
|
||||
bool inContext, bool fullCtx);
|
||||
|
||||
|
@ -832,19 +835,6 @@ namespace atn {
|
|||
|
||||
virtual antlrcpp::BitSet getConflictingAltsOrUniqueAlt(ATNConfigSet *configs);
|
||||
|
||||
public:
|
||||
virtual std::string getTokenName(size_t t);
|
||||
|
||||
virtual std::string getLookaheadName(TokenStream *input);
|
||||
|
||||
/// <summary>
|
||||
/// Used for debugging in adaptivePredict around execATN but I cut
|
||||
/// it out for clarity now that alg. works well. We can leave this
|
||||
/// "dead" code for a bit.
|
||||
/// </summary>
|
||||
virtual void dumpDeadEndConfigs(NoViableAltException &nvae);
|
||||
|
||||
protected:
|
||||
virtual NoViableAltException noViableAlt(TokenStream *input, ParserRuleContext *outerContext,
|
||||
ATNConfigSet *configs, size_t startIndex);
|
||||
|
||||
|
@ -901,13 +891,10 @@ namespace atn {
|
|||
const antlrcpp::BitSet &ambigAlts,
|
||||
ATNConfigSet *configs); // configs that LL not SLL considered conflicting
|
||||
|
||||
public:
|
||||
void setPredictionMode(PredictionMode newMode);
|
||||
PredictionMode getPredictionMode();
|
||||
|
||||
Parser* getParser();
|
||||
|
||||
private:
|
||||
// SLL, LL, or LL + exact ambig detection?
|
||||
PredictionMode _mode;
|
||||
|
||||
static bool getLrLoopSetting();
|
||||
void InitializeInstanceFields();
|
||||
};
|
||||
|
|
|
@ -17,7 +17,6 @@ namespace atn {
|
|||
class PredictionContextMergeCache;
|
||||
|
||||
typedef std::unordered_set<Ref<PredictionContext>, PredictionContextHasher, PredictionContextComparer> PredictionContextCache;
|
||||
//typedef std::map<std::pair<Ref<PredictionContext>, Ref<PredictionContext>>, Ref<PredictionContext>> PredictionContextMergeCache;
|
||||
|
||||
class ANTLR4CPP_PUBLIC PredictionContext {
|
||||
public:
|
||||
|
@ -28,10 +27,10 @@ namespace atn {
|
|||
/// Represents $ in an array in full context mode, when $
|
||||
/// doesn't mean wildcard: $ + x = [$,x]. Here,
|
||||
/// $ = EMPTY_RETURN_STATE.
|
||||
// ml: originally Integer.MAX_VALUE, which would be (size_t)-1 for us, but this is already used in places where
|
||||
// ml: originally Integer.MAX_VALUE, which would be -1 for us, but this is already used in places where
|
||||
// -1 is converted to unsigned, so we use a different value here. Any value does the job provided it doesn't
|
||||
// conflict with real return states.
|
||||
static const size_t EMPTY_RETURN_STATE = std::numeric_limits<size_t>::max() - 9;
|
||||
static const size_t EMPTY_RETURN_STATE = static_cast<size_t>(-10); // std::numeric_limits<size_t>::max() - 9;
|
||||
|
||||
private:
|
||||
static const size_t INITIAL_HASH = 1;
|
||||
|
|
|
@ -82,7 +82,7 @@ int SemanticContext::PrecedencePredicate::compareTo(PrecedencePredicate *o) {
|
|||
|
||||
size_t SemanticContext::PrecedencePredicate::hashCode() const {
|
||||
size_t hashCode = 1;
|
||||
hashCode = 31 * hashCode + (size_t)precedence;
|
||||
hashCode = 31 * hashCode + static_cast<size_t>(precedence);
|
||||
return hashCode;
|
||||
}
|
||||
|
||||
|
|
|
@ -10,16 +10,16 @@ using namespace antlr4::misc;
|
|||
Interval::~Interval() = default;
|
||||
|
||||
size_t antlr4::misc::numericToSymbol(ssize_t v) {
|
||||
return (size_t)v;
|
||||
return static_cast<size_t>(v);
|
||||
}
|
||||
|
||||
ssize_t antlr4::misc::symbolToNumeric(size_t v) {
|
||||
return (ssize_t)v;
|
||||
return static_cast<ssize_t>(v);
|
||||
}
|
||||
|
||||
Interval const Interval::INVALID;
|
||||
|
||||
Interval::Interval() : Interval((ssize_t)-1, -2) { // Need an explicit cast here for VS.
|
||||
Interval::Interval() : Interval(static_cast<ssize_t>(-1), -2) { // Need an explicit cast here for VS.
|
||||
}
|
||||
|
||||
Interval::Interval(size_t a_, size_t b_) : Interval(symbolToNumeric(a_), symbolToNumeric(b_)) {
|
||||
|
@ -41,8 +41,8 @@ bool Interval::operator == (const Interval &other) const {
|
|||
|
||||
size_t Interval::hashCode() const {
|
||||
size_t hash = 23;
|
||||
hash = hash * 31 + (size_t)a;
|
||||
hash = hash * 31 + (size_t)b;
|
||||
hash = hash * 31 + static_cast<size_t>(a);
|
||||
hash = hash * 31 + static_cast<size_t>(b);
|
||||
return hash;
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,9 +1,16 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "Any.h"

antlrcpp::Any::~Any()
using namespace antlrcpp;

Any::~Any()
{
  delete _ptr;
}

antlrcpp::Any::Base::~Base() {
Any::Base::~Base() {
}

@@ -19,7 +19,7 @@ namespace antlrcpp {
  template<class T>
  using StorageType = typename std::decay<T>::type;

  struct Any
  struct ANTLR4CPP_PUBLIC Any
  {
    bool isNull() const { return _ptr == nullptr; }
    bool isNotNull() const { return _ptr != nullptr; }
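antlrcpp::Any is the type-erased value that visitors pass around, which is why it now needs the ANTLR4CPP_PUBLIC export decoration. A short usage sketch under stated assumptions: only isNull()/isNotNull() appear in the hunk above; the is<>()/as<>() accessors are used here as I understand this era's runtime and should be checked against your version:

#include <iostream>
#include <string>
#include "antlr4-runtime.h"

int main() {
  antlrcpp::Any value = std::string("hello");   // type-erased storage
  if (value.isNotNull() && value.is<std::string>()) {
    std::cout << value.as<std::string>() << std::endl;
  }

  antlrcpp::Any empty;                          // default-constructed: holds nothing
  std::cout << std::boolalpha << empty.isNull() << std::endl;
  return 0;
}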
@@ -1,3 +1,8 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "tree/ErrorNode.h"

antlr4::tree::ErrorNode::~ErrorNode() {
@ -1,31 +1,6 @@
|
|||
/*
|
||||
* [The "BSD license"]
|
||||
* Copyright (c) 2012 Terence Parr
|
||||
* Copyright (c) 2012 Sam Harwell
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
|
||||
* Use of this file is governed by the BSD 3-clause license that
|
||||
* can be found in the LICENSE.txt file in the project root.
|
||||
*/
|
||||
|
||||
#include "support/CPPUtils.h"
|
||||
|
|
|
@@ -1,3 +1,8 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "ParseTreeListener.h"

antlr4::tree::ParseTreeListener::~ParseTreeListener() {

@@ -1,3 +1,8 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "ParseTreeVisitor.h"

antlr4::tree::ParseTreeVisitor::~ParseTreeVisitor() {

@@ -1,3 +1,8 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "tree/TerminalNode.h"

antlr4::tree::TerminalNode::~TerminalNode() {

@@ -1,3 +1,8 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

#include "tree/pattern/Chunk.h"

antlr4::tree::pattern::Chunk::~Chunk() {