forked from jasder/antlr
Merge branch 'master' into fix-missing-variable-declarations
commit 97aaddc006
@@ -130,3 +130,6 @@ YYYY/MM/DD, github id, Full name, email
 2016/12/22, akosthekiss, Akos Kiss, akiss@inf.u-szeged.hu
 2016/12/24, adrpo, Adrian Pop, adrian.pop@liu.se
 2017/01/11, robertbrignull, Robert Brignull, robertbrignull@gmail.com
+2017/01/13, marcelo-rocha, Marcelo Rocha, mcrocha@gmail.com
+2017/01/23, bhamiltoncx, Ben Hamilton, bhamiltoncx+antlr@gmail.com
+2017/01/18, mshockwave, Bekket McClane, yihshyng223@gmail.com
@@ -409,7 +409,7 @@ public class BaseCSharpTest implements RuntimeTestSupport /*, SpecialRuntimeTest
     }

     private String locateTool(String tool) {
-        String[] roots = { "/opt/local/bin/", "/usr/bin/", "/usr/local/bin/" };
+        String[] roots = { "/opt/local/bin/", "/usr/local/bin/", "/usr/bin/" };
         for(String root : roots) {
             if(new File(root + tool).exists())
                 return root + tool;
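This hunk appears to come from the Java test harness for the C# target (BaseCSharpTest): it moves /usr/local/bin/ ahead of /usr/bin/ in the search roots, and since the first existing path is returned, a locally installed tool now shadows the system copy. A minimal sketch of that first-match lookup, written here in C++17 purely for illustration; the free function and the main() driver are assumptions, not project code.

    // Illustrative only: the same "first existing path wins" probe as the
    // Java helper above, in C++17. The directory list mirrors the hunk.
    #include <array>
    #include <filesystem>
    #include <iostream>
    #include <optional>
    #include <string>

    std::optional<std::string> locateTool(const std::string &tool) {
        // /usr/local/bin/ is probed before /usr/bin/, so a locally installed
        // tool now wins over the system-provided one.
        const std::array<std::string, 3> roots = {
            "/opt/local/bin/", "/usr/local/bin/", "/usr/bin/"};
        for (const auto &root : roots) {
            if (std::filesystem::exists(root + tool))
                return root + tool;  // first match ends the search
        }
        return std::nullopt;
    }

    int main() {
        if (auto path = locateTool("zip"))
            std::cout << *path << '\n';
        else
            std::cout << "tool not found\n";
    }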
@@ -371,6 +371,7 @@ namespace Antlr4.Runtime
             this._tokenSource = tokenSource;
+            tokens.Clear();
             p = -1;
             this.fetchedEOF = false;
         }

         public virtual IList<IToken> GetTokens()
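This hunk looks like the C# runtime's BufferedTokenStream.SetTokenSource: switching to a new token source now also clears the buffered token list, so tokens fetched from the previous source cannot leak into the next parse; the read index and the fetchedEOF flag were already reset. The JavaScript and Python hunks further down make the matching fetchedEOF fix for their targets. A stand-alone C++ sketch of the same reset pattern follows; the toy class is an illustration, not the ANTLR C++ runtime API.

    // Illustrative only: every piece of buffered state is dropped when the
    // token source changes, including the previously missing clear of tokens.
    #include <cassert>
    #include <string>
    #include <vector>

    struct ToyTokenStream {
        std::string source;               // stands in for the token source
        std::vector<std::string> tokens;  // tokens buffered from the old source
        int p = -1;                       // index of the current token
        bool fetchedEOF = false;

        void setTokenSource(std::string newSource) {
            source = std::move(newSource);
            tokens.clear();   // the line the C# hunk adds
            p = -1;
            fetchedEOF = false;
        }
    };

    int main() {
        ToyTokenStream ts;
        ts.tokens = {"a", "b"};
        ts.p = 1;
        ts.fetchedEOF = true;
        ts.setTokenSource("next input");
        assert(ts.tokens.empty() && ts.p == -1 && !ts.fetchedEOF);
        return 0;
    }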
@@ -107,5 +107,13 @@ if (WITH_DEMO)
   add_subdirectory(demo)
 endif(WITH_DEMO)

-install(FILES License.txt README.md VERSION
+if( EXISTS LICENSE.txt)
+  install(FILES LICENSE.txt
+          DESTINATION "share/doc/libantlr4")
+elseif(EXISTS ../../LICENSE.txt)
+  install(FILES ../../LICENSE.txt
+          DESTINATION "share/doc/libantlr4")
+endif()
+
+install(FILES README.md VERSION
         DESTINATION "share/doc/libantlr4")
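The C++ runtime's install rules previously shipped a License.txt that sat next to CMakeLists.txt; with the license kept at the repository root, the rule now installs a local LICENSE.txt when one exists and otherwise falls back to ../../LICENSE.txt, while README.md and VERSION are installed unconditionally. The same exists/fallback choice, sketched with std::filesystem purely for illustration (the paths mirror the hunk; this is not part of the build):

    // Illustrative only: install whichever license file exists, preferring
    // the one next to the runtime over the one at the repository root.
    #include <filesystem>
    #include <iostream>

    namespace fs = std::filesystem;

    int main() {
        const fs::path dest = "share/doc/libantlr4";
        fs::create_directories(dest);

        if (fs::exists("LICENSE.txt")) {              // license next to the runtime
            fs::copy_file("LICENSE.txt", dest / "LICENSE.txt",
                          fs::copy_options::overwrite_existing);
        } else if (fs::exists("../../LICENSE.txt")) { // license at the repository root
            fs::copy_file("../../LICENSE.txt", dest / "LICENSE.txt",
                          fs::copy_options::overwrite_existing);
        } else {
            std::cerr << "no LICENSE.txt found\n";
        }
        return 0;
    }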
@@ -155,9 +155,9 @@ ExternalProject_ADD(

 ExternalProject_Get_Property(antlr4cpp INSTALL_DIR)

-list(APPEND ANTLR4CPP_INCLUDE_DIRS ${INSTALL_DIR}/include)
+list(APPEND ANTLR4CPP_INCLUDE_DIRS ${INSTALL_DIR}/include/antlr4-runtime)
 foreach(src_path misc atn dfa tree support)
-  list(APPEND ANTLR4CPP_INCLUDE_DIRS ${INSTALL_DIR}/include/${src_path})
+  list(APPEND ANTLR4CPP_INCLUDE_DIRS ${INSTALL_DIR}/include/antlr4-runtime/${src_path})
 endforeach(src_path)

 set(ANTLR4CPP_LIBS "${INSTALL_DIR}/lib")
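This helper exports the include directories of the externally built ANTLR C++ runtime; the installed headers live under include/antlr4-runtime, so ANTLR4CPP_INCLUDE_DIRS now points there instead of the bare include/ root. With those directories on the compiler's include path and the runtime library linked, a consumer can pull in the umbrella header. A minimal example, under the assumption that an antlr4 C++ runtime is installed and the exported variables are passed to the compiler and linker:

    // Assumes ANTLR4CPP_INCLUDE_DIRS / ANTLR4CPP_LIBS from the hunk above are
    // supplied to the build; this is a consumer sketch, not project code.
    #include <iostream>

    #include "antlr4-runtime.h"  // resolved via .../include/antlr4-runtime

    int main() {
        antlr4::ANTLRInputStream input("hello parser");
        std::cout << input.size() << " characters: " << input.toString() << std::endl;
        return 0;
    }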
@@ -2,8 +2,13 @@

 # Zip it
 rm -f antlr4-cpp-runtime-source.zip
-zip -r antlr4-cpp-runtime-source.zip "README.md" "cmake" "demo" "runtime" "CMakeLists.txt" "License.txt" "deploy-macos.sh" "deploy-source.sh" "deploy-windows.cmd" "VERSION" \
+zip -r antlr4-cpp-runtime-source.zip "README.md" "cmake" "demo" "runtime" "CMakeLists.txt" "deploy-macos.sh" "deploy-source.sh" "deploy-windows.cmd" "VERSION" \
   -X -x "*.DS_Store*" "antlrcpp.xcodeproj/xcuserdata/*" "*Build*" "*DerivedData*" "*.jar" "demo/generated/*" "*.vscode*" "runtime/build/*"
+
+# Add the license file from the ANTLR root as well.
+pushd ../../
+zip runtime/cpp/antlr4-cpp-runtime-source.zip LICENSE.txt
+popd

 # Deploy
 #cp antlr4-cpp-runtime-source.zip ~/antlr/sites/website-antlr4/download
@@ -29,27 +29,20 @@ namespace tree {
   template<typename V>
   class ANTLR4CPP_PUBLIC ParseTreeProperty {
   public:
-    ParseTreeProperty() {
-      InitializeInstanceFields();
-    }
-
     virtual V get(ParseTree *node) {
-      return _annotations.get(node);
+      return _annotations[node];
     }
     virtual void put(ParseTree *node, V value) {
-      _annotations.put(node, value);
+      _annotations[node] = value;
     }
     virtual V removeFrom(ParseTree *node) {
-      return _annotations->remove(node);
+      auto value = _annotations[node];
+      _annotations.erase(node);
+      return value;
     }

   protected:
     std::map<ParseTree*, V> _annotations;
-
-  private:
-    void InitializeInstanceFields() {
-    }
-
   };

 } // namespace tree
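This hunk appears to be the C++ ParseTreeProperty header: the old code called Java-style get/put/remove on a std::map member, which do not exist on std::map, and carried an empty InitializeInstanceFields scaffold. The replacement uses operator[] and erase. One behavioral detail worth keeping in mind is that std::map::operator[] default-constructs and inserts a value for a missing key, so get() on an unannotated node yields V{} and adds an entry. A small self-contained sketch of those semantics; the int keys and string values below stand in for ParseTree pointers and annotations and are not ANTLR types.

    // Illustrative only: the std::map semantics the new ParseTreeProperty uses.
    #include <cassert>
    #include <map>
    #include <string>

    int main() {
        std::map<int, std::string> annotations;

        // put(node, value)  ->  _annotations[node] = value;
        annotations[1] = "expr";

        // get(node)  ->  _annotations[node];
        // operator[] default-constructs a value for a missing key, so reading
        // an unannotated node returns an empty string and inserts an entry.
        assert(annotations[1] == "expr");
        assert(annotations[2].empty());
        assert(annotations.size() == 2);

        // removeFrom(node)  ->  read the value, then erase the key.
        std::string removed = annotations[1];
        annotations.erase(1);
        assert(removed == "expr");
        assert(annotations.count(1) == 0);
        return 0;
    }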
@@ -93,11 +93,11 @@ func NewBaseATNConfig(c ATNConfig, state ATNState, context PredictionContext, se
     }

     return &BaseATNConfig{
-        state: state,
-        alt: c.GetAlt(),
-        context: context,
-        semanticContext: semanticContext,
-        reachesIntoOuterContext: c.GetReachesIntoOuterContext(),
+        state:                      state,
+        alt:                        c.GetAlt(),
+        context:                    context,
+        semanticContext:            semanticContext,
+        reachesIntoOuterContext:    c.GetReachesIntoOuterContext(),
         precedenceFilterSuppressed: c.getPrecedenceFilterSuppressed(),
     }
 }
@@ -202,8 +202,6 @@ func (b *BaseATNConfig) String() string {
     return fmt.Sprintf("(%v,%v%v%v%v)", b.state, b.alt, s1, s2, s3)
 }

-
-
 type LexerATNConfig struct {
     *BaseATNConfig
     lexerActionExecutor *LexerActionExecutor
@@ -15,7 +15,7 @@ type IATNSimulator interface {
 type BaseATNSimulator struct {
     atn                *ATN
     sharedContextCache *PredictionContextCache
-    decisionToDFA []*DFA
+    decisionToDFA      []*DFA
 }

 func NewBaseATNSimulator(atn *ATN, sharedContextCache *PredictionContextCache) *BaseATNSimulator {
@@ -10,7 +10,7 @@ import (
 )

 var (
-    LexerATNSimulatorDebug = false
+    LexerATNSimulatorDebug    = false
     LexerATNSimulatorDFADebug = false

     LexerATNSimulatorMinDFAEdge = 0
@@ -11,10 +11,10 @@ import (
 )

 var (
-    ParserATNSimulatorDebug = false
+    ParserATNSimulatorDebug            = false
     ParserATNSimulatorListATNDecisions = false
-    ParserATNSimulatorDFADebug = false
-    ParserATNSimulatorRetryDebug = false
+    ParserATNSimulatorDFADebug         = false
+    ParserATNSimulatorRetryDebug       = false
 )

 type ParserATNSimulator struct {
@@ -118,7 +118,7 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
     // closure block that determines whether a precedence rule
     // should continue or complete.

-    t2 := dfa.atnStartState
+    t2 := dfa.atnStartState
     t, ok := t2.(*StarLoopEntryState)
     if !dfa.precedenceDfa && ok {
         if t.precedenceRuleDecision {
@@ -23,7 +23,7 @@ const (

 var (
     BasePredictionContextglobalNodeCount = 1
-    BasePredictionContextid = BasePredictionContextglobalNodeCount
+    BasePredictionContextid              = BasePredictionContextglobalNodeCount
 )

 type PredictionContext interface {
@@ -620,8 +620,8 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
     j := 0 // walks b
     k := 0 // walks target M array

-    mergedReturnStates := make([]int, len(a.returnStates) + len(b.returnStates))
-    mergedParents := make([]PredictionContext, len(a.returnStates) + len(b.returnStates))
+    mergedReturnStates := make([]int, len(a.returnStates)+len(b.returnStates))
+    mergedParents := make([]PredictionContext, len(a.returnStates)+len(b.returnStates))
     // walk and merge to yield mergedParents, mergedReturnStates
     for i < len(a.returnStates) && j < len(b.returnStates) {
         aParent := a.parents[i]
@@ -5,13 +5,13 @@
 package antlr

 import (
+    "bytes"
     "errors"
     "fmt"
     "hash/fnv"
     "sort"
-    "strings"
-    "bytes"
     "strconv"
+    "strings"
 )

 func intMin(a, b int) int {
@@ -234,6 +234,7 @@ BufferedTokenStream.prototype.setTokenSource = function(tokenSource) {
     this.tokenSource = tokenSource;
     this.tokens = [];
     this.index = -1;
+    this.fetchedEOF = false;
 };


@@ -280,8 +281,7 @@ BufferedTokenStream.prototype.getHiddenTokensToRight = function(tokenIndex,
     if (tokenIndex < 0 || tokenIndex >= this.tokens.length) {
         throw "" + tokenIndex + " not in 0.." + this.tokens.length - 1;
     }
-    var nextOnChannel = this.nextTokenOnChannel(tokenIndex + 1,
-            Lexer.DEFAULT_TOKEN_CHANNEL);
+    var nextOnChannel = this.nextTokenOnChannel(tokenIndex + 1, Lexer.DEFAULT_TOKEN_CHANNEL);
     var from_ = tokenIndex + 1;
     // if none onchannel to right, nextOnChannel=-1 so set to = last token
     var to = nextOnChannel === -1 ? this.tokens.length - 1 : nextOnChannel;
@@ -300,8 +300,7 @@ BufferedTokenStream.prototype.getHiddenTokensToLeft = function(tokenIndex,
     if (tokenIndex < 0 || tokenIndex >= this.tokens.length) {
         throw "" + tokenIndex + " not in 0.." + this.tokens.length - 1;
     }
-    var prevOnChannel = this.previousTokenOnChannel(tokenIndex - 1,
-            Lexer.DEFAULT_TOKEN_CHANNEL);
+    var prevOnChannel = this.previousTokenOnChannel(tokenIndex - 1, Lexer.DEFAULT_TOKEN_CHANNEL);
     if (prevOnChannel === tokenIndex - 1) {
         return null;
     }
@@ -192,7 +192,7 @@ class BufferedTokenStream(TokenStream):
         self.tokenSource = tokenSource
         self.tokens = []
         self.index = -1
-
+        self.fetchedEOF = False


     # Given a starting index, return the index of the next token on channel.
@@ -194,7 +194,7 @@ class BufferedTokenStream(TokenStream):
         self.tokenSource = tokenSource
         self.tokens = []
         self.index = -1
-
+        self.fetchedEOF = False


     # Given a starting index, return the index of the next token on channel.