Merge branch 'master' into master

bekket mcclane 2017-01-23 17:52:59 +08:00 committed by GitHub
commit 4f3785a29e
14 changed files with 43 additions and 38 deletions

View File

@@ -129,4 +129,5 @@ YYYY/MM/DD, github id, Full name, email
2016/12/11, Gaulouis, Gaulouis, gaulouis.com@gmail.com
2016/12/22, akosthekiss, Akos Kiss, akiss@inf.u-szeged.hu
2016/12/24, adrpo, Adrian Pop, adrian.pop@liu.se
2017/01/13, marcelo-rocha, Marcelo Rocha, mcrocha@gmail.com
2017/01/18, mshockwave, Bekket McClane, yihshyng223@gmail.com

View File

@@ -371,6 +371,7 @@ namespace Antlr4.Runtime
this._tokenSource = tokenSource;
tokens.Clear();
p = -1;
this.fetchedEOF = false;
}
public virtual IList<IToken> GetTokens()

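The C# hunk above, together with the matching JavaScript and Python hunks further down, resets the fetchedEOF flag whenever a new token source is attached, so a stream that has already buffered EOF starts fetching again after the swap. Below is a minimal, self-contained C++ sketch of the failure mode this guards against; ToySource and ToyBufferedStream are made-up stand-ins, not the runtime's real classes.

#include <iostream>
#include <string>
#include <vector>

// Hypothetical token source: yields its strings, then "EOF" forever.
struct ToySource {
  std::vector<std::string> tokens;
  size_t pos = 0;
  std::string next() { return pos < tokens.size() ? tokens[pos++] : "EOF"; }
};

// Hypothetical buffered stream mirroring the pattern in the hunks above.
struct ToyBufferedStream {
  ToySource *source = nullptr;
  std::vector<std::string> buffer;
  bool fetchedEOF = false;

  void setTokenSource(ToySource *s) {
    source = s;
    buffer.clear();
    fetchedEOF = false;  // analogous to the reset this commit adds
  }

  // Pull one token into the buffer unless EOF has already been buffered.
  void fetch() {
    if (fetchedEOF || source == nullptr) return;
    std::string t = source->next();
    buffer.push_back(t);
    if (t == "EOF") fetchedEOF = true;
  }
};

int main() {
  ToySource first, second;
  first.tokens = {"x"};
  second.tokens = {"y"};

  ToyBufferedStream stream;
  stream.setTokenSource(&first);
  stream.fetch();                 // buffers "x"
  stream.fetch();                 // buffers "EOF", sets fetchedEOF
  stream.setTokenSource(&second); // clears the buffer and the flag
  stream.fetch();
  std::cout << stream.buffer.front() << "\n";  // prints "y"
}

Without the reset in setTokenSource, the final fetch() would return immediately and the second source would never be read.
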
View File

@@ -107,5 +107,13 @@ if (WITH_DEMO)
add_subdirectory(demo)
endif(WITH_DEMO)
install(FILES License.txt README.md VERSION
if( EXISTS LICENSE.txt)
install(FILES LICENSE.txt
DESTINATION "share/doc/libantlr4")
elseif(EXISTS ../../LICENSE.txt)
install(FILES ../../LICENSE.txt
DESTINATION "share/doc/libantlr4")
endif()
install(FILES README.md VERSION
DESTINATION "share/doc/libantlr4")

View File

@@ -2,8 +2,13 @@
# Zip it
rm -f antlr4-cpp-runtime-source.zip
zip -r antlr4-cpp-runtime-source.zip "README.md" "cmake" "demo" "runtime" "CMakeLists.txt" "License.txt" "deploy-macos.sh" "deploy-source.sh" "deploy-windows.cmd" "VERSION" \
zip -r antlr4-cpp-runtime-source.zip "README.md" "cmake" "demo" "runtime" "CMakeLists.txt" "deploy-macos.sh" "deploy-source.sh" "deploy-windows.cmd" "VERSION" \
-X -x "*.DS_Store*" "antlrcpp.xcodeproj/xcuserdata/*" "*Build*" "*DerivedData*" "*.jar" "demo/generated/*" "*.vscode*" "runtime/build/*"
# Add the license file from the ANTLR root as well.
pushd ../../
zip runtime/cpp/antlr4-cpp-runtime-source.zip LICENSE.txt
popd
# Deploy
#cp antlr4-cpp-runtime-source.zip ~/antlr/sites/website-antlr4/download

View File

@@ -29,27 +29,20 @@ namespace tree {
template<typename V>
class ANTLR4CPP_PUBLIC ParseTreeProperty {
public:
ParseTreeProperty() {
InitializeInstanceFields();
}
virtual V get(ParseTree *node) {
return _annotations.get(node);
return _annotations[node];
}
virtual void put(ParseTree *node, V value) {
_annotations.put(node, value);
_annotations[node] = value;
}
virtual V removeFrom(ParseTree *node) {
return _annotations->remove(node);
auto value = _annotations[node];
_annotations.erase(node);
return value;
}
protected:
std::map<ParseTree*, V> _annotations;
private:
void InitializeInstanceFields() {
}
};
} // namespace tree

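The rewritten ParseTreeProperty above is a thin wrapper over std::map: put is an operator[] assignment, get is a lookup, and removeFrom reads the value out before erasing the key. Here is a free-standing sketch of the same pattern, using a placeholder Node type and a NodeProperty name instead of antlr4's ParseTree/ParseTreeProperty, which also shows the one std::map quirk worth knowing (operator[] inserts a default-constructed V for a node it has never seen).

#include <iostream>
#include <map>
#include <string>

struct Node {};  // placeholder for antlr4::tree::ParseTree

// Same shape as the rewritten ParseTreeProperty<V>.
template <typename V>
class NodeProperty {
public:
  V get(Node *node) { return _annotations[node]; }  // default-constructs V if absent
  void put(Node *node, V value) { _annotations[node] = value; }
  V removeFrom(Node *node) {
    V value = _annotations[node];
    _annotations.erase(node);
    return value;
  }
private:
  std::map<Node *, V> _annotations;
};

int main() {
  Node n;
  NodeProperty<std::string> types;
  types.put(&n, "int");
  std::cout << types.get(&n) << "\n";          // "int"
  std::cout << types.removeFrom(&n) << "\n";   // "int", and the entry is gone
  std::cout << '"' << types.get(&n) << "\"\n"; // "" - a missing node yields a default V
}

The default-on-miss behaviour fits the usual annotate-then-read-back usage, but note that get() on an unseen node also grows the map, so it is not a const operation.
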
View File

@@ -202,8 +202,6 @@ func (b *BaseATNConfig) String() string {
return fmt.Sprintf("(%v,%v%v%v%v)", b.state, b.alt, s1, s2, s3)
}
type LexerATNConfig struct {
*BaseATNConfig
lexerActionExecutor *LexerActionExecutor

View File

@@ -5,13 +5,13 @@
package antlr
import (
"bytes"
"errors"
"fmt"
"hash/fnv"
"sort"
"strings"
"bytes"
"strconv"
"strings"
)
func intMin(a, b int) int {

View File

@@ -234,6 +234,7 @@ BufferedTokenStream.prototype.setTokenSource = function(tokenSource) {
this.tokenSource = tokenSource;
this.tokens = [];
this.index = -1;
this.fetchedEOF = false;
};
@@ -280,8 +281,7 @@ BufferedTokenStream.prototype.getHiddenTokensToRight = function(tokenIndex,
if (tokenIndex < 0 || tokenIndex >= this.tokens.length) {
throw "" + tokenIndex + " not in 0.." + this.tokens.length - 1;
}
var nextOnChannel = this.nextTokenOnChannel(tokenIndex + 1,
Lexer.DEFAULT_TOKEN_CHANNEL);
var nextOnChannel = this.nextTokenOnChannel(tokenIndex + 1, Lexer.DEFAULT_TOKEN_CHANNEL);
var from_ = tokenIndex + 1;
// if none onchannel to right, nextOnChannel=-1 so set to = last token
var to = nextOnChannel === -1 ? this.tokens.length - 1 : nextOnChannel;
@@ -300,8 +300,7 @@ BufferedTokenStream.prototype.getHiddenTokensToLeft = function(tokenIndex,
if (tokenIndex < 0 || tokenIndex >= this.tokens.length) {
throw "" + tokenIndex + " not in 0.." + this.tokens.length - 1;
}
var prevOnChannel = this.previousTokenOnChannel(tokenIndex - 1,
Lexer.DEFAULT_TOKEN_CHANNEL);
var prevOnChannel = this.previousTokenOnChannel(tokenIndex - 1, Lexer.DEFAULT_TOKEN_CHANNEL);
if (prevOnChannel === tokenIndex - 1) {
return null;
}

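The two hunks above only re-wrap the nextTokenOnChannel / previousTokenOnChannel calls onto a single line; the surrounding logic is unchanged: starting at tokenIndex + 1, find the next default-channel token (or fall back to the last token if there is none) and return the off-channel tokens in between. The following is a free-standing C++ sketch of that right-hand scan over a plain token list; Tok, DEFAULT_CHANNEL and the two helper functions are illustrative, not the runtime's API.

#include <iostream>
#include <vector>

struct Tok { int index; int channel; };  // hypothetical token: position + channel id
const int DEFAULT_CHANNEL = 0;

// Index of the next token on `channel` at or after i, or -1 if there is none.
int nextTokenOnChannel(const std::vector<Tok> &tokens, int i, int channel) {
  for (; i < (int)tokens.size(); ++i)
    if (tokens[i].channel == channel) return i;
  return -1;
}

// Off-channel tokens between tokenIndex and the next default-channel token.
std::vector<Tok> hiddenTokensToRight(const std::vector<Tok> &tokens, int tokenIndex) {
  int nextOnChannel = nextTokenOnChannel(tokens, tokenIndex + 1, DEFAULT_CHANNEL);
  int from = tokenIndex + 1;
  // if none on-channel to the right, scan through to the last token
  int to = nextOnChannel == -1 ? (int)tokens.size() - 1 : nextOnChannel;
  std::vector<Tok> hidden;
  for (int i = from; i <= to && i < (int)tokens.size(); ++i)
    if (tokens[i].channel != DEFAULT_CHANNEL) hidden.push_back(tokens[i]);
  return hidden;
}

int main() {
  // channel 1 plays the role of a hidden channel (whitespace, comments, ...)
  std::vector<Tok> tokens = {{0, 0}, {1, 1}, {2, 1}, {3, 0}, {4, 1}};
  for (const Tok &t : hiddenTokensToRight(tokens, 0))
    std::cout << t.index << " ";  // prints "1 2"
  std::cout << "\n";
}

getHiddenTokensToLeft is the mirror image, scanning left from tokenIndex - 1 via previousTokenOnChannel.
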
View File

@@ -192,7 +192,7 @@ class BufferedTokenStream(TokenStream):
self.tokenSource = tokenSource
self.tokens = []
self.index = -1
self.fetchedEOF = False
# Given a starting index, return the index of the next token on channel.

View File

@@ -194,7 +194,7 @@ class BufferedTokenStream(TokenStream):
self.tokenSource = tokenSource
self.tokens = []
self.index = -1
self.fetchedEOF = False
# Given a starting index, return the index of the next token on channel.