Merge pull request #1983 from shirou/master

[Go] Add CommonTokenStream test.
Terence Parr 2017-10-21 13:00:02 -07:00 committed by GitHub
commit 593b49fa3d
4 changed files with 389 additions and 0 deletions


@@ -0,0 +1,154 @@
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
package antlr

import (
	"testing"
)
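
// commonTokenStreamTestLexer is a stub lexer that replays a fixed,
// pre-built token slice, letting the tests drive CommonTokenStream
// without a real character stream.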
type commonTokenStreamTestLexer struct {
	*BaseLexer

	tokens []Token
	i      int
}

func (l *commonTokenStreamTestLexer) NextToken() Token {
	tmp := l.tokens[l.i]
	l.i++
	return tmp
}
func TestCommonTokenStreamOffChannel(t *testing.T) {
	assert := assertNew(t)
	lexEngine := &commonTokenStreamTestLexer{
		tokens: []Token{
			newTestCommonToken(1, " ", LexerHidden),                    // 0
			newTestCommonToken(1, "x", LexerDefaultTokenChannel),       // 1
			newTestCommonToken(1, " ", LexerHidden),                    // 2
			newTestCommonToken(1, "=", LexerDefaultTokenChannel),       // 3
			newTestCommonToken(1, "34", LexerDefaultTokenChannel),      // 4
			newTestCommonToken(1, " ", LexerHidden),                    // 5
			newTestCommonToken(1, " ", LexerHidden),                    // 6
			newTestCommonToken(1, ";", LexerDefaultTokenChannel),       // 7
			newTestCommonToken(1, "\n", LexerHidden),                   // 8
			newTestCommonToken(TokenEOF, "", LexerDefaultTokenChannel), // 9
		},
	}

	tokens := NewCommonTokenStream(lexEngine, TokenDefaultChannel)

	assert.Equal("x", tokens.LT(1).GetText()) // must skip first off-channel token
	tokens.Consume()
	assert.Equal("=", tokens.LT(1).GetText())
	assert.Equal("x", tokens.LT(-1).GetText())

	tokens.Consume()
	assert.Equal("34", tokens.LT(1).GetText())
	assert.Equal("=", tokens.LT(-1).GetText())

	tokens.Consume()
	assert.Equal(";", tokens.LT(1).GetText())
	assert.Equal("34", tokens.LT(-1).GetText())

	tokens.Consume()
	assert.Equal(TokenEOF, tokens.LT(1).GetTokenType())
	assert.Equal(";", tokens.LT(-1).GetText())
	assert.Equal("34", tokens.LT(-2).GetText())
	assert.Equal("=", tokens.LT(-3).GetText())
	assert.Equal("x", tokens.LT(-4).GetText())
}
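
Note the LT(-k) lookbacks in the final block above: because the stream was created on TokenDefaultChannel, lookahead and lookbehind both skip hidden-channel tokens, so from EOF the previous four on-channel tokens are ';', '34', '=', and 'x' even though whitespace tokens sit between them.
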
func TestCommonTokenStreamFetchOffChannel(t *testing.T) {
	assert := assertNew(t)
	lexEngine := &commonTokenStreamTestLexer{
		tokens: []Token{
			newTestCommonToken(1, " ", LexerHidden),                    // 0
			newTestCommonToken(1, "x", LexerDefaultTokenChannel),       // 1
			newTestCommonToken(1, " ", LexerHidden),                    // 2
			newTestCommonToken(1, "=", LexerDefaultTokenChannel),       // 3
			newTestCommonToken(1, "34", LexerDefaultTokenChannel),      // 4
			newTestCommonToken(1, " ", LexerHidden),                    // 5
			newTestCommonToken(1, " ", LexerHidden),                    // 6
			newTestCommonToken(1, ";", LexerDefaultTokenChannel),       // 7
			newTestCommonToken(1, " ", LexerHidden),                    // 8
			newTestCommonToken(1, "\n", LexerHidden),                   // 9
			newTestCommonToken(TokenEOF, "", LexerDefaultTokenChannel), // 10
		},
	}

	tokens := NewCommonTokenStream(lexEngine, TokenDefaultChannel)
	tokens.Fill()

	assert.Nil(tokens.getHiddenTokensToLeft(0, -1))
	assert.Nil(tokens.getHiddenTokensToRight(0, -1))

	assert.Equal("[[@0,0:0=' ',<1>,channel=1,0:-1]]",
		tokensToString(tokens.getHiddenTokensToLeft(1, -1)))
	assert.Equal("[[@2,0:0=' ',<1>,channel=1,0:-1]]",
		tokensToString(tokens.getHiddenTokensToRight(1, -1)))

	assert.Nil(tokens.getHiddenTokensToLeft(2, -1))
	assert.Nil(tokens.getHiddenTokensToRight(2, -1))

	assert.Equal("[[@2,0:0=' ',<1>,channel=1,0:-1]]",
		tokensToString(tokens.getHiddenTokensToLeft(3, -1)))
	assert.Nil(tokens.getHiddenTokensToRight(3, -1))

	assert.Nil(tokens.getHiddenTokensToLeft(4, -1))
	assert.Equal("[[@5,0:0=' ',<1>,channel=1,0:-1], [@6,0:0=' ',<1>,channel=1,0:-1]]",
		tokensToString(tokens.getHiddenTokensToRight(4, -1)))

	assert.Nil(tokens.getHiddenTokensToLeft(5, -1))
	assert.Equal("[[@6,0:0=' ',<1>,channel=1,0:-1]]",
		tokensToString(tokens.getHiddenTokensToRight(5, -1)))

	assert.Equal("[[@5,0:0=' ',<1>,channel=1,0:-1]]",
		tokensToString(tokens.getHiddenTokensToLeft(6, -1)))
	assert.Nil(tokens.getHiddenTokensToRight(6, -1))

	assert.Equal("[[@5,0:0=' ',<1>,channel=1,0:-1], [@6,0:0=' ',<1>,channel=1,0:-1]]",
		tokensToString(tokens.getHiddenTokensToLeft(7, -1)))
	assert.Equal("[[@8,0:0=' ',<1>,channel=1,0:-1], [@9,0:0='\\n',<1>,channel=1,0:-1]]",
		tokensToString(tokens.getHiddenTokensToRight(7, -1)))

	assert.Nil(tokens.getHiddenTokensToLeft(8, -1))
	assert.Equal("[[@9,0:0='\\n',<1>,channel=1,0:-1]]",
		tokensToString(tokens.getHiddenTokensToRight(8, -1)))

	assert.Equal("[[@8,0:0=' ',<1>,channel=1,0:-1]]",
		tokensToString(tokens.getHiddenTokensToLeft(9, -1)))
	assert.Nil(tokens.getHiddenTokensToRight(9, -1))
}
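
// commonTokenStreamTestLexerSingleEOF reports EOF on every NextToken call,
// simulating an empty token source.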
type commonTokenStreamTestLexerSingleEOF struct {
	*BaseLexer

	tokens []Token
	i      int
}

func (l *commonTokenStreamTestLexerSingleEOF) NextToken() Token {
	return newTestCommonToken(TokenEOF, "", LexerDefaultTokenChannel)
}

func TestCommonTokenStreamSingleEOF(t *testing.T) {
	assert := assertNew(t)
	lexEngine := &commonTokenStreamTestLexerSingleEOF{}
	tokens := NewCommonTokenStream(lexEngine, TokenDefaultChannel)
	tokens.Fill()

	assert.Equal(TokenEOF, tokens.LA(1))
	assert.Equal(0, tokens.index)
	assert.Equal(1, tokens.Size())
}

func TestCommonTokenStreamCannotConsumeEOF(t *testing.T) {
	assert := assertNew(t)
	lexEngine := &commonTokenStreamTestLexerSingleEOF{}
	tokens := NewCommonTokenStream(lexEngine, TokenDefaultChannel)
	tokens.Fill()

	assert.Equal(TokenEOF, tokens.LA(1))
	assert.Equal(0, tokens.index)
	assert.Equal(1, tokens.Size())
	assert.Panics(tokens.Consume)
}
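
The last assertion depends on CommonTokenStream refusing to move past EOF: Consume panics at that point in the Go runtime, and the Panics helper in the next file turns that into a pass or fail via recover instead of crashing the test binary.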


@@ -0,0 +1,98 @@
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
// These assert functions are borrowed from https://github.com/stretchr/testify/ (MIT License)
package antlr

import (
	"fmt"
	"reflect"
	"testing"
)
type assert struct {
	t *testing.T
}

func assertNew(t *testing.T) *assert {
	return &assert{
		t: t,
	}
}

func (a *assert) Equal(expected, actual interface{}) bool {
	if !objectsAreEqual(expected, actual) {
		return a.Fail(fmt.Sprintf("Not equal:\n"+
			"expected: %#v\n"+
			"  actual: %#v\n", expected, actual))
	}
	return true
}

func objectsAreEqual(expected, actual interface{}) bool {
	if expected == nil || actual == nil {
		return expected == actual
	}
	return reflect.DeepEqual(expected, actual)
}

func (a *assert) Nil(object interface{}) bool {
	if isNil(object) {
		return true
	}
	return a.Fail(fmt.Sprintf("Expected nil, but got: %#v", object))
}

func (a *assert) NotNil(object interface{}) bool {
	if !isNil(object) {
		return true
	}
	return a.Fail("Expected value not to be nil.")
}

// isNil checks if a specified object is nil or not, without Failing.
func isNil(object interface{}) bool {
	if object == nil {
		return true
	}

	value := reflect.ValueOf(object)
	kind := value.Kind()
	if kind >= reflect.Chan && kind <= reflect.Slice && value.IsNil() {
		return true
	}

	return false
}
func (a *assert) Panics(f func()) bool {
	if funcDidPanic, panicValue := didPanic(f); !funcDidPanic {
		return a.Fail(fmt.Sprintf("func %#v should panic\n\r\tPanic value:\t%v", f, panicValue))
	}
	return true
}

// Fail reports a failure through the embedded *testing.T.
func (a *assert) Fail(failureMessage string) bool {
	a.t.Errorf("%s", failureMessage)
	return false
}

// didPanic returns true if the function passed to it panics. Otherwise, it returns false.
func didPanic(f func()) (bool, interface{}) {
	didPanic := false
	var message interface{}

	func() {
		defer func() {
			if message = recover(); message != nil {
				didPanic = true
			}
		}()

		// call the target function
		f()
	}()

	return didPanic, message
}


@@ -0,0 +1,107 @@
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.
package antlr

/*
LexerB is a lexer for testing purposes.

This file is generated from this grammar:

lexer grammar LexerB;

ID : 'a'..'z'+;
INT : '0'..'9'+;
SEMI : ';';
ASSIGN : '=';
PLUS : '+';
MULT : '*';
WS : ' '+;
*/
var lexerB_serializedLexerAtn = []uint16{
	3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 9, 40, 8,
	1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9,
	7, 4, 8, 9, 8, 3, 2, 6, 2, 19, 10, 2, 13, 2, 14, 2, 20, 3, 3, 6, 3, 24,
	10, 3, 13, 3, 14, 3, 25, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7, 3, 7,
	3, 8, 6, 8, 37, 10, 8, 13, 8, 14, 8, 38, 2, 2, 9, 3, 3, 5, 4, 7, 5, 9,
	6, 11, 7, 13, 8, 15, 9, 3, 2, 2, 2, 42, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2,
	2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2,
	2, 2, 2, 15, 3, 2, 2, 2, 3, 18, 3, 2, 2, 2, 5, 23, 3, 2, 2, 2, 7, 27, 3,
	2, 2, 2, 9, 29, 3, 2, 2, 2, 11, 31, 3, 2, 2, 2, 13, 33, 3, 2, 2, 2, 15,
	36, 3, 2, 2, 2, 17, 19, 4, 99, 124, 2, 18, 17, 3, 2, 2, 2, 19, 20, 3, 2,
	2, 2, 20, 18, 3, 2, 2, 2, 20, 21, 3, 2, 2, 2, 21, 4, 3, 2, 2, 2, 22, 24,
	4, 50, 59, 2, 23, 22, 3, 2, 2, 2, 24, 25, 3, 2, 2, 2, 25, 23, 3, 2, 2,
	2, 25, 26, 3, 2, 2, 2, 26, 6, 3, 2, 2, 2, 27, 28, 7, 61, 2, 2, 28, 8, 3,
	2, 2, 2, 29, 30, 7, 63, 2, 2, 30, 10, 3, 2, 2, 2, 31, 32, 7, 45, 2, 2,
	32, 12, 3, 2, 2, 2, 33, 34, 7, 44, 2, 2, 34, 14, 3, 2, 2, 2, 35, 37, 7,
	34, 2, 2, 36, 35, 3, 2, 2, 2, 37, 38, 3, 2, 2, 2, 38, 36, 3, 2, 2, 2, 38,
	39, 3, 2, 2, 2, 39, 16, 3, 2, 2, 2, 6, 2, 20, 25, 38, 2,
}
var lexerB_lexerDeserializer = NewATNDeserializer(nil)
var lexerB_lexerAtn = lexerB_lexerDeserializer.DeserializeFromUInt16(lexerB_serializedLexerAtn)

var lexerB_lexerChannelNames = []string{
	"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}

var lexerB_lexerModeNames = []string{
	"DEFAULT_MODE",
}

var lexerB_lexerLiteralNames = []string{
	"", "", "", "';'", "'='", "'+'", "'*'",
}

var lexerB_lexerSymbolicNames = []string{
	"", "ID", "INT", "SEMI", "ASSIGN", "PLUS", "MULT", "WS",
}

var lexerB_lexerRuleNames = []string{
	"ID", "INT", "SEMI", "ASSIGN", "PLUS", "MULT", "WS",
}

type LexerB struct {
	*BaseLexer

	channelNames []string
	modeNames    []string
	// TODO: EOF string
}

var lexerB_lexerDecisionToDFA = make([]*DFA, len(lexerB_lexerAtn.DecisionToState))
func init() {
	for index, ds := range lexerB_lexerAtn.DecisionToState {
		lexerB_lexerDecisionToDFA[index] = NewDFA(ds, index)
	}
}

func NewLexerB(input CharStream) *LexerB {
	l := new(LexerB)

	l.BaseLexer = NewBaseLexer(input)
	l.Interpreter = NewLexerATNSimulator(l, lexerB_lexerAtn, lexerB_lexerDecisionToDFA, NewPredictionContextCache())

	l.channelNames = lexerB_lexerChannelNames
	l.modeNames = lexerB_lexerModeNames
	l.RuleNames = lexerB_lexerRuleNames
	l.LiteralNames = lexerB_lexerLiteralNames
	l.SymbolicNames = lexerB_lexerSymbolicNames
	l.GrammarFileName = "LexerB.g4"
	// TODO: l.EOF = TokenEOF

	return l
}

// LexerB tokens.
const (
	LexerBID     = 1
	LexerBINT    = 2
	LexerBSEMI   = 3
	LexerBASSIGN = 4
	LexerBPLUS   = 5
	LexerBMULT   = 6
	LexerBWS     = 7
)
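
For orientation, here is a minimal sketch (not part of this diff) of how a generated lexer such as LexerB is typically combined with the CommonTokenStream under test; the input string and the fmt import are illustrative assumptions:

// Hypothetical usage sketch, assuming the antlr package context above plus "fmt".
input := NewInputStream("x = 34;")
lexer := NewLexerB(input)
stream := NewCommonTokenStream(lexer, TokenDefaultChannel)
stream.Fill() // pull every token from the lexer up to EOF
for _, tok := range stream.GetAllTokens() {
	fmt.Println(tok) // each token prints in the [@index,start:stop='text',<type>,...] form
}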


@@ -0,0 +1,30 @@
package antlr

import (
	"fmt"
	"strings"
)
// newTestCommonToken creates a CommonToken with the given token type, text,
// and channel.
// notice: test purpose only
func newTestCommonToken(tokenType int, text string, channel int) *CommonToken {
	t := new(CommonToken)
	t.BaseToken = new(BaseToken)

	t.tokenType = tokenType
	t.channel = channel
	t.text = text
	t.line = 0
	t.column = -1

	return t
}
// tokensToString returns a string representation of a []Token.
// notice: test purpose only
func tokensToString(tokens []Token) string {
	buf := make([]string, len(tokens))
	for i, token := range tokens {
		buf[i] = fmt.Sprintf("%v", token)
	}

	return "[" + strings.Join(buf, ", ") + "]"
}