Everything compiles and generates properly for the Arithmetic example

This commit is contained in:
Peter Boyer 2015-12-24 14:49:44 -06:00
parent a2e273c5f6
commit 36ae2795f8
41 changed files with 1400 additions and 350 deletions

View File

@ -1,6 +1,6 @@
grammar Arithmetic;
options { language = JavaScript; }
options { language = Go; }
equation
: expression relop expression

View File

@ -0,0 +1,146 @@
// Generated from Arithmetic.g4 by ANTLR 4.5.1
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ErrorNode;
import org.antlr.v4.runtime.tree.TerminalNode;
/**
* This class provides an empty implementation of {@link ArithmeticListener},
* which can be extended to create a listener which only needs to handle a subset
* of the available methods.
*/
public class ArithmeticBaseListener implements ArithmeticListener {
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEquation(ArithmeticParser.EquationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEquation(ArithmeticParser.EquationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpression(ArithmeticParser.ExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpression(ArithmeticParser.ExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMultiplyingExpression(ArithmeticParser.MultiplyingExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMultiplyingExpression(ArithmeticParser.MultiplyingExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPowExpression(ArithmeticParser.PowExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPowExpression(ArithmeticParser.PowExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterAtom(ArithmeticParser.AtomContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitAtom(ArithmeticParser.AtomContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterScientific(ArithmeticParser.ScientificContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitScientific(ArithmeticParser.ScientificContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRelop(ArithmeticParser.RelopContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRelop(ArithmeticParser.RelopContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterNumber(ArithmeticParser.NumberContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitNumber(ArithmeticParser.NumberContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterVariable(ArithmeticParser.VariableContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitVariable(ArithmeticParser.VariableContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEveryRule(ParserRuleContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEveryRule(ParserRuleContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void visitTerminal(TerminalNode node) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void visitErrorNode(ErrorNode node) { }
}

View File

@ -0,0 +1,120 @@
// Generated from Arithmetic.g4 by ANTLR 4.5.1
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
// Machine-generated lexer for Arithmetic.g4 (ANTLR 4.5.1). Do not edit by hand;
// regenerate from the grammar. Token recognition is driven entirely by the
// serialized ATN (automaton) string at the bottom of this class.
public class ArithmeticLexer extends Lexer {
// Fail fast at class load if the runtime jar version differs from the
// generator version (4.5.1).
static { RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
// Token type constants (1-based; 0 is reserved by the runtime).
public static final int
LPAREN=1, RPAREN=2, PLUS=3, MINUS=4, TIMES=5, DIV=6, GT=7, LT=8, EQ=9,
POINT=10, E=11, POW=12, LETTER=13, DIGIT=14, WS=15;
public static String[] modeNames = {
"DEFAULT_MODE"
};
public static final String[] ruleNames = {
"LPAREN", "RPAREN", "PLUS", "MINUS", "TIMES", "DIV", "GT", "LT", "EQ",
"POINT", "E", "POW", "LETTER", "DIGIT", "WS"
};
// Literal/symbolic name tables, indexed by token type; nulls mark tokens
// with no fixed literal (e.g. E, LETTER, DIGIT, WS).
private static final String[] _LITERAL_NAMES = {
null, "'('", "')'", "'+'", "'-'", "'*'", "'/'", "'>'", "'<'", "'='", "'.'",
null, "'^'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, "LPAREN", "RPAREN", "PLUS", "MINUS", "TIMES", "DIV", "GT", "LT",
"EQ", "POINT", "E", "POW", "LETTER", "DIGIT", "WS"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
 * @deprecated Use {@link #VOCABULARY} instead.
 */
@Deprecated
public static final String[] tokenNames;
// Backfill the deprecated tokenNames array from VOCABULARY: prefer the
// literal name, fall back to the symbolic name, else "<INVALID>".
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
public ArithmeticLexer(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "Arithmetic.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
// Serialized lexer ATN — opaque payload decoded by ATNDeserializer below.
// Must remain byte-for-byte as emitted by the generator.
public static final String _serializedATN =
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\21E\b\1\4\2\t\2\4"+
"\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+
"\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\3\2\3\2\3\3\3\3\3\4"+
"\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f"+
"\3\r\3\r\3\16\5\16;\n\16\3\17\3\17\3\20\6\20@\n\20\r\20\16\20A\3\20\3"+
"\20\2\2\21\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33"+
"\17\35\20\37\21\3\2\5\4\2GGgg\4\2C\\c|\5\2\13\f\17\17\"\"E\2\3\3\2\2\2"+
"\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2"+
"\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2"+
"\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\3!\3\2\2\2\5#\3\2\2\2\7%\3\2\2"+
"\2\t\'\3\2\2\2\13)\3\2\2\2\r+\3\2\2\2\17-\3\2\2\2\21/\3\2\2\2\23\61\3"+
"\2\2\2\25\63\3\2\2\2\27\65\3\2\2\2\31\67\3\2\2\2\33:\3\2\2\2\35<\3\2\2"+
"\2\37?\3\2\2\2!\"\7*\2\2\"\4\3\2\2\2#$\7+\2\2$\6\3\2\2\2%&\7-\2\2&\b\3"+
"\2\2\2\'(\7/\2\2(\n\3\2\2\2)*\7,\2\2*\f\3\2\2\2+,\7\61\2\2,\16\3\2\2\2"+
"-.\7@\2\2.\20\3\2\2\2/\60\7>\2\2\60\22\3\2\2\2\61\62\7?\2\2\62\24\3\2"+
"\2\2\63\64\7\60\2\2\64\26\3\2\2\2\65\66\t\2\2\2\66\30\3\2\2\2\678\7`\2"+
"\28\32\3\2\2\29;\t\3\2\2:9\3\2\2\2;\34\3\2\2\2<=\4\62;\2=\36\3\2\2\2>"+
"@\t\4\2\2?>\3\2\2\2@A\3\2\2\2A?\3\2\2\2AB\3\2\2\2BC\3\2\2\2CD\b\20\2\2"+
"D \3\2\2\2\5\2:A\3\2\3\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
// Allocate one (initially empty) DFA per ATN decision at class-load time;
// the DFAs are filled in lazily by the simulator as input is lexed.
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}

View File

@ -0,0 +1,99 @@
// Generated from Arithmetic.g4 by ANTLR 4.5.1
import org.antlr.v4.runtime.tree.ParseTreeListener;
/**
 * This interface defines a complete listener for a parse tree produced by
 * {@link ArithmeticParser}.
 *
 * <p>NOTE(review): machine-generated by ANTLR 4.5.1 from Arithmetic.g4; do not
 * edit by hand. Implement this directly, or extend
 * {@code ArithmeticBaseListener} to override only the callbacks you need.</p>
 */
public interface ArithmeticListener extends ParseTreeListener {
/**
 * Enter a parse tree produced by {@link ArithmeticParser#equation}.
 * @param ctx the parse tree
 */
void enterEquation(ArithmeticParser.EquationContext ctx);
/**
 * Exit a parse tree produced by {@link ArithmeticParser#equation}.
 * @param ctx the parse tree
 */
void exitEquation(ArithmeticParser.EquationContext ctx);
/**
 * Enter a parse tree produced by {@link ArithmeticParser#expression}.
 * @param ctx the parse tree
 */
void enterExpression(ArithmeticParser.ExpressionContext ctx);
/**
 * Exit a parse tree produced by {@link ArithmeticParser#expression}.
 * @param ctx the parse tree
 */
void exitExpression(ArithmeticParser.ExpressionContext ctx);
/**
 * Enter a parse tree produced by {@link ArithmeticParser#multiplyingExpression}.
 * @param ctx the parse tree
 */
void enterMultiplyingExpression(ArithmeticParser.MultiplyingExpressionContext ctx);
/**
 * Exit a parse tree produced by {@link ArithmeticParser#multiplyingExpression}.
 * @param ctx the parse tree
 */
void exitMultiplyingExpression(ArithmeticParser.MultiplyingExpressionContext ctx);
/**
 * Enter a parse tree produced by {@link ArithmeticParser#powExpression}.
 * @param ctx the parse tree
 */
void enterPowExpression(ArithmeticParser.PowExpressionContext ctx);
/**
 * Exit a parse tree produced by {@link ArithmeticParser#powExpression}.
 * @param ctx the parse tree
 */
void exitPowExpression(ArithmeticParser.PowExpressionContext ctx);
/**
 * Enter a parse tree produced by {@link ArithmeticParser#atom}.
 * @param ctx the parse tree
 */
void enterAtom(ArithmeticParser.AtomContext ctx);
/**
 * Exit a parse tree produced by {@link ArithmeticParser#atom}.
 * @param ctx the parse tree
 */
void exitAtom(ArithmeticParser.AtomContext ctx);
/**
 * Enter a parse tree produced by {@link ArithmeticParser#scientific}.
 * @param ctx the parse tree
 */
void enterScientific(ArithmeticParser.ScientificContext ctx);
/**
 * Exit a parse tree produced by {@link ArithmeticParser#scientific}.
 * @param ctx the parse tree
 */
void exitScientific(ArithmeticParser.ScientificContext ctx);
/**
 * Enter a parse tree produced by {@link ArithmeticParser#relop}.
 * @param ctx the parse tree
 */
void enterRelop(ArithmeticParser.RelopContext ctx);
/**
 * Exit a parse tree produced by {@link ArithmeticParser#relop}.
 * @param ctx the parse tree
 */
void exitRelop(ArithmeticParser.RelopContext ctx);
/**
 * Enter a parse tree produced by {@link ArithmeticParser#number}.
 * @param ctx the parse tree
 */
void enterNumber(ArithmeticParser.NumberContext ctx);
/**
 * Exit a parse tree produced by {@link ArithmeticParser#number}.
 * @param ctx the parse tree
 */
void exitNumber(ArithmeticParser.NumberContext ctx);
/**
 * Enter a parse tree produced by {@link ArithmeticParser#variable}.
 * @param ctx the parse tree
 */
void enterVariable(ArithmeticParser.VariableContext ctx);
/**
 * Exit a parse tree produced by {@link ArithmeticParser#variable}.
 * @param ctx the parse tree
 */
void exitVariable(ArithmeticParser.VariableContext ctx);
}

View File

@ -0,0 +1,704 @@
// Generated from Arithmetic.g4 by ANTLR 4.5.1
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
// Machine-generated parser for Arithmetic.g4 (ANTLR 4.5.1). Do not edit by
// hand; regenerate from the grammar. Structure: one Context class plus one
// parse method per grammar rule, driven by the serialized ATN at the bottom.
public class ArithmeticParser extends Parser {
// Fail fast at class load if the runtime jar version differs from the
// generator version (4.5.1).
static { RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
// Token type constants — must agree with ArithmeticLexer.
public static final int
LPAREN=1, RPAREN=2, PLUS=3, MINUS=4, TIMES=5, DIV=6, GT=7, LT=8, EQ=9,
POINT=10, E=11, POW=12, LETTER=13, DIGIT=14, WS=15;
// Rule indices, in grammar declaration order.
public static final int
RULE_equation = 0, RULE_expression = 1, RULE_multiplyingExpression = 2,
RULE_powExpression = 3, RULE_atom = 4, RULE_scientific = 5, RULE_relop = 6,
RULE_number = 7, RULE_variable = 8;
public static final String[] ruleNames = {
"equation", "expression", "multiplyingExpression", "powExpression", "atom",
"scientific", "relop", "number", "variable"
};
// Literal/symbolic name tables, indexed by token type; nulls mark tokens
// with no fixed literal.
private static final String[] _LITERAL_NAMES = {
null, "'('", "')'", "'+'", "'-'", "'*'", "'/'", "'>'", "'<'", "'='", "'.'",
null, "'^'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, "LPAREN", "RPAREN", "PLUS", "MINUS", "TIMES", "DIV", "GT", "LT",
"EQ", "POINT", "E", "POW", "LETTER", "DIGIT", "WS"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
 * @deprecated Use {@link #VOCABULARY} instead.
 */
@Deprecated
public static final String[] tokenNames;
// Backfill the deprecated tokenNames array from VOCABULARY: prefer the
// literal name, fall back to the symbolic name, else "<INVALID>".
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
@Override
public String getGrammarFileName() { return "Arithmetic.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public ATN getATN() { return _ATN; }
public ArithmeticParser(TokenStream input) {
super(input);
_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
// Parse-tree context for rule `equation`: expression relop expression.
public static class EquationContext extends ParserRuleContext {
public List<ExpressionContext> expression() {
return getRuleContexts(ExpressionContext.class);
}
public ExpressionContext expression(int i) {
return getRuleContext(ExpressionContext.class,i);
}
public RelopContext relop() {
return getRuleContext(RelopContext.class,0);
}
public EquationContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_equation; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).enterEquation(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).exitEquation(this);
}
}
// equation : expression relop expression
public final EquationContext equation() throws RecognitionException {
EquationContext _localctx = new EquationContext(_ctx, getState());
enterRule(_localctx, 0, RULE_equation);
try {
enterOuterAlt(_localctx, 1);
{
setState(18);
expression();
setState(19);
relop();
setState(20);
expression();
}
}
catch (RecognitionException re) {
// Standard generated recovery: record, report, resync; the partial
// context is still returned so the tree reflects what was matched.
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule `expression`:
// multiplyingExpression ((PLUS | MINUS) multiplyingExpression)*.
public static class ExpressionContext extends ParserRuleContext {
public List<MultiplyingExpressionContext> multiplyingExpression() {
return getRuleContexts(MultiplyingExpressionContext.class);
}
public MultiplyingExpressionContext multiplyingExpression(int i) {
return getRuleContext(MultiplyingExpressionContext.class,i);
}
public List<TerminalNode> PLUS() { return getTokens(ArithmeticParser.PLUS); }
public TerminalNode PLUS(int i) {
return getToken(ArithmeticParser.PLUS, i);
}
public List<TerminalNode> MINUS() { return getTokens(ArithmeticParser.MINUS); }
public TerminalNode MINUS(int i) {
return getToken(ArithmeticParser.MINUS, i);
}
public ExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_expression; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).enterExpression(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).exitExpression(this);
}
}
// expression : multiplyingExpression ((PLUS | MINUS) multiplyingExpression)*
// The loop is steered by adaptive prediction on decision 0.
public final ExpressionContext expression() throws RecognitionException {
ExpressionContext _localctx = new ExpressionContext(_ctx, getState());
enterRule(_localctx, 2, RULE_expression);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
setState(22);
multiplyingExpression();
setState(27);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,0,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
setState(23);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
_errHandler.recoverInline(this);
} else {
consume();
}
setState(24);
multiplyingExpression();
}
}
}
setState(29);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,0,_ctx);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule `multiplyingExpression`:
// powExpression ((TIMES | DIV) powExpression)*.
public static class MultiplyingExpressionContext extends ParserRuleContext {
public List<PowExpressionContext> powExpression() {
return getRuleContexts(PowExpressionContext.class);
}
public PowExpressionContext powExpression(int i) {
return getRuleContext(PowExpressionContext.class,i);
}
public List<TerminalNode> TIMES() { return getTokens(ArithmeticParser.TIMES); }
public TerminalNode TIMES(int i) {
return getToken(ArithmeticParser.TIMES, i);
}
public List<TerminalNode> DIV() { return getTokens(ArithmeticParser.DIV); }
public TerminalNode DIV(int i) {
return getToken(ArithmeticParser.DIV, i);
}
public MultiplyingExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_multiplyingExpression; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).enterMultiplyingExpression(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).exitMultiplyingExpression(this);
}
}
// multiplyingExpression : powExpression ((TIMES | DIV) powExpression)*
// The loop is steered by adaptive prediction on decision 1.
public final MultiplyingExpressionContext multiplyingExpression() throws RecognitionException {
MultiplyingExpressionContext _localctx = new MultiplyingExpressionContext(_ctx, getState());
enterRule(_localctx, 4, RULE_multiplyingExpression);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
setState(30);
powExpression();
setState(35);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,1,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
setState(31);
_la = _input.LA(1);
if ( !(_la==TIMES || _la==DIV) ) {
_errHandler.recoverInline(this);
} else {
consume();
}
setState(32);
powExpression();
}
}
}
setState(37);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,1,_ctx);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule `powExpression`: atom (POW expression)?.
public static class PowExpressionContext extends ParserRuleContext {
public AtomContext atom() {
return getRuleContext(AtomContext.class,0);
}
public TerminalNode POW() { return getToken(ArithmeticParser.POW, 0); }
public ExpressionContext expression() {
return getRuleContext(ExpressionContext.class,0);
}
public PowExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_powExpression; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).enterPowExpression(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).exitPowExpression(this);
}
}
// powExpression : atom (POW expression)?
public final PowExpressionContext powExpression() throws RecognitionException {
PowExpressionContext _localctx = new PowExpressionContext(_ctx, getState());
enterRule(_localctx, 6, RULE_powExpression);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(38);
atom();
setState(41);
_la = _input.LA(1);
if (_la==POW) {
{
setState(39);
match(POW);
setState(40);
expression();
}
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule `atom`:
// scientific | variable | LPAREN expression RPAREN.
public static class AtomContext extends ParserRuleContext {
public ScientificContext scientific() {
return getRuleContext(ScientificContext.class,0);
}
public VariableContext variable() {
return getRuleContext(VariableContext.class,0);
}
public TerminalNode LPAREN() { return getToken(ArithmeticParser.LPAREN, 0); }
public ExpressionContext expression() {
return getRuleContext(ExpressionContext.class,0);
}
public TerminalNode RPAREN() { return getToken(ArithmeticParser.RPAREN, 0); }
public AtomContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_atom; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).enterAtom(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).exitAtom(this);
}
}
// atom : scientific | variable | LPAREN expression RPAREN
// Alternative chosen by adaptive prediction on decision 3.
public final AtomContext atom() throws RecognitionException {
AtomContext _localctx = new AtomContext(_ctx, getState());
enterRule(_localctx, 8, RULE_atom);
try {
setState(49);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
setState(43);
scientific();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
setState(44);
variable();
}
break;
case 3:
enterOuterAlt(_localctx, 3);
{
setState(45);
match(LPAREN);
setState(46);
expression();
setState(47);
match(RPAREN);
}
break;
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule `scientific`: number (E number)?.
public static class ScientificContext extends ParserRuleContext {
public List<NumberContext> number() {
return getRuleContexts(NumberContext.class);
}
public NumberContext number(int i) {
return getRuleContext(NumberContext.class,i);
}
public TerminalNode E() { return getToken(ArithmeticParser.E, 0); }
public ScientificContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_scientific; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).enterScientific(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).exitScientific(this);
}
}
// scientific : number (E number)?
public final ScientificContext scientific() throws RecognitionException {
ScientificContext _localctx = new ScientificContext(_ctx, getState());
enterRule(_localctx, 10, RULE_scientific);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(51);
number();
setState(54);
_la = _input.LA(1);
if (_la==E) {
{
setState(52);
match(E);
setState(53);
number();
}
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule `relop`: GT | LT | EQ.
public static class RelopContext extends ParserRuleContext {
public TerminalNode EQ() { return getToken(ArithmeticParser.EQ, 0); }
public TerminalNode GT() { return getToken(ArithmeticParser.GT, 0); }
public TerminalNode LT() { return getToken(ArithmeticParser.LT, 0); }
public RelopContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_relop; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).enterRelop(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).exitRelop(this);
}
}
// relop : GT | LT | EQ
// Membership test uses a bitmask over token types (all < 64, so one long).
public final RelopContext relop() throws RecognitionException {
RelopContext _localctx = new RelopContext(_ctx, getState());
enterRule(_localctx, 12, RULE_relop);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(56);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << GT) | (1L << LT) | (1L << EQ))) != 0)) ) {
_errHandler.recoverInline(this);
} else {
consume();
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule `number`: MINUS? DIGIT+ (POINT DIGIT+)?.
public static class NumberContext extends ParserRuleContext {
public TerminalNode MINUS() { return getToken(ArithmeticParser.MINUS, 0); }
public List<TerminalNode> DIGIT() { return getTokens(ArithmeticParser.DIGIT); }
public TerminalNode DIGIT(int i) {
return getToken(ArithmeticParser.DIGIT, i);
}
public TerminalNode POINT() { return getToken(ArithmeticParser.POINT, 0); }
public NumberContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_number; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).enterNumber(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).exitNumber(this);
}
}
// number : MINUS? DIGIT+ (POINT DIGIT+)?
public final NumberContext number() throws RecognitionException {
NumberContext _localctx = new NumberContext(_ctx, getState());
enterRule(_localctx, 14, RULE_number);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(59);
_la = _input.LA(1);
if (_la==MINUS) {
{
setState(58);
match(MINUS);
}
}
setState(62);
_errHandler.sync(this);
_la = _input.LA(1);
do {
{
{
setState(61);
match(DIGIT);
}
}
setState(64);
_errHandler.sync(this);
_la = _input.LA(1);
} while ( _la==DIGIT );
setState(72);
_la = _input.LA(1);
if (_la==POINT) {
{
setState(66);
match(POINT);
setState(68);
_errHandler.sync(this);
_la = _input.LA(1);
do {
{
{
setState(67);
match(DIGIT);
}
}
setState(70);
_errHandler.sync(this);
_la = _input.LA(1);
} while ( _la==DIGIT );
}
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule `variable`: MINUS? LETTER (LETTER | DIGIT)*.
public static class VariableContext extends ParserRuleContext {
public List<TerminalNode> LETTER() { return getTokens(ArithmeticParser.LETTER); }
public TerminalNode LETTER(int i) {
return getToken(ArithmeticParser.LETTER, i);
}
public TerminalNode MINUS() { return getToken(ArithmeticParser.MINUS, 0); }
public List<TerminalNode> DIGIT() { return getTokens(ArithmeticParser.DIGIT); }
public TerminalNode DIGIT(int i) {
return getToken(ArithmeticParser.DIGIT, i);
}
public VariableContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_variable; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).enterVariable(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof ArithmeticListener ) ((ArithmeticListener)listener).exitVariable(this);
}
}
// variable : MINUS? LETTER (LETTER | DIGIT)*
public final VariableContext variable() throws RecognitionException {
VariableContext _localctx = new VariableContext(_ctx, getState());
enterRule(_localctx, 16, RULE_variable);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(75);
_la = _input.LA(1);
if (_la==MINUS) {
{
setState(74);
match(MINUS);
}
}
setState(77);
match(LETTER);
setState(81);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==LETTER || _la==DIGIT) {
{
{
setState(78);
_la = _input.LA(1);
if ( !(_la==LETTER || _la==DIGIT) ) {
_errHandler.recoverInline(this);
} else {
consume();
}
}
}
setState(83);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Serialized parser ATN — opaque payload decoded by ATNDeserializer below.
// Must remain byte-for-byte as emitted by the generator.
public static final String _serializedATN =
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\21W\4\2\t\2\4\3\t"+
"\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\3\2\3\2\3\2"+
"\3\2\3\3\3\3\3\3\7\3\34\n\3\f\3\16\3\37\13\3\3\4\3\4\3\4\7\4$\n\4\f\4"+
"\16\4\'\13\4\3\5\3\5\3\5\5\5,\n\5\3\6\3\6\3\6\3\6\3\6\3\6\5\6\64\n\6\3"+
"\7\3\7\3\7\5\79\n\7\3\b\3\b\3\t\5\t>\n\t\3\t\6\tA\n\t\r\t\16\tB\3\t\3"+
"\t\6\tG\n\t\r\t\16\tH\5\tK\n\t\3\n\5\nN\n\n\3\n\3\n\7\nR\n\n\f\n\16\n"+
"U\13\n\3\n\2\2\13\2\4\6\b\n\f\16\20\22\2\6\3\2\5\6\3\2\7\b\3\2\t\13\3"+
"\2\17\20Y\2\24\3\2\2\2\4\30\3\2\2\2\6 \3\2\2\2\b(\3\2\2\2\n\63\3\2\2\2"+
"\f\65\3\2\2\2\16:\3\2\2\2\20=\3\2\2\2\22M\3\2\2\2\24\25\5\4\3\2\25\26"+
"\5\16\b\2\26\27\5\4\3\2\27\3\3\2\2\2\30\35\5\6\4\2\31\32\t\2\2\2\32\34"+
"\5\6\4\2\33\31\3\2\2\2\34\37\3\2\2\2\35\33\3\2\2\2\35\36\3\2\2\2\36\5"+
"\3\2\2\2\37\35\3\2\2\2 %\5\b\5\2!\"\t\3\2\2\"$\5\b\5\2#!\3\2\2\2$\'\3"+
"\2\2\2%#\3\2\2\2%&\3\2\2\2&\7\3\2\2\2\'%\3\2\2\2(+\5\n\6\2)*\7\16\2\2"+
"*,\5\4\3\2+)\3\2\2\2+,\3\2\2\2,\t\3\2\2\2-\64\5\f\7\2.\64\5\22\n\2/\60"+
"\7\3\2\2\60\61\5\4\3\2\61\62\7\4\2\2\62\64\3\2\2\2\63-\3\2\2\2\63.\3\2"+
"\2\2\63/\3\2\2\2\64\13\3\2\2\2\658\5\20\t\2\66\67\7\r\2\2\679\5\20\t\2"+
"8\66\3\2\2\289\3\2\2\29\r\3\2\2\2:;\t\4\2\2;\17\3\2\2\2<>\7\6\2\2=<\3"+
"\2\2\2=>\3\2\2\2>@\3\2\2\2?A\7\20\2\2@?\3\2\2\2AB\3\2\2\2B@\3\2\2\2BC"+
"\3\2\2\2CJ\3\2\2\2DF\7\f\2\2EG\7\20\2\2FE\3\2\2\2GH\3\2\2\2HF\3\2\2\2"+
"HI\3\2\2\2IK\3\2\2\2JD\3\2\2\2JK\3\2\2\2K\21\3\2\2\2LN\7\6\2\2ML\3\2\2"+
"\2MN\3\2\2\2NO\3\2\2\2OS\7\17\2\2PR\t\5\2\2QP\3\2\2\2RU\3\2\2\2SQ\3\2"+
"\2\2ST\3\2\2\2T\23\3\2\2\2US\3\2\2\2\r\35%+\638=BHJMS";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
// Allocate one (initially empty) DFA per ATN decision at class-load time;
// the DFAs are filled in lazily by the simulator as input is parsed.
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}

View File

BIN
pb/src/parserTest/parserTest Executable file

Binary file not shown.

24
pb/src/parserTest/test.go Normal file
View File

@ -0,0 +1,24 @@
package main
import (
"antlr4"
"parser"
)
// main wires the generated Arithmetic front end together: it streams the
// contents of foo.txt through the lexer into a token stream, then parses
// the tokens starting at the `equation` rule.
func main() {
	input := antlr4.NewFileStream("foo.txt")
	lexer := parser.NewArithmeticLexer(input)
	tokens := antlr4.NewCommonTokenStream(lexer, 0)
	arithmeticParser := parser.NewArithmeticParser(tokens)
	arithmeticParser.Equation()
}

View File

@ -101,7 +101,7 @@ var <superClass> = require('./<superClass>').<superClass> // TODO
var parserATN = <atn>
var deserializer = antlr4.NewATNDeserializer(nil)
var deserializedATN = deserializer.Deserialize( []rune( parserATN ) )
var deserializedATN = deserializer.DeserializeFromUInt16( parserATN )
var literalNames = []string{ <parser.literalNames:{t | <t>}; null="\"\"", separator=", ", wrap, anchor> }
var symbolicNames = []string{ <parser.symbolicNames:{t | <t>}; null="\"\"", separator=", ", wrap, anchor> }
@ -459,7 +459,7 @@ testShiftInRange(shiftAmount) ::= <<
// produces smaller bytecode only when bits.ttypes contains more than two items
bitsetBitfieldComparison(s, bits) ::= <%
(<testShiftInRange({<offsetShiftVar(s.varName, bits.shift)>})> && ((1 \<\< <offsetShiftVar(s.varName, bits.shift)>) & (<bits.ttypes:{ttype | (1 \<\< <offsetShiftType(ttype, bits.shift)>)}; separator=" | ">)) != 0)
(<testShiftInRange({<offsetShiftVar(s.varName, bits.shift)>})> && ((1 \<\< uint(<offsetShiftVar(s.varName, bits.shift)>)) & (<bits.ttypes:{ttype | (1 \<\< <offsetShiftType(ttype, bits.shift)>)}; separator=" | ">)) != 0)
%>
isZero ::= [
@ -799,7 +799,7 @@ Lexer(lexer, atn, actionFuncs, sempredFuncs, superClass) ::= <<
var serializedLexerAtn = <atn>
var lexerDeserializer = antlr4.NewATNDeserializer(nil)
var lexerAtn = lexerDeserializer.Deserialize( []rune( serializedLexerAtn ) )
var lexerAtn = lexerDeserializer.DeserializeFromUInt16( serializedLexerAtn )
var lexerModeNames = []string{ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> }
var lexerLiteralNames = []string{ <lexer.literalNames:{t | <t>}; null="\"\"", separator=", ", wrap, anchor> }
@ -853,7 +853,7 @@ const (
SerializedATN(model) ::= <<
<! only one segment, can be inlined !>
""
[]uint16{ <model.serialized; wrap={<\n> }> }
>>

View File

@ -70,7 +70,7 @@ func (a *ATNConfigSet) InitATNConfigSet(fullCtx bool) {
// fields in particular, conflictingAlts is set after
// we've made a.readonly.
a.readOnly = false
// Track the elements as they are added to the set supports get(i)///
// Track the elements as they are added to the set supports Get(i)///
a.configs = make([]IATNConfig, 0)
// TODO: these fields make me pretty uncomfortable but nice to pack up info

View File

@ -32,7 +32,7 @@ func initIntArray(length, value int) []int {
type ATNDeserializer struct {
deserializationOptions *ATNDeserializationOptions
data []rune
data []uint16
pos int
uuid string
}
@ -80,7 +80,7 @@ func (this *ATNDeserializer) isFeatureSupported(feature, actualUuid string) bool
return idx2 >= idx1
}
func (this *ATNDeserializer) Deserialize(data []rune) *ATN {
func (this *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
this.reset(data)
this.checkVersion()
@ -104,31 +104,19 @@ func (this *ATNDeserializer) Deserialize(data []rune) *ATN {
}
func (this *ATNDeserializer) reset(data []rune) {
func (this *ATNDeserializer) reset(data []uint16) {
// TODO not sure the copy is necessary here
temp := make([]rune, len(data))
temp := make([]uint16, len(data))
for i, c := range data {
// don't adjust the first value since that's the version number
if i == 0 {
temp[i] = c
} else if c > 1 {
} else{
temp[i] = c - 2
} else {
temp[i] = -1
}
}
// var adjust = func(c) {
// var v = c.charCodeAt(0)
// return v>1 ? v-2 : -1
// }
// var temp = data.split("").map(adjust)
// // don't adjust the first value since that's the version number
// temp[0] = data.charCodeAt(0)
this.data = temp
this.pos = 0
}
@ -296,14 +284,14 @@ func (this *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
continue
}
var outermostPrecedenceReturn = -1
if atn.ruleToStartState[t.getTarget().GetRuleIndex()].isPrecedenceRule {
if atn.ruleToStartState[t.getTarGet().GetRuleIndex()].isPrecedenceRule {
if t.precedence == 0 {
outermostPrecedenceReturn = t.getTarget().GetRuleIndex()
outermostPrecedenceReturn = t.getTarGet().GetRuleIndex()
}
}
trans := NewEpsilonTransition(t.followState, outermostPrecedenceReturn)
atn.ruleToStopState[t.getTarget().GetRuleIndex()].AddTransition(trans, -1)
atn.ruleToStopState[t.getTarGet().GetRuleIndex()].AddTransition(trans, -1)
}
}
@ -323,14 +311,14 @@ func (this *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
}
if s2, ok := state.(*PlusLoopbackState); ok {
for j := 0; j < len(s2.GetTransitions()); j++ {
target := s2.GetTransitions()[j].getTarget()
target := s2.GetTransitions()[j].getTarGet()
if t2, ok := target.(*PlusBlockStartState); ok {
t2.loopBackState = state
}
}
} else if s2, ok := state.(*StarLoopbackState); ok {
for j := 0; j < len(s2.GetTransitions()); j++ {
target := s2.GetTransitions()[j].getTarget()
target := s2.GetTransitions()[j].getTarGet()
if t2, ok := target.(*StarLoopEntryState); ok {
t2.loopBackState = state
}
@ -425,8 +413,8 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
if transition == excludeTransition {
continue
}
if transition.getTarget() == endState {
transition.setTarget(bypassStop)
if transition.getTarGet() == endState {
transition.setTarGet(bypassStop)
}
}
}
@ -456,12 +444,12 @@ func (this *ATNDeserializer) stateIsEndStateFor(state IATNState, idx int) IATNSt
if _, ok := state.(*StarLoopEntryState); !ok {
return nil
}
var maybeLoopEndState = state.GetTransitions()[len(state.GetTransitions())-1].getTarget()
var maybeLoopEndState = state.GetTransitions()[len(state.GetTransitions())-1].getTarGet()
if _, ok := maybeLoopEndState.(*LoopEndState); !ok {
return nil
}
_, ok := maybeLoopEndState.GetTransitions()[0].getTarget().(*RuleStopState)
_, ok := maybeLoopEndState.GetTransitions()[0].getTarGet().(*RuleStopState)
if maybeLoopEndState.(*LoopEndState).epsilonOnlyTransitions && ok {
return state
@ -488,9 +476,9 @@ func (this *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
// precedence rule should continue or complete.
//
if atn.ruleToStartState[state.GetRuleIndex()].isPrecedenceRule {
var maybeLoopEndState = state.GetTransitions()[len(state.GetTransitions())-1].getTarget()
var maybeLoopEndState = state.GetTransitions()[len(state.GetTransitions())-1].getTarGet()
if s3, ok := maybeLoopEndState.(*LoopEndState); ok {
s := maybeLoopEndState.GetTransitions()[0].getTarget()
s := maybeLoopEndState.GetTransitions()[0].getTarGet()
_, ok2 := s.(*RuleStopState)
if s3.epsilonOnlyTransitions && ok2 {
s.(*StarLoopEntryState).precedenceRuleDecision = true
@ -524,11 +512,11 @@ func (this *ATNDeserializer) verifyATN(atn *ATN) {
switch s2 := state.(type) {
case *StarBlockStartState:
_, ok2 := s2.GetTransitions()[1].getTarget().(*LoopEndState)
_, ok2 := s2.GetTransitions()[1].getTarGet().(*LoopEndState)
this.checkCondition(ok2, "")
this.checkCondition(!s2.nonGreedy, "")
case *LoopEndState:
s3, ok2 := s2.GetTransitions()[1].getTarget().(*StarBlockStartState)
s3, ok2 := s2.GetTransitions()[1].getTarGet().(*StarBlockStartState)
this.checkCondition(ok2, "")
this.checkCondition(s3.nonGreedy, "")
default:
@ -537,7 +525,7 @@ func (this *ATNDeserializer) verifyATN(atn *ATN) {
case *StarLoopbackState:
this.checkCondition(len(state.GetTransitions()) == 1, "")
_, ok2 := state.GetTransitions()[0].getTarget().(*StarLoopEntryState)
_, ok2 := state.GetTransitions()[0].getTarGet().(*StarLoopEntryState)
this.checkCondition(ok2, "")
case *LoopEndState:
this.checkCondition(s2.loopBackState != nil, "")

View File

@ -46,7 +46,7 @@ func (ts *BufferedTokenStream) InitBufferedTokenStream(tokenSource TokenSource)
// {@link //LT LT(1)}.
//
// <p>This field is set to -1 when the stream is first constructed or when
// {@link //setTokenSource} is called, indicating that the first token has
// {@link //SetTokenSource} is called, indicating that the first token has
// not yet been fetched from the token source. For additional information,
// see the documentation of {@link IntStream} for a description of
// Initializing Methods.</p>
@ -70,24 +70,24 @@ func (ts *BufferedTokenStream) InitBufferedTokenStream(tokenSource TokenSource)
}
func (bt *BufferedTokenStream) mark() int {
func (bt *BufferedTokenStream) Mark() int {
return 0
}
func (bt *BufferedTokenStream) release(marker int) {
func (bt *BufferedTokenStream) Release(marker int) {
// no resources to release
}
func (bt *BufferedTokenStream) reset() {
bt.seek(0)
bt.Seek(0)
}
func (bt *BufferedTokenStream) seek(index int) {
func (bt *BufferedTokenStream) Seek(index int) {
bt.lazyInit()
bt.index = bt.adjustSeekIndex(index)
}
func (bt *BufferedTokenStream) get(index int) *Token {
func (bt *BufferedTokenStream) Get(index int) *Token {
bt.lazyInit()
return bt.tokens[index]
}
@ -119,7 +119,7 @@ func (bt *BufferedTokenStream) Consume() {
//
// @return {@code true} if a token is located at index {@code i}, otherwise
// {@code false}.
// @see //get(int i)
// @see //Get(int i)
// /
func (bt *BufferedTokenStream) Sync(i int) bool {
var n = i - len(bt.tokens) + 1 // how many more elements we need?
@ -235,7 +235,7 @@ func (bt *BufferedTokenStream) GetTokenSource() TokenSource {
}
// Reset bt token stream by setting its token source.///
func (bt *BufferedTokenStream) setTokenSource(tokenSource TokenSource) {
func (bt *BufferedTokenStream) SetTokenSource(tokenSource TokenSource) {
bt.tokenSource = tokenSource
bt.tokens = make([]*Token, 0)
bt.index = -1
@ -332,21 +332,34 @@ func (bt *BufferedTokenStream) GetSourceName() string {
return bt.tokenSource.GetSourceName()
}
// Get the text of all tokens in bt buffer.///
func (bt *BufferedTokenStream) GetText(interval *Interval) string {
func (bt *BufferedTokenStream) Size() int {
return len(bt.tokens)
}
func (bt *BufferedTokenStream) Index() int {
return bt.index
}
func (bt *BufferedTokenStream) GetAllText() string {
return bt.GetTextFromInterval(nil)
}
func (bt *BufferedTokenStream) GetTextFromTokens(start, end *Token) string {
return bt.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
}
func (bt *BufferedTokenStream) GetTextFromRuleContext(interval IRuleContext) string {
return bt.GetTextFromInterval(interval.GetSourceInterval())
}
func (bt *BufferedTokenStream) GetTextFromInterval(interval *Interval) string {
bt.lazyInit()
bt.fill()
if interval == nil {
interval = NewInterval(0, len(bt.tokens)-1)
}
var start = interval.start
// if s2, ok := start.(*Token); ok {
// start = s2.tokenIndex
// }
var stop = interval.stop
// if s2, ok := stop.(*Token); ok {
// stop = s2.tokenIndex
// }
if start < 0 || stop < 0 {
return ""
}

View File

@ -3,5 +3,6 @@ package antlr4
type CharStream interface {
IntStream
GetText(int, int) string
GetTextFromInterval(*Interval) string
}

View File

@ -6,7 +6,7 @@
package antlr4
type TokenFactory interface {
create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token
Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token
}
type CommonTokenFactory struct {
@ -45,7 +45,7 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
//
var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
func (this *CommonTokenFactory) create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token {
func (this *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token {
var t = NewCommonToken(source, ttype, channel, start, stop)
t.line = line
t.column = column

View File

@ -153,7 +153,7 @@ func (this *DefaultErrorStrategy) ReportError(recognizer IParser, e IRecognition
default:
fmt.Println("unknown recognition error type: " + reflect.TypeOf(e).Name())
// fmt.Println(e.stack)
recognizer.notifyErrorListeners(e.getMessage(), e.getOffendingToken(), e)
recognizer.notifyErrorListeners(e.GetMessage(), e.GetOffendingToken(), e)
case *NoViableAltException:
this.reportNoViableAlternative(recognizer, t)
case *InputMisMatchException:
@ -163,7 +163,7 @@ func (this *DefaultErrorStrategy) ReportError(recognizer IParser, e IRecognition
}
}
//
// {@inheritDoc}
//
// <p>The default implementation reSynchronizes the parser by consuming tokens
@ -172,7 +172,7 @@ func (this *DefaultErrorStrategy) ReportError(recognizer IParser, e IRecognition
//
func (this *DefaultErrorStrategy) Recover(recognizer IParser, e IRecognitionException) {
if this.lastErrorIndex == recognizer.getInputStream().index() &&
if this.lastErrorIndex == recognizer.GetInputStream().Index() &&
this.lastErrorStates != nil && this.lastErrorStates.contains(recognizer.GetState()) {
// uh oh, another error at same token index and previously-Visited
// state in ATN must be a case where LT(1) is in the recovery
@ -180,7 +180,7 @@ func (this *DefaultErrorStrategy) Recover(recognizer IParser, e IRecognitionExce
// at least to prevent an infinite loop this is a failsafe.
recognizer.Consume()
}
this.lastErrorIndex = recognizer.getInputStream().index()
this.lastErrorIndex = recognizer.GetInputStream().Index()
if this.lastErrorStates == nil {
this.lastErrorStates = NewIntervalSet()
}
@ -476,7 +476,7 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer IParser) bool
// is free to conjure up and insert the missing token
var atn = recognizer.GetInterpreter().atn
var currentState = atn.states[recognizer.GetState()]
var next = currentState.GetTransitions()[0].getTarget()
var next = currentState.GetTransitions()[0].getTarGet()
var expectingAtLL2 = atn.nextTokens(next, recognizer.GetParserRuleContext())
if expectingAtLL2.contains(currentSymbolType) {
this.reportMissingToken(recognizer)
@ -559,7 +559,7 @@ func (this *DefaultErrorStrategy) getMissingSymbol(recognizer IParser) *Token {
}
tf := recognizer.GetTokenFactory()
return tf.create(current.source, expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.line, current.column)
return tf.Create(current.source, expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.line, current.column)
}
func (this *DefaultErrorStrategy) getExpectedTokens(recognizer IParser) *IntervalSet {

View File

@ -9,8 +9,9 @@ import ()
// and what kind of problem occurred.
type IRecognitionException interface {
getOffendingToken() *Token
getMessage() string
GetOffendingToken() *Token
GetMessage() string
GetInputStream() IntStream
}
type RecognitionException struct {
@ -19,10 +20,10 @@ type RecognitionException struct {
offendingToken *Token
offendingState int
ctx IRuleContext
input CharStream
input IntStream
}
func NewRecognitionException(message string, recognizer IRecognizer, input CharStream, ctx IRuleContext) *RecognitionException {
func NewRecognitionException(message string, recognizer IRecognizer, input IntStream, ctx IRuleContext) *RecognitionException {
// todo
// Error.call(this)
@ -40,7 +41,7 @@ func NewRecognitionException(message string, recognizer IRecognizer, input CharS
return t
}
func (t *RecognitionException) InitRecognitionException(message string, recognizer IRecognizer, input CharStream, ctx IRuleContext) {
func (t *RecognitionException) InitRecognitionException(message string, recognizer IRecognizer, input IntStream, ctx IRuleContext) {
t.message = message
t.recognizer = recognizer
@ -61,14 +62,18 @@ func (t *RecognitionException) InitRecognitionException(message string, recogniz
}
}
func (this *RecognitionException) getMessage() string {
func (this *RecognitionException) GetMessage() string {
return this.message
}
func (this *RecognitionException) getOffendingToken() *Token {
func (this *RecognitionException) GetOffendingToken() *Token {
return this.offendingToken
}
func (this *RecognitionException) GetInputStream() IntStream {
return this.input
}
// <p>If the state number is not known, this method returns -1.</p>
//
@ -94,14 +99,13 @@ func (this *RecognitionException) toString() string {
}
type LexerNoViableAltException struct {
RecognitionException
*RecognitionException
startIndex int
deadEndConfigs *ATNConfigSet
}
func NewLexerNoViableAltException(lexer ILexer, input CharStream, startIndex int,
deadEndConfigs *ATNConfigSet) *LexerNoViableAltException {
func NewLexerNoViableAltException(lexer ILexer, input CharStream, startIndex int, deadEndConfigs *ATNConfigSet) *LexerNoViableAltException {
this := new(LexerNoViableAltException)
@ -115,14 +119,14 @@ func NewLexerNoViableAltException(lexer ILexer, input CharStream, startIndex int
func (this *LexerNoViableAltException) toString() string {
var symbol = ""
if this.startIndex >= 0 && this.startIndex < this.input.size() {
symbol = this.input.GetTextFromInterval(NewInterval(this.startIndex, this.startIndex))
if this.startIndex >= 0 && this.startIndex < this.input.Size() {
symbol = this.input.(CharStream).GetTextFromInterval(NewInterval(this.startIndex, this.startIndex))
}
return "LexerNoViableAltException" + symbol
}
type NoViableAltException struct {
RecognitionException
*RecognitionException
startToken *Token
offendingToken *Token
@ -135,7 +139,7 @@ type NoViableAltException struct {
// of the offending input and also knows where the parser was
// in the various paths when the error. Reported by reportNoViableAlternative()
//
func NewNoViableAltException(recognizer IParser, input CharStream, startToken *Token, offendingToken *Token, deadEndConfigs *ATNConfigSet, ctx IParserRuleContext) *NoViableAltException {
func NewNoViableAltException(recognizer IParser, input TokenStream, startToken *Token, offendingToken *Token, deadEndConfigs *ATNConfigSet, ctx IParserRuleContext) *NoViableAltException {
if ctx == nil {
ctx = recognizer.GetParserRuleContext()
@ -150,13 +154,13 @@ func NewNoViableAltException(recognizer IParser, input CharStream, startToken *T
}
if input == nil {
input = recognizer.getInputStream()
input = recognizer.GetInputStream().(TokenStream)
}
this := new(NoViableAltException)
this.InitRecognitionException("", recognizer, input, ctx)
// Which configurations did we try at input.index() that couldn't Match
// Which configurations did we try at input.Index() that couldn't Match
// input.LT(1)?//
this.deadEndConfigs = deadEndConfigs
// The token object at the start index the input stream might
@ -170,7 +174,7 @@ func NewNoViableAltException(recognizer IParser, input CharStream, startToken *T
}
type InputMisMatchException struct {
RecognitionException
*RecognitionException
}
// This signifies any kind of misMatched input exceptions such as
@ -179,7 +183,7 @@ type InputMisMatchException struct {
func NewInputMisMatchException(recognizer IParser) *InputMisMatchException {
this := new(InputMisMatchException)
this.InitRecognitionException("", recognizer, recognizer.getInputStream(), recognizer.GetParserRuleContext())
this.InitRecognitionException("", recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
this.offendingToken = recognizer.getCurrentToken()
@ -193,7 +197,7 @@ func NewInputMisMatchException(recognizer IParser) *InputMisMatchException {
// prediction.
type FailedPredicateException struct {
RecognitionException
*RecognitionException
ruleIndex int
predicateIndex int
@ -204,7 +208,7 @@ func NewFailedPredicateException(recognizer *Parser, predicate string, message s
this := new(FailedPredicateException)
this.InitRecognitionException(this.formatMessage(predicate, message), recognizer, recognizer.getInputStream(), recognizer._ctx)
this.InitRecognitionException(this.formatMessage(predicate, message), recognizer, recognizer.GetInputStream(), recognizer._ctx)
var s = recognizer.Interpreter.atn.states[recognizer.state]
var trans = s.GetTransitions()[0]

View File

@ -4,6 +4,7 @@ import (
"bytes"
"io"
"os"
"fmt"
)
//
@ -12,17 +13,31 @@ import (
//
type FileStream struct {
InputStream
*InputStream
filename string
}
func NewFileStream(fileName string) {
func NewFileStream(fileName string) *FileStream {
buf := bytes.NewBuffer(nil)
// TODO
f, _ := os.Open(fileName) // Error handling elided for brevity.
io.Copy(buf, f) // Error handling elided for brevity.
f.Close()
fs := new(FileStream)
fs.filename = fileName
s := string(buf.Bytes())
fmt.Println(s)
fs.InitInputStream(s)
return fs
}
func (f *FileStream) GetSourceName() string {
return f.filename
}

View File

@ -11,12 +11,18 @@ func NewInputStream(data string) *InputStream {
is := new(InputStream)
is.InitInputStream(data)
return is
}
func (is *InputStream) InitInputStream(data string) {
is.name = "<empty>"
is.index = 0
is.data = []rune(data)
is.size = len(is.data) // number of runes
return is
}
func (is *InputStream) reset() {
@ -49,15 +55,23 @@ func (is *InputStream) LT(offset int) int {
return is.LA(offset)
}
func (is *InputStream) Index() int {
return is.index
}
func (is *InputStream) Size() int {
return is.size
}
// mark/release do nothing we have entire buffer
func (is *InputStream) mark() int {
func (is *InputStream) Mark() int {
return -1
}
func (is *InputStream) release(marker int) {
func (is *InputStream) Release(marker int) {
}
func (is *InputStream) seek(index int) {
func (is *InputStream) Seek(index int) {
if index <= is.index {
is.index = index // just jump don't update stream state (line,...)
return
@ -77,6 +91,10 @@ func (is *InputStream) GetText(start int, stop int) string {
}
}
func (is *InputStream) GetTextFromInterval(i *Interval) string {
return is.GetText(i.start, i.stop)
}
func (is *InputStream) toString() string {
return string(is.data)
}

View File

@ -1,12 +1,14 @@
package antlr4
type IntStream interface {
Consume()
LA(int) int
mark() int
release(marker int)
index() int
seek(index int)
size() int
Mark() int
Release(marker int)
Index() int
Seek(index int)
Size() int
GetSourceName() string
}

View File

@ -38,7 +38,7 @@ func (la *LL1Analyzer) getDecisionLookahead(s IATNState) []*IntervalSet {
look[alt] = NewIntervalSet()
var lookBusy = NewSet(nil, nil)
var seeThruPreds = false // fail to get lookahead upon pred
la._LOOK(s.GetTransitions()[alt].getTarget(), nil, PredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
la._LOOK(s.GetTransitions()[alt].getTarGet(), nil, PredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
// Wipe out lookahead for la alternative if we found nothing
// or we had a predicate when we !seeThruPreds
if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHIT_PRED) {
@ -168,26 +168,26 @@ func (la *LL1Analyzer) _LOOK(s, stopState IATNState, ctx IPredictionContext, loo
if t1, ok := t.(*RuleTransition); ok {
if calledRuleStack.contains(t1.getTarget().GetRuleIndex()) {
if calledRuleStack.contains(t1.getTarGet().GetRuleIndex()) {
continue
}
newContext := SingletonPredictionContextcreate(ctx, t1.followState.GetStateNumber())
newContext := SingletonPredictionContextCreate(ctx, t1.followState.GetStateNumber())
defer func() {
calledRuleStack.remove(t1.getTarget().GetRuleIndex())
calledRuleStack.remove(t1.getTarGet().GetRuleIndex())
}()
calledRuleStack.add(t1.getTarget().GetRuleIndex())
la._LOOK(t.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
calledRuleStack.add(t1.getTarGet().GetRuleIndex())
la._LOOK(t.getTarGet(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else if t2, ok := t.(*AbstractPredicateTransition); ok {
if seeThruPreds {
la._LOOK(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la._LOOK(t2.getTarGet(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
look.addOne(LL1AnalyzerHIT_PRED)
}
} else if t.getIsEpsilon() {
la._LOOK(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la._LOOK(t.getTarGet(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else if _, ok := t.(*WildcardTransition); ok {
look.addRange(TokenMinUserTokenType, la.atn.maxTokenType)
} else {

View File

@ -117,7 +117,7 @@ const (
func (l *Lexer) reset() {
// wack Lexer state variables
if l._input != nil {
l._input.seek(0) // rewind the input
l._input.Seek(0) // rewind the input
}
l._token = nil
l._type = TokenInvalidType
@ -134,7 +134,7 @@ func (l *Lexer) reset() {
l.Interpreter.reset()
}
func (l *Lexer) getInputStream() CharStream {
func (l *Lexer) GetInputStream() CharStream {
return l._input
}
@ -177,13 +177,13 @@ func (l *Lexer) nextToken() *Token {
}
// do this when done consuming
var tokenStartMarker = l._input.mark()
var tokenStartMarker = l._input.Mark()
// previously in finally block
defer func() {
// make sure we release marker after Match or
// unbuffered char stream will keep buffering
l._input.release(tokenStartMarker)
l._input.Release(tokenStartMarker)
}()
for true {
@ -193,7 +193,7 @@ func (l *Lexer) nextToken() *Token {
}
l._token = nil
l._channel = TokenDefaultChannel
l._tokenStartCharIndex = l._input.index()
l._tokenStartCharIndex = l._input.Index()
l._tokenStartColumn = l.Interpreter.column
l._tokenStartLine = l.Interpreter.line
l._text = nil
@ -296,7 +296,7 @@ func (l *Lexer) emitToken(token *Token) {
// custom Token objects or provide a Newfactory.
// /
func (l *Lexer) emit() *Token {
var t = l._factory.create(l._tokenFactorySourcePair, l._type, *l._text, l._channel, l._tokenStartCharIndex, l.getCharIndex()-1, l._tokenStartLine, l._tokenStartColumn)
var t = l._factory.Create(l._tokenFactorySourcePair, l._type, *l._text, l._channel, l._tokenStartCharIndex, l.getCharIndex()-1, l._tokenStartLine, l._tokenStartColumn)
l.emitToken(t)
return t
}
@ -304,7 +304,7 @@ func (l *Lexer) emit() *Token {
func (l *Lexer) emitEOF() *Token {
cpos := l.getCharPositionInLine()
lpos := l.getLine()
var eof = l._factory.create(l._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, l._input.index(), l._input.index()-1, lpos, cpos)
var eof = l._factory.Create(l._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, l._input.Index(), l._input.Index()-1, lpos, cpos)
l.emitToken(eof)
return eof
}
@ -327,7 +327,7 @@ func (l *Lexer) setType(t int) {
// What is the index of the current character of lookahead?///
func (l *Lexer) getCharIndex() int {
return l._input.index()
return l._input.Index()
}
// Return the text Matched so far for the current token or any text override.
@ -363,7 +363,7 @@ func (l *Lexer) getAllTokens() []*Token {
func (l *Lexer) notifyListeners(e IRecognitionException) {
var start = l._tokenStartCharIndex
var stop = l._input.index()
var stop = l._input.Index()
var text = l._input.GetTextFromInterval(NewInterval(start, stop))
var msg = "token recognition error at: '" + text + "'"
var listener = l.getErrorListenerDispatch()

View File

@ -107,13 +107,13 @@ func (this *LexerATNSimulator) Match(input CharStream, mode int) int {
this.Match_calls += 1
this.mode = mode
var mark = input.mark()
var mark = input.Mark()
defer func() {
input.release(mark)
input.Release(mark)
}()
this.startIndex = input.index()
this.startIndex = input.Index()
this.prevAccept.reset()
var dfa = this.decisionToDFA[mode]
if dfa.s0 == nil {
@ -279,7 +279,7 @@ func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStre
return prevAccept.dfaState.prediction
} else {
// if no accept and EOF is first char, return EOF
if t == TokenEOF && input.index() == this.startIndex {
if t == TokenEOF && input.Index() == this.startIndex {
return TokenEOF
}
panic(NewLexerNoViableAltException(this.recog, input, this.startIndex, reach))
@ -304,11 +304,11 @@ func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *
}
for j := 0; j < len(cfg.GetState().GetTransitions()); j++ {
var trans = cfg.GetState().GetTransitions()[j] // for each transition
var target = this.getReachableTarget(trans, t)
var target = this.getReachableTarGet(trans, t)
if target != nil {
var lexerActionExecutor = cfg.(*LexerATNConfig).lexerActionExecutor
if lexerActionExecutor != nil {
lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.index() - this.startIndex)
lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.Index() - this.startIndex)
}
var treatEofAsEpsilon = (t == TokenEOF)
var config = NewLexerATNConfig3(cfg.(*LexerATNConfig), target, lexerActionExecutor)
@ -328,7 +328,7 @@ func (this *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *Lex
fmt.Println("ACTION %s\n", lexerActionExecutor)
}
// seek to after last char in token
input.seek(index)
input.Seek(index)
this.line = line
this.column = charPos
if lexerActionExecutor != nil && this.recog != nil {
@ -336,9 +336,9 @@ func (this *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *Lex
}
}
func (this *LexerATNSimulator) getReachableTarget(trans ITransition, t int) IATNState {
func (this *LexerATNSimulator) getReachableTarGet(trans ITransition, t int) IATNState {
if trans.Matches(t, 0, 0xFFFE) {
return trans.getTarget()
return trans.getTarGet()
} else {
return nil
}
@ -348,7 +348,7 @@ func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState)
var configs = NewOrderedATNConfigSet()
for i := 0; i < len(p.GetTransitions()); i++ {
var target = p.GetTransitions()[i].getTarget()
var target = p.GetTransitions()[i].getTarGet()
var cfg = NewLexerATNConfig6(target, i+1, PredictionContextEMPTY)
this.closure(input, cfg, configs.ATNConfigSet, false, false, false)
}
@ -408,7 +408,7 @@ func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig,
}
for j := 0; j < len(config.state.GetTransitions()); j++ {
var trans = config.state.GetTransitions()[j]
cfg := this.getEpsilonTarget(input, config, trans, configs, speculative, treatEofAsEpsilon)
cfg := this.getEpsilonTarGet(input, config, trans, configs, speculative, treatEofAsEpsilon)
if cfg != nil {
currentAltReachedAcceptState = this.closure(input, cfg, configs,
currentAltReachedAcceptState, speculative, treatEofAsEpsilon)
@ -418,7 +418,7 @@ func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig,
}
// side-effect: can alter configs.hasSemanticContext
func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNConfig, trans ITransition,
func (this *LexerATNSimulator) getEpsilonTarGet(input CharStream, config *LexerATNConfig, trans ITransition,
configs *ATNConfigSet, speculative, treatEofAsEpsilon bool) *LexerATNConfig {
var cfg *LexerATNConfig
@ -426,8 +426,8 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
if trans.getSerializationType() == TransitionRULE {
rt := trans.(*RuleTransition)
var newContext = SingletonPredictionContextcreate(config.context, rt.followState.GetStateNumber())
cfg = NewLexerATNConfig2(config, trans.getTarget(), newContext)
var newContext = SingletonPredictionContextCreate(config.context, rt.followState.GetStateNumber())
cfg = NewLexerATNConfig2(config, trans.getTarGet(), newContext)
} else if trans.getSerializationType() == TransitionPRECEDENCE {
panic("Precedence predicates are not supported in lexers.")
@ -457,7 +457,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
}
configs.hasSemanticContext = true
if this.evaluatePredicate(input, pt.ruleIndex, pt.predIndex, speculative) {
cfg = NewLexerATNConfig4(config, trans.getTarget())
cfg = NewLexerATNConfig4(config, trans.getTarGet())
}
} else if trans.getSerializationType() == TransitionACTION {
if config.context == nil || config.context.hasEmptyPath() {
@ -474,19 +474,19 @@ func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerA
// additional modifications are needed before we can support
// the split operation.
var lexerActionExecutor = LexerActionExecutorappend(config.lexerActionExecutor, this.atn.lexerActions[trans.(*ActionTransition).actionIndex])
cfg = NewLexerATNConfig3(config, trans.getTarget(), lexerActionExecutor)
cfg = NewLexerATNConfig3(config, trans.getTarGet(), lexerActionExecutor)
} else {
// ignore actions in referenced rules
cfg = NewLexerATNConfig4(config, trans.getTarget())
cfg = NewLexerATNConfig4(config, trans.getTarGet())
}
} else if trans.getSerializationType() == TransitionEPSILON {
cfg = NewLexerATNConfig4(config, trans.getTarget())
cfg = NewLexerATNConfig4(config, trans.getTarGet())
} else if trans.getSerializationType() == TransitionATOM ||
trans.getSerializationType() == TransitionRANGE ||
trans.getSerializationType() == TransitionSET {
if treatEofAsEpsilon {
if trans.Matches(TokenEOF, 0, 0xFFFF) {
cfg = NewLexerATNConfig4(config, trans.getTarget())
cfg = NewLexerATNConfig4(config, trans.getTarGet())
}
}
}
@ -523,14 +523,14 @@ func (this *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, pr
}
var savedcolumn = this.column
var savedLine = this.line
var index = input.index()
var marker = input.mark()
var index = input.Index()
var marker = input.Mark()
defer func() {
this.column = savedcolumn
this.line = savedLine
input.seek(index)
input.release(marker)
input.Seek(index)
input.Release(marker)
}()
this.consume(input)
@ -538,7 +538,7 @@ func (this *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, pr
}
func (this *LexerATNSimulator) captureSimState(settings *SimState, input CharStream, dfaState *DFAState) {
settings.index = input.index()
settings.index = input.Index()
settings.line = this.line
settings.column = this.column
settings.dfaState = dfaState
@ -628,7 +628,7 @@ func (this *LexerATNSimulator) getDFA(mode int) *DFA {
// Get the text Matched so far for the current token.
func (this *LexerATNSimulator) GetText(input CharStream) string {
// index is first lookahead char, don't include.
return input.GetTextFromInterval(NewInterval(this.startIndex, input.index()-1))
return input.GetTextFromInterval(NewInterval(this.startIndex, input.Index()-1))
}
func (this *LexerATNSimulator) consume(input CharStream) {

View File

@ -130,11 +130,11 @@ func (this *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionEx
// /
func (this *LexerActionExecutor) execute(lexer ILexer, input CharStream, startIndex int) {
var requiresSeek = false
var stopIndex = input.index()
var stopIndex = input.Index()
defer func() {
if requiresSeek {
input.seek(stopIndex)
input.Seek(stopIndex)
}
}()
@ -142,11 +142,11 @@ func (this *LexerActionExecutor) execute(lexer ILexer, input CharStream, startIn
var lexerAction ILexerAction = this.lexerActions[i]
if la, ok := lexerAction.(*LexerIndexedCustomAction); ok {
var offset = la.offset
input.seek(startIndex + offset)
input.Seek(startIndex + offset)
lexerAction = la.lexerAction
requiresSeek = (startIndex + offset) != stopIndex
} else if lexerAction.getIsPositionDependent() {
input.seek(stopIndex)
input.Seek(stopIndex)
requiresSeek = false
}
lexerAction.execute(lexer)

View File

@ -11,7 +11,7 @@ type IParser interface {
Consume() *Token
GetParseListeners() []ParseTreeListener
getInputStream() CharStream
GetInputStream() IntStream
getCurrentToken() *Token
getExpectedTokens() *IntervalSet
notifyErrorListeners(msg string, offendingToken *Token, err IRecognitionException)
@ -90,7 +90,7 @@ var bypassAltsAtnCache = make(map[string]int)
// reset the parser's state//
func (p *Parser) reset() {
if p._input != nil {
p._input.seek(0)
p._input.Seek(0)
}
p._errHandler.reset(p)
p._ctx = nil
@ -332,7 +332,7 @@ func (p *Parser) GetATNWithBypassAlts() {
// ParseTreePattern p = parser.compileParseTreePattern("&ltID&gt+0",
// MyParser.RULE_expr)
// ParseTreeMatch m = p.Match(t)
// String id = m.get("ID")
// String id = m.Get("ID")
// </pre>
func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer ILexer) {
@ -355,8 +355,8 @@ func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer ILexer
// return m.compile(pattern, patternRuleIndex)
}
func (p *Parser) getInputStream() CharStream {
return p.GetTokenStream().(CharStream)
func (p *Parser) GetInputStream() IntStream {
return p.GetTokenStream()
}
func (p *Parser) setInputStream(input TokenStream) {
@ -395,7 +395,7 @@ func (p *Parser) notifyErrorListeners(msg string, offendingToken *Token, err IRe
func (p *Parser) Consume() *Token {
var o = p.getCurrentToken()
if o.tokenType != TokenEOF {
p.getInputStream().Consume()
p.GetInputStream().Consume()
}
var hasListener = p._parseListeners != nil && len(p._parseListeners) > 0
if p.buildParseTrees || hasListener {

View File

@ -71,19 +71,19 @@ func (this *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int,
}
this._input = input
this._startIndex = input.index()
this._startIndex = input.Index()
this._outerContext = outerContext
var dfa = this.decisionToDFA[decision]
this._dfa = dfa
var m = input.mark()
var index = input.index()
var m = input.Mark()
var index = input.Index()
defer func() {
this._dfa = nil
this.mergeCache = nil // wack cache after each prediction
input.seek(index)
input.release(m)
input.Seek(index)
input.Release(m)
}()
// Now we are certain to have a specific decision's DFA
@ -205,7 +205,7 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
// If conflict in states that dip out, choose min since we
// will get error no matter what.
e := this.noViableAlt(input, outerContext, previousD.configs, startIndex)
input.seek(startIndex)
input.Seek(startIndex)
alt := this.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configs, outerContext)
if alt != ATNINVALID_ALT_NUMBER {
return alt
@ -220,9 +220,9 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
if ParserATNSimulatorprototypedebug {
fmt.Println("DFA state has preds in DFA sim LL failover")
}
var conflictIndex = input.index()
var conflictIndex = input.Index()
if conflictIndex != startIndex {
input.seek(startIndex)
input.Seek(startIndex)
}
conflictingAlts = this.evalSemanticContext(D.predicates, outerContext, true)
if conflictingAlts.length() == 1 {
@ -234,7 +234,7 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
if conflictIndex != startIndex {
// restore the index so reporting the fallback to full
// context occurs with the index at the correct spot
input.seek(conflictIndex)
input.Seek(conflictIndex)
}
}
if ParserATNSimulatorprototypedfa_debug {
@ -242,7 +242,7 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
}
var fullCtx = true
var s0_closure = this.computeStartState(dfa.atnStartState, outerContext, fullCtx)
this.reportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.index())
this.reportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.Index())
alt := this.execATNWithFullContext(dfa, D, s0_closure, input, startIndex, outerContext)
return alt
}
@ -250,8 +250,8 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
if D.predicates == nil {
return D.prediction
}
var stopIndex = input.index()
input.seek(startIndex)
var stopIndex = input.Index()
input.Seek(startIndex)
var alts = this.evalSemanticContext(D.predicates, outerContext, true)
if alts.length() == 0 {
panic(this.noViableAlt(input, outerContext, D.configs, startIndex))
@ -380,7 +380,7 @@ func (this *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0
var foundExactAmbig = false
var reach *ATNConfigSet = nil
var previous = s0
input.seek(startIndex)
input.Seek(startIndex)
var t = input.LA(1)
var predictedAlt = -1
@ -397,7 +397,7 @@ func (this *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0
// If conflict in states that dip out, choose min since we
// will get error no matter what.
var e = this.noViableAlt(input, outerContext, previous, startIndex)
input.seek(startIndex)
input.Seek(startIndex)
var alt = this.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext)
if alt != ATNINVALID_ALT_NUMBER {
return alt
@ -443,7 +443,7 @@ func (this *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0
// without conflict, then we know that it's a full LL decision
// not SLL.
if reach.uniqueAlt != ATNINVALID_ALT_NUMBER {
this.reportContextSensitivity(dfa, predictedAlt, reach, startIndex, input.index())
this.reportContextSensitivity(dfa, predictedAlt, reach, startIndex, input.Index())
return predictedAlt
}
// We do not check predicates here because we have checked them
@ -473,7 +473,7 @@ func (this *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0
// the fact that we should predict alternative 1. We just can't say for
// sure that there is an ambiguity without looking further.
this.reportAmbiguity(dfa, D, startIndex, input.index(), foundExactAmbig, nil, reach)
this.reportAmbiguity(dfa, D, startIndex, input.Index(), foundExactAmbig, nil, reach)
return predictedAlt
}
@ -524,7 +524,7 @@ func (this *ParserATNSimulator) computeReachSet(closure *ATNConfigSet, t int, fu
for j := 0; j < len(c.GetState().GetTransitions()); j++ {
var trans = c.GetState().GetTransitions()[j]
var target = this.getReachableTarget(trans, t)
var target = this.getReachableTarGet(trans, t)
if target != nil {
var cfg = NewATNConfig4(c, target)
intermediate.add(cfg, this.mergeCache)
@ -660,7 +660,7 @@ func (this *ParserATNSimulator) computeStartState(p IATNState, ctx IRuleContext,
var initialContext = predictionContextFromRuleContext(this.atn, ctx)
var configs = NewATNConfigSet(fullCtx)
for i := 0; i < len(p.GetTransitions()); i++ {
var target = p.GetTransitions()[i].getTarget()
var target = p.GetTransitions()[i].getTarGet()
var c = NewATNConfig5(target, i+1, initialContext, nil)
var closureBusy = NewSet(nil, nil)
this.closure(c, configs, closureBusy, true, fullCtx, false)
@ -768,9 +768,9 @@ func (this *ParserATNSimulator) applyPrecedenceFilter(configs *ATNConfigSet) *AT
return configSet
}
func (this *ParserATNSimulator) getReachableTarget(trans ITransition, ttype int) IATNState {
func (this *ParserATNSimulator) getReachableTarGet(trans ITransition, ttype int) IATNState {
if trans.Matches(ttype, 0, this.atn.maxTokenType) {
return trans.getTarget()
return trans.getTarGet()
} else {
return nil
}
@ -1052,7 +1052,7 @@ func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSe
var t = p.GetTransitions()[i]
_, ok := t.(*ActionTransition)
var continueCollecting = collectPredicates && !ok
var c = this.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEofAsEpsilon)
var c = this.getEpsilonTarGet(config, t, continueCollecting, depth == 0, fullCtx, treatEofAsEpsilon)
if c != nil {
if !t.getIsEpsilon() && closureBusy.add(c) != c {
// avoid infinite recursion for EOF* and EOF+
@ -1103,7 +1103,7 @@ func (this *ParserATNSimulator) getRuleName(index int) string {
}
}
func (this *ParserATNSimulator) getEpsilonTarget(config IATNConfig, t ITransition, collectPredicates, inContext, fullCtx, treatEofAsEpsilon bool) *ATNConfig {
func (this *ParserATNSimulator) getEpsilonTarGet(config IATNConfig, t ITransition, collectPredicates, inContext, fullCtx, treatEofAsEpsilon bool) *ATNConfig {
switch t.getSerializationType() {
case TransitionRULE:
@ -1115,13 +1115,13 @@ func (this *ParserATNSimulator) getEpsilonTarget(config IATNConfig, t ITransitio
case TransitionACTION:
return this.actionTransition(config, t.(*ActionTransition))
case TransitionEPSILON:
return NewATNConfig4(config, t.getTarget())
return NewATNConfig4(config, t.getTarGet())
case TransitionATOM:
// EOF transitions act like epsilon transitions after the first EOF
// transition is traversed
if treatEofAsEpsilon {
if t.Matches(TokenEOF, 0, 1) {
return NewATNConfig4(config, t.getTarget())
return NewATNConfig4(config, t.getTarGet())
}
}
return nil
@ -1130,7 +1130,7 @@ func (this *ParserATNSimulator) getEpsilonTarget(config IATNConfig, t ITransitio
// transition is traversed
if treatEofAsEpsilon {
if t.Matches(TokenEOF, 0, 1) {
return NewATNConfig4(config, t.getTarget())
return NewATNConfig4(config, t.getTarGet())
}
}
return nil
@ -1139,7 +1139,7 @@ func (this *ParserATNSimulator) getEpsilonTarget(config IATNConfig, t ITransitio
// transition is traversed
if treatEofAsEpsilon {
if t.Matches(TokenEOF, 0, 1) {
return NewATNConfig4(config, t.getTarget())
return NewATNConfig4(config, t.getTarGet())
}
}
return nil
@ -1152,7 +1152,7 @@ func (this *ParserATNSimulator) actionTransition(config IATNConfig, t *ActionTra
if ParserATNSimulatorprototypedebug {
fmt.Println("ACTION edge " + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa(t.actionIndex))
}
return NewATNConfig4(config, t.getTarget())
return NewATNConfig4(config, t.getTarGet())
}
func (this *ParserATNSimulator) precedenceTransition(config IATNConfig,
@ -1172,19 +1172,19 @@ func (this *ParserATNSimulator) precedenceTransition(config IATNConfig,
// during closure, which dramatically reduces the size of
// the config sets. It also obviates the need to test predicates
// later during conflict resolution.
var currentPosition = this._input.index()
this._input.seek(this._startIndex)
var currentPosition = this._input.Index()
this._input.Seek(this._startIndex)
var predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext)
this._input.seek(currentPosition)
this._input.Seek(currentPosition)
if predSucceeds {
c = NewATNConfig4(config, pt.getTarget()) // no pred context
c = NewATNConfig4(config, pt.getTarGet()) // no pred context
}
} else {
newSemCtx := SemanticContextandContext(config.GetSemanticContext(), pt.getPredicate())
c = NewATNConfig3(config, pt.getTarget(), newSemCtx)
c = NewATNConfig3(config, pt.getTarGet(), newSemCtx)
}
} else {
c = NewATNConfig4(config, pt.getTarget())
c = NewATNConfig4(config, pt.getTarGet())
}
if ParserATNSimulatorprototypedebug {
fmt.Println("config from pred transition=" + c.toString())
@ -1208,19 +1208,19 @@ func (this *ParserATNSimulator) predTransition(config IATNConfig, pt *PredicateT
// during closure, which dramatically reduces the size of
// the config sets. It also obviates the need to test predicates
// later during conflict resolution.
var currentPosition = this._input.index()
this._input.seek(this._startIndex)
var currentPosition = this._input.Index()
this._input.Seek(this._startIndex)
var predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext)
this._input.seek(currentPosition)
this._input.Seek(currentPosition)
if predSucceeds {
c = NewATNConfig4(config, pt.getTarget()) // no pred context
c = NewATNConfig4(config, pt.getTarGet()) // no pred context
}
} else {
var newSemCtx = SemanticContextandContext(config.GetSemanticContext(), pt.getPredicate())
c = NewATNConfig3(config, pt.getTarget(), newSemCtx)
c = NewATNConfig3(config, pt.getTarGet(), newSemCtx)
}
} else {
c = NewATNConfig4(config, pt.getTarget())
c = NewATNConfig4(config, pt.getTarGet())
}
if ParserATNSimulatorprototypedebug {
fmt.Println("config from pred transition=" + c.toString())
@ -1230,11 +1230,11 @@ func (this *ParserATNSimulator) predTransition(config IATNConfig, pt *PredicateT
func (this *ParserATNSimulator) ruleTransition(config IATNConfig, t *RuleTransition) *ATNConfig {
if ParserATNSimulatorprototypedebug {
fmt.Println("CALL rule " + this.getRuleName(t.getTarget().GetRuleIndex()) + ", ctx=" + config.GetContext().toString())
fmt.Println("CALL rule " + this.getRuleName(t.getTarGet().GetRuleIndex()) + ", ctx=" + config.GetContext().toString())
}
var returnState = t.followState
var newContext = SingletonPredictionContextcreate(config.GetContext(), returnState.GetStateNumber())
return NewATNConfig1(config, t.getTarget(), newContext)
var newContext = SingletonPredictionContextCreate(config.GetContext(), returnState.GetStateNumber())
return NewATNConfig1(config, t.getTarGet(), newContext)
}
func (this *ParserATNSimulator) getConflictingAlts(configs *ATNConfigSet) *BitSet {
@ -1296,7 +1296,7 @@ func (this *ParserATNSimulator) GetTokenName(t int) string {
if this.parser != nil && this.parser.GetLiteralNames() != nil {
if t >= len(this.parser.GetLiteralNames()) {
fmt.Println(strconv.Itoa(t) + " ttype out of range: " + strings.Join(this.parser.GetLiteralNames(), ","))
// fmt.Println(this.parser.getInputStream().GetTokens())
// fmt.Println(this.parser.GetInputStream().GetTokens())
} else {
return this.parser.GetLiteralNames()[t] + "<" + strconv.Itoa(t) + ">"
}
@ -1343,7 +1343,7 @@ func (this *ParserATNSimulator) dumpDeadEndConfigs(nvae *NoViableAltException) {
}
func (this *ParserATNSimulator) noViableAlt(input TokenStream, outerContext IParserRuleContext, configs *ATNConfigSet, startIndex int) *NoViableAltException {
return NewNoViableAltException(this.parser, input, input.get(startIndex), input.LT(1), configs, outerContext)
return NewNoViableAltException(this.parser, input, input.Get(startIndex), input.LT(1), configs, outerContext)
}
func (this *ParserATNSimulator) getUniqueAlt(configs *ATNConfigSet) int {

View File

@ -244,7 +244,7 @@ func (prc *ParserRuleContext) getChildCount() int {
}
}
func (prc *ParserRuleContext) getSourceInterval() *Interval {
func (prc *ParserRuleContext) GetSourceInterval() *Interval {
if prc.start == nil || prc.stop == nil {
return TreeINVALID_INTERVAL
} else {

View File

@ -51,18 +51,18 @@ var PredictionContextid = PredictionContextglobalNodeCount
// int hash = {@link MurmurHash//initialize MurmurHash.initialize}({@link
// //INITIAL_HASH})
//
// for (int i = 0 i &lt {@link //size()} i++) {
// for (int i = 0 i &lt {@link //Size()} i++) {
// hash = {@link MurmurHash//update MurmurHash.update}(hash, {@link //GetParent
// GetParent}(i))
// }
//
// for (int i = 0 i &lt {@link //size()} i++) {
// for (int i = 0 i &lt {@link //Size()} i++) {
// hash = {@link MurmurHash//update MurmurHash.update}(hash, {@link
// //getReturnState getReturnState}(i))
// }
//
// hash = {@link MurmurHash//finish MurmurHash.finish}(hash, 2// {@link
// //size()})
// //Size()})
// return hash
// }
// </pre>
@ -134,7 +134,7 @@ func (this *PredictionContextCache) add(ctx IPredictionContext) IPredictionConte
return ctx
}
func (this *PredictionContextCache) get(ctx IPredictionContext) IPredictionContext {
func (this *PredictionContextCache) Get(ctx IPredictionContext) IPredictionContext {
return this.cache[ctx]
}
@ -172,7 +172,7 @@ func (s *SingletonPredictionContext) InitSingletonPredictionContext(parent IPred
}
func SingletonPredictionContextcreate(parent IPredictionContext, returnState int) IPredictionContext {
func SingletonPredictionContextCreate(parent IPredictionContext, returnState int) IPredictionContext {
if returnState == PredictionContextEMPTY_RETURN_STATE && parent == nil {
// someone can pass in the bits of an array ctx that mean $
return PredictionContextEMPTY
@ -372,7 +372,7 @@ func predictionContextFromRuleContext(a *ATN, outerContext IRuleContext) IPredic
var state = a.states[outerContext.getInvokingState()]
var transition = state.GetTransitions()[0]
return SingletonPredictionContextcreate(parent, transition.(*RuleTransition).followState.GetStateNumber())
return SingletonPredictionContextCreate(parent, transition.(*RuleTransition).followState.GetStateNumber())
}
func calculateListsHashString(parents []PredictionContext, returnStates []int) string {
@ -454,11 +454,11 @@ func merge(a, b IPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict)
// /
func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
if mergeCache != nil {
var previous = mergeCache.get(a.hashString(), b.hashString())
var previous = mergeCache.Get(a.hashString(), b.hashString())
if previous != nil {
return previous.(IPredictionContext)
}
previous = mergeCache.get(b.hashString(), a.hashString())
previous = mergeCache.Get(b.hashString(), a.hashString())
if previous != nil {
return previous.(IPredictionContext)
}
@ -485,7 +485,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
// merge parents x and y, giving array node with x,y then remainders
// of those graphs. dup a, a' points at merged array
// Newjoined parent so create Newsingleton pointing to it, a'
var spc = SingletonPredictionContextcreate(parent, a.returnState)
var spc = SingletonPredictionContextCreate(parent, a.returnState)
if mergeCache != nil {
mergeCache.set(a.hashString(), b.hashString(), spc)
}
@ -614,11 +614,11 @@ func mergeRoot(a, b ISingletonPredictionContext, rootIsWildcard bool) IPredictio
// /
func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
if mergeCache != nil {
var previous = mergeCache.get(a.hashString(), b.hashString())
var previous = mergeCache.Get(a.hashString(), b.hashString())
if previous != nil {
return previous.(IPredictionContext)
}
previous = mergeCache.get(b.hashString(), a.hashString())
previous = mergeCache.Get(b.hashString(), a.hashString())
if previous != nil {
return previous.(IPredictionContext)
}
@ -680,7 +680,7 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
// trim merged if we combined a few that had same stack tops
if k < len(mergedParents) { // write index < last position trim
if k == 1 { // for just one merged element, return singleton top
var a_ = SingletonPredictionContextcreate(mergedParents[0], mergedReturnStates[0])
var a_ = SingletonPredictionContextCreate(mergedParents[0], mergedReturnStates[0])
if mergeCache != nil {
mergeCache.set(a.hashString(), b.hashString(), a_)
}
@ -744,7 +744,7 @@ func getCachedPredictionContext(context IPredictionContext, contextCache *Predic
// if (existing != nil) {
// return existing
// }
// existing = contextCache.get(context)
// existing = contextCache.Get(context)
// if (existing != nil) {
// Visited[context] = existing
// return existing
@ -773,7 +773,7 @@ func getCachedPredictionContext(context IPredictionContext, contextCache *Predic
// if (parents.length == 0) {
// updated = PredictionContextEMPTY
// } else if (parents.length == 1) {
// updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0))
// updated = SingletonPredictionContext.Create(parents[0], context.getReturnState(0))
// } else {
// updated = NewArrayPredictionContext(parents, context.returnStates)
// }

View File

@ -527,7 +527,7 @@ func PredictionModeGetStateToAltMap(configs *ATNConfigSet) *AltDict {
var m = NewAltDict()
for _, c := range configs.configs {
var alts = m.get(c.GetState().toString())
var alts = m.Get(c.GetState().toString())
if alts == nil {
alts = NewBitSet()
m.put(c.GetState().toString(), alts)

View File

@ -137,7 +137,7 @@ func (this *Recognizer) GetTokenType(tokenName string) int {
// Vocabulary vocabulary = getVocabulary();
//
// Synchronized (tokenTypeMapCache) {
// Map<String, Integer> result = tokenTypeMapCache.get(vocabulary);
// Map<String, Integer> result = tokenTypeMapCache.Get(vocabulary);
// if (result == null) {
// result = new HashMap<String, Integer>();
// for (int i = 0; i < GetATN().maxTokenType; i++) {
@ -163,8 +163,8 @@ func (this *Recognizer) GetTokenType(tokenName string) int {
// What is the error header, normally line/character position information?//
func (this *Recognizer) getErrorHeader(e IRecognitionException) string {
var line = e.getOffendingToken().line
var column = e.getOffendingToken().column
var line = e.GetOffendingToken().line
var column = e.GetOffendingToken().column
return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
}

View File

@ -111,7 +111,7 @@ func (this *RuleContext) isEmpty() bool {
// satisfy the ParseTree / SyntaxTree interface
func (this *RuleContext) getSourceInterval() *Interval {
func (this *RuleContext) GetSourceInterval() *Interval {
return TreeINVALID_INTERVAL
}

View File

@ -5,6 +5,8 @@ import (
"strings"
)
type TokenSourceCharStreamPair struct {
tokenSource TokenSource
charStream CharStream
@ -66,11 +68,15 @@ func (this *Token) setText(s string) {
this._text = s
}
func (this *Token) GetTokenIndex() int {
return this.tokenIndex
}
func (this *Token) GetTokenSource() TokenSource {
return this.source.tokenSource
}
func (this *Token) getInputStream() CharStream {
func (this *Token) GetInputStream() CharStream {
return this.source.charStream
}
@ -110,7 +116,7 @@ func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start
// the {@link Pair} stored in {@link //source}. Otherwise, {@link //text} will
// be assigned the result of calling {@link //GetText}, and {@link //source}
// will be constructed from the result of {@link Token//GetTokenSource} and
// {@link Token//getInputStream}.</p>
// {@link Token//GetInputStream}.</p>
//
// @param oldToken The token to copy.
//
@ -128,11 +134,11 @@ func (this *CommonToken) text() string {
if this._text != "" {
return this._text
}
var input = this.getInputStream()
var input = this.GetInputStream()
if input == nil {
return ""
}
var n = input.size()
var n = input.Size()
if this.start < n && this.stop < n {
return input.GetTextFromInterval(NewInterval(this.start, this.stop))
} else {

View File

@ -6,7 +6,7 @@ type TokenSource interface {
more()
getLine() int
getCharPositionInLine() int
getInputStream() CharStream
GetInputStream() CharStream
GetSourceName() string
setTokenFactory(factory TokenFactory)
GetTokenFactory() TokenFactory

View File

@ -5,11 +5,11 @@ type TokenStream interface {
LT(k int) *Token
get(index int) *Token
Get(index int) *Token
GetTokenSource() TokenSource
setTokenSource(TokenSource)
SetTokenSource(TokenSource)
GetText() string
GetAllText() string
GetTextFromInterval(*Interval) string
GetTextFromRuleContext(IRuleContext) string
GetTextFromTokens(*Token, *Token) string

View File

@ -15,8 +15,8 @@ import (
// ATN transitions.</p>
type ITransition interface {
getTarget() IATNState
setTarget(IATNState)
getTarGet() IATNState
setTarGet(IATNState)
getIsEpsilon() bool
getLabel() *IntervalSet
getSerializationType() int
@ -49,11 +49,11 @@ func (t *Transition) InitTransition(target IATNState) {
t.label = nil
}
func (t *Transition) getTarget() IATNState {
func (t *Transition) getTarGet() IATNState {
return t.target
}
func (t *Transition) setTarget(s IATNState) {
func (t *Transition) setTarGet(s IATNState) {
t.target = s
}

View File

@ -20,7 +20,7 @@ type Tree interface {
type SyntaxTree interface {
Tree
getSourceInterval() *Interval
GetSourceInterval() *Interval
}
type ParseTree interface {
@ -131,7 +131,7 @@ func (this *TerminalNodeImpl) getPayload() interface{} {
return this.symbol
}
func (this *TerminalNodeImpl) getSourceInterval() *Interval {
func (this *TerminalNodeImpl) GetSourceInterval() *Interval {
if this.symbol == nil {
return TreeINVALID_INTERVAL
}

View File

@ -252,7 +252,7 @@ func NewAltDict() *AltDict {
return d
}
func (this *AltDict) get(key string) interface{} {
func (this *AltDict) Get(key string) interface{} {
key = "k-" + key
return this.data[key]
}
@ -282,7 +282,7 @@ func NewDoubleDict() *DoubleDict {
return dd
}
func (this *DoubleDict) get(a string, b string) interface{} {
func (this *DoubleDict) Get(a string, b string) interface{} {
var d = this.data[a]
if d == nil {

View File

@ -52,7 +52,7 @@ package org.antlr.v4.runtime;
* Note: lexer rules which use the {@code ->skip} lexer command or call
* {@link Lexer#skip} do not produce tokens at all, so input text matched by
* such a rule will not be available as part of the token stream, regardless of
* channel.</p>
 * channel.</p>
*/
public class CommonTokenStream extends BufferedTokenStream {
/**

View File

@ -101,7 +101,7 @@ var <superClass> = require('./<superClass>').<superClass> // TODO
var parserATN = <atn>
var deserializer = antlr4.NewATNDeserializer(nil)
var deserializedATN = deserializer.Deserialize( []rune( parserATN ) )
var deserializedATN = deserializer.DeserializeFromUInt16( parserATN )
var literalNames = []string{ <parser.literalNames:{t | <t>}; null="\"\"", separator=", ", wrap, anchor> }
var symbolicNames = []string{ <parser.symbolicNames:{t | <t>}; null="\"\"", separator=", ", wrap, anchor> }
@ -151,7 +151,7 @@ const (
func (p *<parser.name>) Sempred(localctx, ruleIndex int, predIndex int) {
switch ruleIndex {
<parser.sempredFuncs.values:{f | case <f.ruleIndex>:
return p.<f.name>_Sempred(localctx, predIndex);}; separator="\n">
return p.<f.name; format="cap">_Sempred(localctx, predIndex);}; separator="\n">
default:
panic("No predicate with index:" + ruleIndex)
}
@ -168,7 +168,7 @@ func (l *<lexer.name>) Action(localctx, ruleIndex int, actionIndex int) {
switch ruleIndex {
<recog.actionFuncs.values:{f|
case <f.ruleIndex>:
p.<f.name>_Action(localctx, actionIndex)
p.<f.name; format="cap">_Action(localctx, actionIndex)
}; separator="\n">
default:
panic("No registered action for:" + ruleIndex)
@ -197,7 +197,7 @@ func (l *<lexer.name>) Sempred(localctx, ruleIndex, predIndex) {
*/
RuleActionFunction(r, actions) ::= <<
func (l *<lexer.name>) <r.name>_Action(localctx , actionIndex) {
func (l *<lexer.name>) <r.name; format="cap">_Action(localctx , actionIndex) {
switch actionIndex {
<actions:{index|
case <index>:
@ -213,7 +213,7 @@ case <index>:
* overriding implementation impossible to maintain.
*/
RuleSempredFunction(r, actions) ::= <<
func (s *<if(parser)><parser.name><else><lexer.name><endif>) <r.name>_Sempred(localctx, predIndex int) {
func (s *<if(parser)><parser.name><else><lexer.name><endif>) <r.name; format="cap">_Sempred(localctx, predIndex int) {
switch predIndex {
<actions:{index| case <index>:
return <actions.(index)>;}; separator="\n">
@ -232,7 +232,7 @@ RuleFunction(currentRule,args,code,locals,ruleCtx,altLabelCtxs,namedActions,fina
<altLabelCtxs:{l | <altLabelCtxs.(l)>}; separator="\n">
func (p *<parser.name>) <currentRule.name>(<currentRule.args:{a | <a.name>}; separator=", ">) *<currentRule.ctxType> {
func (p *<parser.name>) <currentRule.name; format="cap">(<currentRule.args:{a | <a.name>}; separator=", ">) *<currentRule.ctxType> {
localctx := New<currentRule.ctxType>(p, p.GetParserRuleContext(), p.GetState()<currentRule.args:{a | , <a.name>}>)
p.EnterRule(localctx, <currentRule.startState>, <parser.name>RULE_<currentRule.name>)
@ -276,7 +276,7 @@ LeftRecursiveRuleFunction(currentRule,args,code,locals,ruleCtx,altLabelCtxs,
<ruleCtx>
<altLabelCtxs:{l | <altLabelCtxs.(l)>}; separator="\n">
func (p *<parser.name>) <currentRule.name>(_p<if(currentRule.args)>, <args:{a | , <a>}><endif>) *<currentRule.ctxType> {
func (p *<parser.name>) <currentRule.name; format="cap">(_p<if(currentRule.args)>, <args:{a | , <a>}><endif>) *<currentRule.ctxType> {
_parentctx := p.GetParent().(IParserRuleContext)
_parentState := p.GetState()
@ -459,7 +459,7 @@ testShiftInRange(shiftAmount) ::= <<
// produces smaller bytecode only when bits.ttypes contains more than two items
bitsetBitfieldComparison(s, bits) ::= <%
(<testShiftInRange({<offsetShiftVar(s.varName, bits.shift)>})> && ((1 \<\< <offsetShiftVar(s.varName, bits.shift)>) & (<bits.ttypes:{ttype | (1 \<\< <offsetShiftType(ttype, bits.shift)>)}; separator=" | ">)) != 0)
(<testShiftInRange({<offsetShiftVar(s.varName, bits.shift)>})> && ((1 \<\< uint(<offsetShiftVar(s.varName, bits.shift)>)) & (<bits.ttypes:{ttype | (1 \<\< <offsetShiftType(ttype, bits.shift)>)}; separator=" | ">)) != 0)
%>
isZero ::= [
@ -486,7 +486,7 @@ cases(ttypes) ::= <<
InvokeRule(r, argExprsChunks) ::= <<
p.SetState(<r.stateNumber>)
<if(r.labels)><r.labels:{l | <labelref(l)> = }><endif>p.<r.name>(<if(r.ast.options.p)><r.ast.options.p><if(argExprsChunks)>,<endif><endif><argExprsChunks>)
<if(r.labels)><r.labels:{l | <labelref(l)> = }><endif>p.<r.name; format="cap">(<if(r.ast.options.p)><r.ast.options.p><if(argExprsChunks)>,<endif><endif><argExprsChunks>)
>>
MatchToken(m) ::= <<
@ -535,13 +535,13 @@ catch (<catchArg>) {
// lexer actions are not associated with model objects
LexerSkipCommand() ::= "p.skip()"
LexerMoreCommand() ::= "p.more()"
LexerPopModeCommand() ::= "p.popMode()"
LexerTypeCommand(arg) ::= "p._type = <arg>"
LexerChannelCommand(arg) ::= "p._channel = <arg>"
LexerModeCommand(arg) ::= "p._mode = <arg>"
LexerPushModeCommand(arg) ::= "p.pushMode(<arg>)"
LexerSkipCommand() ::= "p.Skip()"
LexerMoreCommand() ::= "p.More()"
LexerPopModeCommand() ::= "p.PopMode()"
LexerTypeCommand(arg) ::= "p.SetType(<arg>)"
LexerChannelCommand(arg) ::= "p.SetChannel(<arg>)"
LexerModeCommand(arg) ::= "p.SetMode(<arg>)"
LexerPushModeCommand(arg) ::= "p.PushMode(<arg>)"
ActionText(t) ::= "<t.text>"
ActionTemplate(t) ::= "<t.st>"
@ -578,8 +578,8 @@ ThisRulePropertyRef_text(r) ::= "p.GetTokenStream().GetTextFromInterval(NewInte
ThisRulePropertyRef_ctx(r) ::= "localctx"
ThisRulePropertyRef_parser(r) ::= "p"
NonLocalAttrRef(s) ::= "getInvokingContext(<s.ruleIndex>).<s.name>"
SetNonLocalAttr(s, rhsChunks) ::= "getInvokingContext(<s.ruleIndex>).<s.name> = <rhsChunks>"
NonLocalAttrRef(s) ::= "GetInvokingContext(<s.ruleIndex>).<s.name>"
SetNonLocalAttr(s, rhsChunks) ::= "GetInvokingContext(<s.ruleIndex>).<s.name> = <rhsChunks>"
AddToLabelList(a) ::= "<ctx(a.label)>.<a.listName> = append(<ctx(a.label)>.<a.listName>, push(<labelref(a.label)>)"
@ -590,7 +590,7 @@ RuleContextDecl(r) ::= "p.<r.name> = nil // reflect.TypeOf((*<r.ctxName>)(nil)).
RuleContextListDecl(rdecl) ::= "p.<rdecl.name> = [] // of <rdecl.ctxName>s"
ContextTokenGetterDecl(t) ::= <<
<t.name>() interface{} {
<t.name; format="cap">() interface{} {
return s.GetToken(<parser.name><t.name>, 0)
}
>>
@ -602,7 +602,7 @@ def <t.name>_list(self):
>>
ContextTokenListIndexedGetterDecl(t) ::= <<
<t.name>(i int) interface{} {
<t.name; format="cap">(i int) interface{} {
if i \< 0 {
return s.GetTokens(<parser.name><t.name>)
} else {
@ -613,20 +613,20 @@ ContextTokenListIndexedGetterDecl(t) ::= <<
>>
ContextRuleGetterDecl(r) ::= <<
<r.name>() interface{} {
<r.name; format="cap">() interface{} {
return s.GetTypedRuleContext(reflect.TypeOf((*<r.ctxName>)(nil)).Elem(),0)
}
>>
// should never be called
ContextRuleListGetterDecl(r) ::= <<
func <r.name>_list(self):
func <r.name; format="cap">_list(self):
return s.GetTypedRuleContexts(reflect.TypeOf((*<r.ctxName>)(nil)).Elem())
>>
ContextRuleListIndexedGetterDecl(r) ::= <<
<r.name>(i int) interface{} {
<r.name; format="cap">(i int) interface{} {
if i \< 0 {
return s.GetTypedRuleContexts(reflect.TypeOf((*<r.ctxName>)(nil)).Elem())
} else {
@ -721,7 +721,7 @@ func (s *<struct.name>) <if(method.isEnter)>Enter<else>Exit<endif>Rule(listener
VisitorDispatchMethod(method) ::= <<
func (s *<struct.name>) accept(Visitor antlr4.ParseTreeVisitor) interface{} {
func (s *<struct.name>) Accept(Visitor antlr4.ParseTreeVisitor) interface{} {
switch t := listener.(type) {
case *<parser.grammarName>Listener:
@ -799,7 +799,7 @@ Lexer(lexer, atn, actionFuncs, sempredFuncs, superClass) ::= <<
var serializedLexerAtn = <atn>
var lexerDeserializer = antlr4.NewATNDeserializer(nil)
var lexerAtn = lexerDeserializer.Deserialize( []rune( serializedLexerAtn ) )
var lexerAtn = lexerDeserializer.DeserializeFromUInt16( serializedLexerAtn )
var lexerModeNames = []string{ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> }
var lexerLiteralNames = []string{ <lexer.literalNames:{t | <t>}; null="\"\"", separator=", ", wrap, anchor> }
@ -853,7 +853,7 @@ const (
SerializedATN(model) ::= <<
<! only one segment, can be inlined !>
""
[]uint16{ <model.serialized; wrap={<\n> }> }
>>

View File

@ -53,105 +53,15 @@ public class GoTarget extends Target {
badWords.add("parserRule");
}
// /**
// * {@inheritDoc}
// * <p/>
// * For Java, this is the translation {@code 'a\n"'} &rarr; {@code "a\n\""}.
// * Expect single quotes around the incoming literal. Just flip the quotes
// * and replace double quotes with {@code \"}.
// * <p/>
// * Note that we have decided to allow people to use '\"' without penalty, so
// * we must build the target string in a loop as {@link String#replace}
// * cannot handle both {@code \"} and {@code "} without a lot of messing
// * around.
// */
// @Override
// public String getTargetStringLiteralFromANTLRStringLiteral(
// CodeGenerator generator,
// String literal, boolean addQuotes)
// {
// System.out.println(literal);
// System.out.println("GO TARGET!");
//
// StringBuilder sb = new StringBuilder();
// String is = literal;
//
// if ( addQuotes ) sb.append('"');
//
// for (int i = 1; i < is.length() -1; i++) {
// if (is.charAt(i) == '\\') {
// // Anything escaped is what it is! We assume that
// // people know how to escape characters correctly. However
// // we catch anything that does not need an escape in Java (which
// // is what the default implementation is dealing with and remove
// // the escape. The C target does this for instance.
// //
// switch (is.charAt(i+1)) {
// // Pass through any escapes that Java also needs
// //
// case '"':
// case 'n':
// case 'r':
// case 't':
// case 'b':
// case 'f':
// case '\\':
// // Pass the escape through
// sb.append('\\');
// break;
//
// case 'u': // Assume unnnn
// // Pass the escape through as double \\
// // so that Java leaves as \u0000 string not char
// sb.append('\\');
// sb.append('\\');
// break;
//
// default:
// // Remove the escape by virtue of not adding it here
// // Thus \' becomes ' and so on
// break;
// }
//
// // Go past the \ character
// i++;
// } else {
// // Characters that don't need \ in ANTLR 'strings' but do in Java
// if (is.charAt(i) == '"') {
// // We need to escape " in Java
// sb.append('\\');
// }
// }
// // Add in the next character, which may have been escaped
// sb.append(is.charAt(i));
// }
//
// if ( addQuotes ) sb.append('"');
//
// String s = sb.toString();
// System.out.println("AfTER: " + s);
// return s;
// }
	// Four zero characters used to left-pad serialized ATN values so every
	// emitted hex literal has a fixed four-digit (uint16) width.
	private final static String ZEROES = "0000";
// @Override
// public String encodeIntAsCharEscape(int v) {
// if (v < Character.MIN_VALUE || v > Character.MAX_VALUE) {
// throw new IllegalArgumentException(String.format("Cannot encode the specified value: %d", v));
// }
//
// if (v >= 0 && v < targetCharValueEscape.length && targetCharValueEscape[v] != null) {
// return targetCharValueEscape[v];
// }
//
// if (v >= 0x20 && v < 127) {
// return String.valueOf((char)v);
// }
//
// String hex = Integer.toHexString(v|0x10000).substring(1,5);
// String h2 = "\\u"+hex;
//
// return h2;
// }
@Override
public String encodeIntAsCharEscape(int v) {
// we encode as uint16 in hex format
String s = Integer.toString(v, 16);
String intAsString = s.length() <= 4 ? ZEROES.substring(s.length()) + s : s;
return "0x" + intAsString + ",";
}
@Override
public int getSerializedATNSegmentLimit() {