[git-p4: depot-paths = "//depot/code/antlr4/main/": change = 8637]
This commit is contained in:
parrt 2011-06-14 16:29:02 -08:00
parent 55f384237b
commit f6e54b3327
33 changed files with 19899 additions and 0 deletions

View File

@@ -0,0 +1,14 @@
// Parser for a minimal HTML subset; token types are imported from HTMLLexer.
parser grammar HTMLParser;
options { tokenVocab=HTMLLexer; }
// A file is one or more elements: either a tag (start or end, after TAG_START)
// or a run of raw text, followed by end of input. TEXT chunks are echoed.
file : ( TAG_START (starttag | endtag) | TEXT
{System.out.println("TEXT "+$TEXT);} )+ EOF ;
// Start tag: name, optional attributes, closing '>'.
starttag : ID attr* TAG_STOP ;
// Attribute: name with an optional =value (bare identifier or quoted string).
attr : ID (EQ (ID|STRING))? ;
// End tag is lexed as a single token; echoed for debugging.
endtag
: END_TAG {System.out.println("END tag "+$END_TAG);}
;

204
tool/playground/JavaLexer.g Normal file
View File

@@ -0,0 +1,204 @@
// Standalone Java lexer grammar (split out of JavaCombined.g — see the
// "// $ANTLR src" trailer comments on the rules below).
lexer grammar JavaLexer;
@members {
// When false, 'enum' / 'assert' are downgraded to Identifier so that
// pre-1.5 / pre-1.4 source that uses them as names still lexes.
protected boolean enumIsKeyword = true;
protected boolean assertIsKeyword = true;
}
// Implicit keyword/operator tokens. The T__NN names were auto-assigned by
// ANTLR when the literals appeared inline in the combined parser grammar.
T__25 : 'package' ;
T__26 : ';' ;
T__27 : 'import' ;
T__28 : 'static' ;
T__29 : '.' ;
T__30 : '*' ;
T__31 : 'public' ;
T__32 : 'protected' ;
T__33 : 'private' ;
T__34 : 'abstract' ;
T__35 : 'final' ;
T__36 : 'strictfp' ;
T__37 : 'class' ;
T__38 : 'extends' ;
T__39 : 'implements' ;
T__40 : '<' ;
T__41 : ',' ;
T__42 : '>' ;
T__43 : '&' ;
T__44 : '{' ;
T__45 : '}' ;
T__46 : 'interface' ;
T__47 : 'void' ;
T__48 : '[' ;
T__49 : ']' ;
T__50 : 'throws' ;
T__51 : '=' ;
T__52 : 'native' ;
T__53 : 'synchronized' ;
T__54 : 'transient' ;
T__55 : 'volatile' ;
T__56 : 'boolean' ;
T__57 : 'char' ;
T__58 : 'byte' ;
T__59 : 'short' ;
T__60 : 'int' ;
T__61 : 'long' ;
T__62 : 'float' ;
T__63 : 'double' ;
T__64 : '?' ;
T__65 : 'super' ;
T__66 : '(' ;
T__67 : ')' ;
T__68 : '...' ;
T__69 : 'this' ;
T__70 : 'null' ;
T__71 : 'true' ;
T__72 : 'false' ;
T__73 : '@' ;
T__74 : 'default' ;
T__75 : ':' ;
T__76 : 'if' ;
T__77 : 'else' ;
T__78 : 'for' ;
T__79 : 'while' ;
T__80 : 'do' ;
T__81 : 'try' ;
T__82 : 'finally' ;
T__83 : 'switch' ;
T__84 : 'return' ;
T__85 : 'throw' ;
T__86 : 'break' ;
T__87 : 'continue' ;
T__88 : 'catch' ;
T__89 : 'case' ;
T__90 : '+=' ;
T__91 : '-=' ;
T__92 : '*=' ;
T__93 : '/=' ;
T__94 : '&=' ;
T__95 : '|=' ;
T__96 : '^=' ;
T__97 : '%=' ;
T__98 : '||' ;
T__99 : '&&' ;
T__100 : '|' ;
T__101 : '^' ;
T__102 : '==' ;
T__103 : '!=' ;
T__104 : 'instanceof' ;
T__105 : '+' ;
T__106 : '-' ;
T__107 : '/' ;
T__108 : '%' ;
T__109 : '++' ;
T__110 : '--' ;
T__111 : '~' ;
T__112 : '!' ;
T__113 : 'new' ;
// Integer and floating-point literal rules.
// $ANTLR src "JavaCombined.g" 911
// 0x/0X prefix, one or more hex digits, optional l/L suffix.
HexLiteral : '0' ('x'|'X') HexDigit+ IntegerTypeSuffix? ;// $ANTLR src "JavaCombined.g" 913
// Plain 0, or a non-zero-leading decimal number; optional l/L suffix.
DecimalLiteral : ('0' | '1'..'9' '0'..'9'*) IntegerTypeSuffix? ;// $ANTLR src "JavaCombined.g" 915
// Leading 0 followed by octal digits; optional l/L suffix.
OctalLiteral : '0' ('0'..'7')+ IntegerTypeSuffix? ;// $ANTLR src "JavaCombined.g" 917
fragment
HexDigit : ('0'..'9'|'a'..'f'|'A'..'F') ;// $ANTLR src "JavaCombined.g" 920
fragment
IntegerTypeSuffix : ('l'|'L') ;// $ANTLR src "JavaCombined.g" 923
// Decimal floats (alts 1-4) and hex floats with a mandatory binary
// exponent p/P (last alt, JLS 3.10.2).
FloatingPointLiteral
: ('0'..'9')+ '.' ('0'..'9')* Exponent? FloatTypeSuffix?
| '.' ('0'..'9')+ Exponent? FloatTypeSuffix?
| ('0'..'9')+ Exponent FloatTypeSuffix?
| ('0'..'9')+ FloatTypeSuffix
| ('0x' | '0X') (HexDigit )*
('.' (HexDigit)*)?
( 'p' | 'P' )
( '+' | '-' )?
( '0' .. '9' )+
FloatTypeSuffix?
;// $ANTLR src "JavaCombined.g" 930
fragment
Exponent : ('e'|'E') ('+'|'-')? ('0'..'9')+ ;// $ANTLR src "JavaCombined.g" 933
fragment
FloatTypeSuffix : ('f'|'F'|'d'|'D') ;// $ANTLR src "JavaCombined.g" 936
// Character and string literals with escape support.
CharacterLiteral
: '\'' ( EscapeSequence | ~('\''|'\\') ) '\''
;// $ANTLR src "JavaCombined.g" 940
StringLiteral
: '"' ( EscapeSequence | ~('\\'|'"') )* '"'
;// $ANTLR src "JavaCombined.g" 944
// Simple backslash escapes, \uXXXX unicode escapes, or octal escapes.
fragment
EscapeSequence
: '\\' ('b'|'t'|'n'|'f'|'r'|'\"'|'\''|'\\')
| UnicodeEscape
| OctalEscape
;// $ANTLR src "JavaCombined.g" 951
// One to three octal digits; a three-digit form may start only with 0-3
// so the value fits in a char (JLS 3.10.6).
fragment
OctalEscape
: '\\' ('0'..'3') ('0'..'7') ('0'..'7')
| '\\' ('0'..'7') ('0'..'7')
| '\\' ('0'..'7')
;// $ANTLR src "JavaCombined.g" 958
fragment
UnicodeEscape
: '\\' 'u' HexDigit HexDigit HexDigit HexDigit
;// $ANTLR src "JavaCombined.g" 963
// 'enum' / 'assert' are conditional keywords: when the corresponding flag in
// @members is false, the action retargets the token type to Identifier.
ENUM: 'enum' {if (!enumIsKeyword) state.type=Identifier;}
;// $ANTLR src "JavaCombined.g" 966
ASSERT
: 'assert' {if (!assertIsKeyword) state.type=Identifier;}
;// $ANTLR src "JavaCombined.g" 970
// Must come after all keyword rules so keywords win by rule order.
Identifier
: Letter (Letter|JavaIDDigit)*
;// $ANTLR src "JavaCombined.g" 974
/** Identifier start characters ($, _, ASCII letters, plus selected Unicode
 *  ranges). I found this char range in JavaCC's grammar, but Letter and
 *  Digit overlap. Still works, but...
 */
fragment
Letter
: '\u0024' |
'\u0041'..'\u005a' |
'\u005f' |
'\u0061'..'\u007a' |
'\u00c0'..'\u00d6' |
'\u00d8'..'\u00f6' |
'\u00f8'..'\u00ff' |
'\u0100'..'\u1fff' |
'\u3040'..'\u318f' |
'\u3300'..'\u337f' |
'\u3400'..'\u3d2d' |
'\u4e00'..'\u9fff' |
'\uf900'..'\ufaff'
;// $ANTLR src "JavaCombined.g" 994
// Decimal digit ranges from several Unicode scripts (ASCII, Arabic-Indic,
// various Indic scripts, Thai, Lao, Myanmar).
fragment
JavaIDDigit
: '\u0030'..'\u0039' |
'\u0660'..'\u0669' |
'\u06f0'..'\u06f9' |
'\u0966'..'\u096f' |
'\u09e6'..'\u09ef' |
'\u0a66'..'\u0a6f' |
'\u0ae6'..'\u0aef' |
'\u0b66'..'\u0b6f' |
'\u0be7'..'\u0bef' |
'\u0c66'..'\u0c6f' |
'\u0ce6'..'\u0cef' |
'\u0d66'..'\u0d6f' |
'\u0e50'..'\u0e59' |
'\u0ed0'..'\u0ed9' |
'\u1040'..'\u1049'
;// $ANTLR src "JavaCombined.g" 1013
// Whitespace is discarded.
WS : (' '|'\r'|'\t'|'\u000C'|'\n')+ {skip();}
;
// NOTE(review): this rule requires a terminating newline, so a // comment on
// the very last line of a file (no trailing '\n') will not match — confirm
// whether that is acceptable for the playground.
LINE_COMMENT
: '//' ~('\n'|'\r')* '\r'? '\n' {skip();}
;
// '/*' switches into COMMENT_MODE; more() keeps accumulating the comment text.
COMMENT_START
: '/*' {pushMode(COMMENT_MODE); more();}
;
// Inside a block comment: '*/' ends it (discarding the whole comment and
// popping back to the previous mode); any other char is accumulated.
mode COMMENT_MODE;
COMMENT : '*/' {skip(); popMode();} ;
COMMENT_INSIDE : . {more();} ;

View File

@@ -0,0 +1,612 @@
// $ANTLR ANTLRVersion> JavaLexer.java generatedTimestamp>
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.LexerSharedState;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
import org.antlr.runtime.*;
/**
 * Generated lexer for the Java grammar (JavaLexer.g). The tables below are
 * emitted by the ANTLR tool and must stay in sync with the serialized ATN;
 * do not edit them by hand.
 */
public class JavaLexer extends Lexer {
// Token type constants; values match the tokenNames indices used by the ATN.
public static final int
EOR=1, T__25=4, T__26=5, T__27=6, T__28=7, T__29=8, T__30=9, T__31=10,
T__32=11, T__33=12, T__34=13, T__35=14, T__36=15, T__37=16, T__38=17,
T__39=18, T__40=19, T__41=20, T__42=21, T__43=22, T__44=23, T__45=24,
T__46=25, T__47=26, T__48=27, T__49=28, T__50=29, T__51=30, T__52=31,
T__53=32, T__54=33, T__55=34, T__56=35, T__57=36, T__58=37, T__59=38,
T__60=39, T__61=40, T__62=41, T__63=42, T__64=43, T__65=44, T__66=45,
T__67=46, T__68=47, T__69=48, T__70=49, T__71=50, T__72=51, T__73=52,
T__74=53, T__75=54, T__76=55, T__77=56, T__78=57, T__79=58, T__80=59,
T__81=60, T__82=61, T__83=62, T__84=63, T__85=64, T__86=65, T__87=66,
T__88=67, T__89=68, T__90=69, T__91=70, T__92=71, T__93=72, T__94=73,
T__95=74, T__96=75, T__97=76, T__98=77, T__99=78, T__100=79, T__101=80,
T__102=81, T__103=82, T__104=83, T__105=84, T__106=85, T__107=86,
T__108=87, T__109=88, T__110=89, T__111=90, T__112=91, T__113=92,
HexLiteral=93, DecimalLiteral=94, OctalLiteral=95, FloatingPointLiteral=96,
CharacterLiteral=97, StringLiteral=98, ENUM=99, ASSERT=100, Identifier=101,
WS=102, LINE_COMMENT=103, COMMENT_START=104, COMMENT=105, COMMENT_INSIDE=106;
// Lexer modes: DEFAULT_MODE plus the block-comment mode from the grammar.
public static final int DEFAULT_MODE = 0;
public static final int COMMENT_MODE = 1;
// Display names indexed by token type (first three slots are unused).
public static final String[] tokenNames = {
"<INVALID>", "<INVALID>", "<INVALID>",
"EOR", "'package'", "';'", "'import'", "'static'", "'.'", "'*'",
"'public'", "'protected'", "'private'", "'abstract'", "'final'",
"'strictfp'", "'class'", "'extends'", "'implements'", "'<'", "','",
"'>'", "'&'", "'{'", "'}'", "'interface'", "'void'", "'['", "']'",
"'throws'", "'='", "'native'", "'synchronized'", "'transient'",
"'volatile'", "'boolean'", "'char'", "'byte'", "'short'", "'int'",
"'long'", "'float'", "'double'", "'?'", "'super'", "'('", "')'",
"'...'", "'this'", "'null'", "'true'", "'false'", "'@'", "'default'",
"':'", "'if'", "'else'", "'for'", "'while'", "'do'", "'try'", "'finally'",
"'switch'", "'return'", "'throw'", "'break'", "'continue'", "'catch'",
"'case'", "'+='", "'-='", "'*='", "'/='", "'&='", "'|='", "'^='",
"'%='", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'instanceof'",
"'+'", "'-'", "'/'", "'%'", "'++'", "'--'", "'~'", "'!'", "'new'",
"HexLiteral", "DecimalLiteral", "OctalLiteral", "FloatingPointLiteral",
"CharacterLiteral", "StringLiteral", "ENUM", "ASSERT", "Identifier",
"WS", "LINE_COMMENT", "COMMENT_START", "COMMENT", "COMMENT_INSIDE"
};
// Grammar rule names indexed by rule number; includes fragment rules
// (HexDigit, Exponent, Letter, ...) which have no token type of their own.
public static final String[] ruleNames = {
"<INVALID>",
"T__25", "T__26", "T__27", "T__28", "T__29", "T__30", "T__31", "T__32",
"T__33", "T__34", "T__35", "T__36", "T__37", "T__38", "T__39", "T__40",
"T__41", "T__42", "T__43", "T__44", "T__45", "T__46", "T__47", "T__48",
"T__49", "T__50", "T__51", "T__52", "T__53", "T__54", "T__55", "T__56",
"T__57", "T__58", "T__59", "T__60", "T__61", "T__62", "T__63", "T__64",
"T__65", "T__66", "T__67", "T__68", "T__69", "T__70", "T__71", "T__72",
"T__73", "T__74", "T__75", "T__76", "T__77", "T__78", "T__79", "T__80",
"T__81", "T__82", "T__83", "T__84", "T__85", "T__86", "T__87", "T__88",
"T__89", "T__90", "T__91", "T__92", "T__93", "T__94", "T__95", "T__96",
"T__97", "T__98", "T__99", "T__100", "T__101", "T__102", "T__103",
"T__104", "T__105", "T__106", "T__107", "T__108", "T__109", "T__110",
"T__111", "T__112", "T__113", "HexLiteral", "DecimalLiteral", "OctalLiteral",
"HexDigit", "IntegerTypeSuffix", "FloatingPointLiteral", "Exponent",
"FloatTypeSuffix", "CharacterLiteral", "StringLiteral", "EscapeSequence",
"OctalEscape", "UnicodeEscape", "ENUM", "ASSERT", "Identifier",
"Letter", "JavaIDDigit", "WS", "LINE_COMMENT", "COMMENT_START",
"COMMENT", "COMMENT_INSIDE"
};
// Copied from the grammar's @members block: when false, 'enum' / 'assert'
// are emitted as Identifier tokens (see action()).
protected boolean enumIsKeyword = true;
protected boolean assertIsKeyword = true;
/** Creates a lexer over {@code input} with its own fresh shared state. */
public JavaLexer(CharStream input) {
this(input, new LexerSharedState());
}
/**
 * Creates a lexer over {@code input} using the caller-supplied shared
 * {@code state}, and wires up the ATN-driven interpreter.
 */
public JavaLexer(CharStream input, LexerSharedState state) {
super(input,state);
_interp = new LexerInterpreter(this,_ATN);
}
/**
 * Returns the name of the source grammar this lexer was generated from.
 * Previously returned "JavaLexer.java" (the generated class file); the
 * contract of this method is to report the grammar file, so error and
 * diagnostic messages should point at JavaLexer.g instead.
 */
public String getGrammarFileName() { return "JavaLexer.g"; }
/** Returns the token display names, indexed by token type. */
@Override
public String[] getTokenNames() { return tokenNames; }
/** Returns the lexer rule names, indexed by rule number. */
@Override
public String[] getRuleNames() { return ruleNames; }
/** Returns the deserialized ATN that drives this lexer's interpreter. */
@Override
public ATN getATN() { return _ATN; }
/**
 * Executes the embedded grammar action identified by {@code actionIndex}
 * when the interpreter reaches it. Indices map to the inline {...} actions
 * in JavaLexer.g; {@code ruleIndex} is unused because the indices are
 * globally unique.
 */
public void action(int ruleIndex, int actionIndex) {
	// ENUM / ASSERT: demote the keyword to a plain Identifier when disabled.
	if (actionIndex == 1) {
		if (!enumIsKeyword) state.type=Identifier;
	}
	else if (actionIndex == 2) {
		if (!assertIsKeyword) state.type=Identifier;
	}
	// WS and LINE_COMMENT are both simply discarded.
	else if (actionIndex == 3 || actionIndex == 4) {
		skip();
	}
	// '/*' enters the block-comment mode and keeps accumulating text.
	else if (actionIndex == 5) {
		pushMode(COMMENT_MODE); more();
	}
	// '*/' discards the accumulated comment and returns to the prior mode.
	else if (actionIndex == 6) {
		skip(); popMode();
	}
	// Any other character inside a comment: keep accumulating.
	else if (actionIndex == 7) {
		more();
	}
}
public static final String _serializedATN =
"\030\155\u041e\06\00\06\00\02\01\07\01\02\02\07\02\02\03\07\03\02\04"+
"\07\04\02\05\07\05\02\06\07\06\02\07\07\07\02\010\07\010\02\011\07"+
"\011\02\012\07\012\02\013\07\013\02\014\07\014\02\015\07\015\02\016"+
"\07\016\02\017\07\017\02\020\07\020\02\021\07\021\02\022\07\022\02"+
"\023\07\023\02\024\07\024\02\025\07\025\02\026\07\026\02\027\07\027"+
"\02\030\07\030\02\031\07\031\02\032\07\032\02\033\07\033\02\034\07"+
"\034\02\035\07\035\02\036\07\036\02\037\07\037\02\040\07\040\02\041"+
"\07\041\02\042\07\042\02\043\07\043\02\044\07\044\02\045\07\045\02"+
"\046\07\046\02\047\07\047\02\050\07\050\02\051\07\051\02\052\07\052"+
"\02\053\07\053\02\054\07\054\02\055\07\055\02\056\07\056\02\057\07"+
"\057\02\060\07\060\02\061\07\061\02\062\07\062\02\063\07\063\02\064"+
"\07\064\02\065\07\065\02\066\07\066\02\067\07\067\02\070\07\070\02"+
"\071\07\071\02\072\07\072\02\073\07\073\02\074\07\074\02\075\07\075"+
"\02\076\07\076\02\077\07\077\02\100\07\100\02\101\07\101\02\102\07"+
"\102\02\103\07\103\02\104\07\104\02\105\07\105\02\106\07\106\02\107"+
"\07\107\02\110\07\110\02\111\07\111\02\112\07\112\02\113\07\113\02"+
"\114\07\114\02\115\07\115\02\116\07\116\02\117\07\117\02\120\07\120"+
"\02\121\07\121\02\122\07\122\02\123\07\123\02\124\07\124\02\125\07"+
"\125\02\126\07\126\02\127\07\127\02\130\07\130\02\131\07\131\02\132"+
"\07\132\02\133\07\133\02\134\07\134\02\135\07\135\02\136\07\136\02"+
"\137\07\137\02\140\07\140\02\141\07\141\02\142\07\142\02\143\07\143"+
"\02\144\07\144\02\145\07\145\02\146\07\146\02\147\07\147\02\150\07"+
"\150\02\151\07\151\02\152\07\152\02\153\07\153\02\154\07\154\02\155"+
"\07\155\02\156\07\156\02\157\07\157\02\160\07\160\01\01\01\01\01\01"+
"\01\01\01\01\01\01\01\01\01\01\01\02\01\02\01\03\01\03\01\03\01\03"+
"\01\03\01\03\01\03\01\04\01\04\01\04\01\04\01\04\01\04\01\04\01\05"+
"\01\05\01\06\01\06\01\07\01\07\01\07\01\07\01\07\01\07\01\07\01\010"+
"\01\010\01\010\01\010\01\010\01\010\01\010\01\010\01\010\01\010\01"+
"\011\01\011\01\011\01\011\01\011\01\011\01\011\01\011\01\012\01\012"+
"\01\012\01\012\01\012\01\012\01\012\01\012\01\012\01\013\01\013\01"+
"\013\01\013\01\013\01\013\01\014\01\014\01\014\01\014\01\014\01\014"+
"\01\014\01\014\01\014\01\015\01\015\01\015\01\015\01\015\01\015\01"+
"\016\01\016\01\016\01\016\01\016\01\016\01\016\01\016\01\017\01\017"+
"\01\017\01\017\01\017\01\017\01\017\01\017\01\017\01\017\01\017\01"+
"\020\01\020\01\021\01\021\01\022\01\022\01\023\01\023\01\024\01\024"+
"\01\025\01\025\01\026\01\026\01\026\01\026\01\026\01\026\01\026\01"+
"\026\01\026\01\026\01\027\01\027\01\027\01\027\01\027\01\030\01\030"+
"\01\031\01\031\01\032\01\032\01\032\01\032\01\032\01\032\01\032\01"+
"\033\01\033\01\034\01\034\01\034\01\034\01\034\01\034\01\034\01\035"+
"\01\035\01\035\01\035\01\035\01\035\01\035\01\035\01\035\01\035\01"+
"\035\01\035\01\035\01\036\01\036\01\036\01\036\01\036\01\036\01\036"+
"\01\036\01\036\01\036\01\037\01\037\01\037\01\037\01\037\01\037\01"+
"\037\01\037\01\037\01\040\01\040\01\040\01\040\01\040\01\040\01\040"+
"\01\040\01\041\01\041\01\041\01\041\01\041\01\042\01\042\01\042\01"+
"\042\01\042\01\043\01\043\01\043\01\043\01\043\01\043\01\044\01\044"+
"\01\044\01\044\01\045\01\045\01\045\01\045\01\045\01\046\01\046\01"+
"\046\01\046\01\046\01\046\01\047\01\047\01\047\01\047\01\047\01\047"+
"\01\047\01\050\01\050\01\051\01\051\01\051\01\051\01\051\01\051\01"+
"\052\01\052\01\053\01\053\01\054\01\054\01\054\01\054\01\055\01\055"+
"\01\055\01\055\01\055\01\056\01\056\01\056\01\056\01\056\01\057\01"+
"\057\01\057\01\057\01\057\01\060\01\060\01\060\01\060\01\060\01\060"+
"\01\061\01\061\01\062\01\062\01\062\01\062\01\062\01\062\01\062\01"+
"\062\01\063\01\063\01\064\01\064\01\064\01\065\01\065\01\065\01\065"+
"\01\065\01\066\01\066\01\066\01\066\01\067\01\067\01\067\01\067\01"+
"\067\01\067\01\070\01\070\01\070\01\071\01\071\01\071\01\071\01\072"+
"\01\072\01\072\01\072\01\072\01\072\01\072\01\072\01\073\01\073\01"+
"\073\01\073\01\073\01\073\01\073\01\074\01\074\01\074\01\074\01\074"+
"\01\074\01\074\01\075\01\075\01\075\01\075\01\075\01\075\01\076\01"+
"\076\01\076\01\076\01\076\01\076\01\077\01\077\01\077\01\077\01\077"+
"\01\077\01\077\01\077\01\077\01\100\01\100\01\100\01\100\01\100\01"+
"\100\01\101\01\101\01\101\01\101\01\101\01\102\01\102\01\102\01\103"+
"\01\103\01\103\01\104\01\104\01\104\01\105\01\105\01\105\01\106\01"+
"\106\01\106\01\107\01\107\01\107\01\110\01\110\01\110\01\111\01\111"+
"\01\111\01\112\01\112\01\112\01\113\01\113\01\113\01\114\01\114\01"+
"\115\01\115\01\116\01\116\01\116\01\117\01\117\01\117\01\120\01\120"+
"\01\120\01\120\01\120\01\120\01\120\01\120\01\120\01\120\01\120\01"+
"\121\01\121\01\122\01\122\01\123\01\123\01\124\01\124\01\125\01\125"+
"\01\125\01\126\01\126\01\126\01\127\01\127\01\130\01\130\01\131\01"+
"\131\01\131\01\131\01\132\01\132\01\132\01\132\01\132\01\132\03\132"+
"\010\132\01\132\01\132\04\132\010\132\012\132\01\132\01\132\01\132"+
"\03\132\010\132\01\133\01\133\01\133\01\133\01\133\01\133\05\133\010"+
"\133\011\133\01\133\03\133\010\133\01\133\01\133\03\133\010\133\01"+
"\134\01\134\01\134\01\134\04\134\010\134\012\134\01\134\01\134\01"+
"\134\03\134\010\134\01\135\01\135\01\135\01\135\01\135\01\135\03\135"+
"\010\135\01\136\01\136\01\136\01\136\03\136\010\136\01\137\01\137"+
"\04\137\010\137\012\137\01\137\01\137\01\137\01\137\01\137\05\137"+
"\010\137\011\137\01\137\01\137\01\137\03\137\010\137\01\137\01\137"+
"\03\137\010\137\01\137\01\137\01\137\01\137\04\137\010\137\012\137"+
"\01\137\01\137\01\137\03\137\010\137\01\137\01\137\03\137\010\137"+
"\01\137\01\137\04\137\010\137\012\137\01\137\01\137\01\137\01\137"+
"\01\137\03\137\010\137\01\137\01\137\04\137\010\137\012\137\01\137"+
"\01\137\01\137\01\137\01\137\01\137\01\137\01\137\01\137\03\137\010"+
"\137\01\137\01\137\05\137\010\137\011\137\01\137\01\137\01\137\01"+
"\137\01\137\05\137\010\137\011\137\01\137\03\137\010\137\01\137\01"+
"\137\01\137\01\137\03\137\010\137\01\137\01\137\01\137\01\137\03\137"+
"\010\137\01\137\01\137\04\137\010\137\012\137\01\137\01\137\01\137"+
"\03\137\010\137\03\137\010\137\01\140\01\140\01\140\01\140\03\140"+
"\010\140\01\140\01\140\01\140\01\140\03\140\010\140\01\140\01\140"+
"\04\140\010\140\012\140\01\140\01\141\01\141\01\141\01\141\01\141"+
"\01\141\01\141\01\141\03\141\010\141\01\142\01\142\01\142\01\142\01"+
"\142\01\142\03\142\010\142\01\142\01\142\01\143\01\143\01\143\01\143"+
"\01\143\01\143\05\143\010\143\011\143\01\143\01\143\01\143\01\144"+
"\01\144\01\144\01\144\01\144\01\144\01\144\01\144\01\144\01\144\01"+
"\144\01\144\01\144\01\144\01\144\01\144\01\144\01\144\03\144\010\144"+
"\01\144\01\144\01\144\01\144\03\144\010\144\01\145\01\145\01\145\01"+
"\145\01\145\01\145\01\145\01\145\01\145\01\145\01\145\01\145\01\145"+
"\01\145\01\145\01\145\01\145\01\145\03\145\010\145\01\146\01\146\01"+
"\146\01\146\01\146\01\146\01\146\01\146\01\146\01\146\01\146\01\146"+
"\01\147\01\147\01\147\01\147\01\147\01\147\01\150\01\150\01\150\01"+
"\150\01\150\01\150\01\150\01\150\01\151\01\151\01\151\01\151\01\151"+
"\01\151\05\151\010\151\011\151\01\151\01\152\01\152\01\152\01\152"+
"\01\152\01\152\01\152\01\152\01\152\01\152\01\152\01\152\01\152\01"+
"\152\01\152\01\152\01\152\01\152\01\152\01\152\01\152\01\152\01\152"+
"\01\152\01\152\01\152\03\152\010\152\01\153\01\153\01\153\01\153\01"+
"\153\01\153\01\153\01\153\01\153\01\153\01\153\01\153\01\153\01\153"+
"\01\153\01\153\01\153\01\153\01\153\01\153\01\153\01\153\01\153\01"+
"\153\01\153\01\153\01\153\01\153\01\153\01\153\03\153\010\153\01\154"+
"\01\154\01\154\01\154\01\154\01\154\01\154\01\154\01\154\01\154\04"+
"\154\010\154\012\154\01\154\01\154\01\155\01\155\01\155\01\155\01"+
"\155\05\155\010\155\011\155\01\155\01\155\01\155\03\155\010\155\01"+
"\155\01\155\01\155\01\156\01\156\01\156\01\156\01\157\01\157\01\157"+
"\01\157\01\160\01\160\01\160\160\02\04\00\04\05\00\06\06\00\010\07"+
"\00\012\010\00\014\011\00\016\012\00\020\013\00\022\014\00\024\015"+
"\00\026\016\00\030\017\00\032\020\00\034\021\00\036\022\00\040\023"+
"\00\042\024\00\044\025\00\046\026\00\050\027\00\052\030\00\054\031"+
"\00\056\032\00\060\033\00\062\034\00\064\035\00\066\036\00\070\037"+
"\00\072\040\00\074\041\00\076\042\00\100\043\00\102\044\00\104\045"+
"\00\106\046\00\110\047\00\112\050\00\114\051\00\116\052\00\120\053"+
"\00\122\054\00\124\055\00\126\056\00\130\057\00\132\060\00\134\061"+
"\00\136\062\00\140\063\00\142\064\00\144\065\00\146\066\00\150\067"+
"\00\152\070\00\154\071\00\156\072\00\160\073\00\162\074\00\164\075"+
"\00\166\076\00\170\077\00\172\100\00\174\101\00\176\102\00\u0080\103"+
"\00\u0082\104\00\u0084\105\00\u0086\106\00\u0088\107\00\u008a\110"+
"\00\u008c\111\00\u008e\112\00\u0090\113\00\u0092\114\00\u0094\115"+
"\00\u0096\116\00\u0098\117\00\u009a\120\00\u009c\121\00\u009e\122"+
"\00\u00a0\123\00\u00a2\124\00\u00a4\125\00\u00a6\126\00\u00a8\127"+
"\00\u00aa\130\00\u00ac\131\00\u00ae\132\00\u00b0\133\00\u00b2\134"+
"\00\u00b4\135\00\u00b6\136\00\u00b8\137\00\u00ba\uffff\00\u00bc\uffff"+
"\00\u00be\140\00\u00c0\uffff\00\u00c2\uffff\00\u00c4\141\00\u00c6"+
"\142\00\u00c8\uffff\00\u00ca\uffff\00\u00cc\uffff\00\u00ce\143\01"+
"\u00d0\144\02\u00d2\145\00\u00d4\uffff\00\u00d6\uffff\00\u00d8\146"+
"\03\u00da\147\04\u00dc\150\05\u00de\151\06\u00e0\152\07\02\00\01\03"+
"\02\047\047\134\134\02\042\042\134\134\02\012\012\015\015\u0471\00"+
"\02\01\00\00\00\04\01\00\00\00\06\01\00\00\00\010\01\00\00\00\012"+
"\01\00\00\00\014\01\00\00\00\016\01\00\00\00\020\01\00\00\00\022\01"+
"\00\00\00\024\01\00\00\00\026\01\00\00\00\030\01\00\00\00\032\01\00"+
"\00\00\034\01\00\00\00\036\01\00\00\00\040\01\00\00\00\042\01\00\00"+
"\00\044\01\00\00\00\046\01\00\00\00\050\01\00\00\00\052\01\00\00\00"+
"\054\01\00\00\00\056\01\00\00\00\060\01\00\00\00\062\01\00\00\00\064"+
"\01\00\00\00\066\01\00\00\00\070\01\00\00\00\072\01\00\00\00\074\01"+
"\00\00\00\076\01\00\00\00\100\01\00\00\00\102\01\00\00\00\104\01\00"+
"\00\00\106\01\00\00\00\110\01\00\00\00\112\01\00\00\00\114\01\00\00"+
"\00\116\01\00\00\00\120\01\00\00\00\122\01\00\00\00\124\01\00\00\00"+
"\126\01\00\00\00\130\01\00\00\00\132\01\00\00\00\134\01\00\00\00\136"+
"\01\00\00\00\140\01\00\00\00\142\01\00\00\00\144\01\00\00\00\146\01"+
"\00\00\00\150\01\00\00\00\152\01\00\00\00\154\01\00\00\00\156\01\00"+
"\00\00\160\01\00\00\00\162\01\00\00\00\164\01\00\00\00\166\01\00\00"+
"\00\170\01\00\00\00\172\01\00\00\00\174\01\00\00\00\176\01\00\00\00"+
"\u0080\01\00\00\00\u0082\01\00\00\00\u0084\01\00\00\00\u0086\01\00"+
"\00\00\u0088\01\00\00\00\u008a\01\00\00\00\u008c\01\00\00\00\u008e"+
"\01\00\00\00\u0090\01\00\00\00\u0092\01\00\00\00\u0094\01\00\00\00"+
"\u0096\01\00\00\00\u0098\01\00\00\00\u009a\01\00\00\00\u009c\01\00"+
"\00\00\u009e\01\00\00\00\u00a0\01\00\00\00\u00a2\01\00\00\00\u00a4"+
"\01\00\00\00\u00a6\01\00\00\00\u00a8\01\00\00\00\u00aa\01\00\00\00"+
"\u00ac\01\00\00\00\u00ae\01\00\00\00\u00b0\01\00\00\00\u00b2\01\00"+
"\00\00\u00b4\01\00\00\00\u00b6\01\00\00\00\u00b8\01\00\00\00\u00be"+
"\01\00\00\00\u00c4\01\00\00\00\u00c6\01\00\00\00\u00ce\01\00\00\00"+
"\u00d0\01\00\00\00\u00d2\01\00\00\00\u00d8\01\00\00\00\u00da\01\00"+
"\00\00\u00dc\01\00\00\01\u00de\01\00\00\01\u00e0\01\00\00\02\u00e2"+
"\01\00\00\04\u00ea\01\00\00\06\u00ec\01\00\00\010\u00f3\01\00\00\012"+
"\u00fa\01\00\00\014\u00fc\01\00\00\016\u00fe\01\00\00\020\u0105\01"+
"\00\00\022\u010f\01\00\00\024\u0117\01\00\00\026\u0120\01\00\00\030"+
"\u0126\01\00\00\032\u012f\01\00\00\034\u0135\01\00\00\036\u013d\01"+
"\00\00\040\u0148\01\00\00\042\u014a\01\00\00\044\u014c\01\00\00\046"+
"\u014e\01\00\00\050\u0150\01\00\00\052\u0152\01\00\00\054\u0154\01"+
"\00\00\056\u015e\01\00\00\060\u0163\01\00\00\062\u0165\01\00\00\064"+
"\u0167\01\00\00\066\u016e\01\00\00\070\u0170\01\00\00\072\u0177\01"+
"\00\00\074\u0184\01\00\00\076\u018e\01\00\00\100\u0197\01\00\00\102"+
"\u019f\01\00\00\104\u01a4\01\00\00\106\u01a9\01\00\00\110\u01af\01"+
"\00\00\112\u01b3\01\00\00\114\u01b8\01\00\00\116\u01be\01\00\00\120"+
"\u01c5\01\00\00\122\u01c7\01\00\00\124\u01cd\01\00\00\126\u01cf\01"+
"\00\00\130\u01d1\01\00\00\132\u01d5\01\00\00\134\u01da\01\00\00\136"+
"\u01df\01\00\00\140\u01e4\01\00\00\142\u01ea\01\00\00\144\u01ec\01"+
"\00\00\146\u01f4\01\00\00\150\u01f6\01\00\00\152\u01f9\01\00\00\154"+
"\u01fe\01\00\00\156\u0202\01\00\00\160\u0208\01\00\00\162\u020b\01"+
"\00\00\164\u020f\01\00\00\166\u0217\01\00\00\170\u021e\01\00\00\172"+
"\u0225\01\00\00\174\u022b\01\00\00\176\u0231\01\00\00\u0080\u023a"+
"\01\00\00\u0082\u0240\01\00\00\u0084\u0245\01\00\00\u0086\u0248\01"+
"\00\00\u0088\u024b\01\00\00\u008a\u024e\01\00\00\u008c\u0251\01\00"+
"\00\u008e\u0254\01\00\00\u0090\u0257\01\00\00\u0092\u025a\01\00\00"+
"\u0094\u025d\01\00\00\u0096\u0260\01\00\00\u0098\u0263\01\00\00\u009a"+
"\u0265\01\00\00\u009c\u0267\01\00\00\u009e\u026a\01\00\00\u00a0\u026d"+
"\01\00\00\u00a2\u0278\01\00\00\u00a4\u027a\01\00\00\u00a6\u027c\01"+
"\00\00\u00a8\u027e\01\00\00\u00aa\u0280\01\00\00\u00ac\u0283\01\00"+
"\00\u00ae\u0286\01\00\00\u00b0\u0288\01\00\00\u00b2\u028a\01\00\00"+
"\u00b4\u028e\01\00\00\u00b6\u02aa\01\00\00\u00b8\u02b0\01\00\00\u00ba"+
"\u02c2\01\00\00\u00bc\u02c8\01\00\00\u00be\u0332\01\00\00\u00c0\u0338"+
"\01\00\00\u00c2\u034e\01\00\00\u00c4\u0350\01\00\00\u00c6\u035a\01"+
"\00\00\u00c8\u037e\01\00\00\u00ca\u0392\01\00\00\u00cc\u0394\01\00"+
"\00\u00ce\u03a0\01\00\00\u00d0\u03a6\01\00\00\u00d2\u03ae\01\00\00"+
"\u00d4\u03d2\01\00\00\u00d6\u03f2\01\00\00\u00d8\u03fe\01\00\00\u00da"+
"\u0403\01\00\00\u00dc\u0413\01\00\00\u00de\u0417\01\00\00\u00e0\u041b"+
"\01\00\00\u00e2\u00e3\05\160\00\u00e3\u00e4\05\141\00\u00e4\u00e5"+
"\05\143\00\u00e5\u00e6\05\153\00\u00e6\u00e7\05\141\00\u00e7\u00e8"+
"\05\147\00\u00e8\u00e9\05\145\00\u00e9\03\01\00\00\u00ea\u00eb\05"+
"\073\00\u00eb\05\01\00\00\u00ec\u00ed\05\151\00\u00ed\u00ee\05\155"+
"\00\u00ee\u00ef\05\160\00\u00ef\u00f0\05\157\00\u00f0\u00f1\05\162"+
"\00\u00f1\u00f2\05\164\00\u00f2\07\01\00\00\u00f3\u00f4\05\163\00"+
"\u00f4\u00f5\05\164\00\u00f5\u00f6\05\141\00\u00f6\u00f7\05\164\00"+
"\u00f7\u00f8\05\151\00\u00f8\u00f9\05\143\00\u00f9\011\01\00\00\u00fa"+
"\u00fb\05\056\00\u00fb\013\01\00\00\u00fc\u00fd\05\052\00\u00fd\015"+
"\01\00\00\u00fe\u00ff\05\160\00\u00ff\u0100\05\165\00\u0100\u0101"+
"\05\142\00\u0101\u0102\05\154\00\u0102\u0103\05\151\00\u0103\u0104"+
"\05\143\00\u0104\017\01\00\00\u0105\u0106\05\160\00\u0106\u0107\05"+
"\162\00\u0107\u0108\05\157\00\u0108\u0109\05\164\00\u0109\u010a\05"+
"\145\00\u010a\u010b\05\143\00\u010b\u010c\05\164\00\u010c\u010d\05"+
"\145\00\u010d\u010e\05\144\00\u010e\021\01\00\00\u010f\u0110\05\160"+
"\00\u0110\u0111\05\162\00\u0111\u0112\05\151\00\u0112\u0113\05\166"+
"\00\u0113\u0114\05\141\00\u0114\u0115\05\164\00\u0115\u0116\05\145"+
"\00\u0116\023\01\00\00\u0117\u0118\05\141\00\u0118\u0119\05\142\00"+
"\u0119\u011a\05\163\00\u011a\u011b\05\164\00\u011b\u011c\05\162\00"+
"\u011c\u011d\05\141\00\u011d\u011e\05\143\00\u011e\u011f\05\164\00"+
"\u011f\025\01\00\00\u0120\u0121\05\146\00\u0121\u0122\05\151\00\u0122"+
"\u0123\05\156\00\u0123\u0124\05\141\00\u0124\u0125\05\154\00\u0125"+
"\027\01\00\00\u0126\u0127\05\163\00\u0127\u0128\05\164\00\u0128\u0129"+
"\05\162\00\u0129\u012a\05\151\00\u012a\u012b\05\143\00\u012b\u012c"+
"\05\164\00\u012c\u012d\05\146\00\u012d\u012e\05\160\00\u012e\031\01"+
"\00\00\u012f\u0130\05\143\00\u0130\u0131\05\154\00\u0131\u0132\05"+
"\141\00\u0132\u0133\05\163\00\u0133\u0134\05\163\00\u0134\033\01\00"+
"\00\u0135\u0136\05\145\00\u0136\u0137\05\170\00\u0137\u0138\05\164"+
"\00\u0138\u0139\05\145\00\u0139\u013a\05\156\00\u013a\u013b\05\144"+
"\00\u013b\u013c\05\163\00\u013c\035\01\00\00\u013d\u013e\05\151\00"+
"\u013e\u013f\05\155\00\u013f\u0140\05\160\00\u0140\u0141\05\154\00"+
"\u0141\u0142\05\145\00\u0142\u0143\05\155\00\u0143\u0144\05\145\00"+
"\u0144\u0145\05\156\00\u0145\u0146\05\164\00\u0146\u0147\05\163\00"+
"\u0147\037\01\00\00\u0148\u0149\05\074\00\u0149\041\01\00\00\u014a"+
"\u014b\05\054\00\u014b\043\01\00\00\u014c\u014d\05\076\00\u014d\045"+
"\01\00\00\u014e\u014f\05\046\00\u014f\047\01\00\00\u0150\u0151\05"+
"\173\00\u0151\051\01\00\00\u0152\u0153\05\175\00\u0153\053\01\00\00"+
"\u0154\u0155\05\151\00\u0155\u0156\05\156\00\u0156\u0157\05\164\00"+
"\u0157\u0158\05\145\00\u0158\u0159\05\162\00\u0159\u015a\05\146\00"+
"\u015a\u015b\05\141\00\u015b\u015c\05\143\00\u015c\u015d\05\145\00"+
"\u015d\055\01\00\00\u015e\u015f\05\166\00\u015f\u0160\05\157\00\u0160"+
"\u0161\05\151\00\u0161\u0162\05\144\00\u0162\057\01\00\00\u0163\u0164"+
"\05\133\00\u0164\061\01\00\00\u0165\u0166\05\135\00\u0166\063\01\00"+
"\00\u0167\u0168\05\164\00\u0168\u0169\05\150\00\u0169\u016a\05\162"+
"\00\u016a\u016b\05\157\00\u016b\u016c\05\167\00\u016c\u016d\05\163"+
"\00\u016d\065\01\00\00\u016e\u016f\05\075\00\u016f\067\01\00\00\u0170"+
"\u0171\05\156\00\u0171\u0172\05\141\00\u0172\u0173\05\164\00\u0173"+
"\u0174\05\151\00\u0174\u0175\05\166\00\u0175\u0176\05\145\00\u0176"+
"\071\01\00\00\u0177\u0178\05\163\00\u0178\u0179\05\171\00\u0179\u017a"+
"\05\156\00\u017a\u017b\05\143\00\u017b\u017c\05\150\00\u017c\u017d"+
"\05\162\00\u017d\u017e\05\157\00\u017e\u017f\05\156\00\u017f\u0180"+
"\05\151\00\u0180\u0181\05\172\00\u0181\u0182\05\145\00\u0182\u0183"+
"\05\144\00\u0183\073\01\00\00\u0184\u0185\05\164\00\u0185\u0186\05"+
"\162\00\u0186\u0187\05\141\00\u0187\u0188\05\156\00\u0188\u0189\05"+
"\163\00\u0189\u018a\05\151\00\u018a\u018b\05\145\00\u018b\u018c\05"+
"\156\00\u018c\u018d\05\164\00\u018d\075\01\00\00\u018e\u018f\05\166"+
"\00\u018f\u0190\05\157\00\u0190\u0191\05\154\00\u0191\u0192\05\141"+
"\00\u0192\u0193\05\164\00\u0193\u0194\05\151\00\u0194\u0195\05\154"+
"\00\u0195\u0196\05\145\00\u0196\077\01\00\00\u0197\u0198\05\142\00"+
"\u0198\u0199\05\157\00\u0199\u019a\05\157\00\u019a\u019b\05\154\00"+
"\u019b\u019c\05\145\00\u019c\u019d\05\141\00\u019d\u019e\05\156\00"+
"\u019e\101\01\00\00\u019f\u01a0\05\143\00\u01a0\u01a1\05\150\00\u01a1"+
"\u01a2\05\141\00\u01a2\u01a3\05\162\00\u01a3\103\01\00\00\u01a4\u01a5"+
"\05\142\00\u01a5\u01a6\05\171\00\u01a6\u01a7\05\164\00\u01a7\u01a8"+
"\05\145\00\u01a8\105\01\00\00\u01a9\u01aa\05\163\00\u01aa\u01ab\05"+
"\150\00\u01ab\u01ac\05\157\00\u01ac\u01ad\05\162\00\u01ad\u01ae\05"+
"\164\00\u01ae\107\01\00\00\u01af\u01b0\05\151\00\u01b0\u01b1\05\156"+
"\00\u01b1\u01b2\05\164\00\u01b2\111\01\00\00\u01b3\u01b4\05\154\00"+
"\u01b4\u01b5\05\157\00\u01b5\u01b6\05\156\00\u01b6\u01b7\05\147\00"+
"\u01b7\113\01\00\00\u01b8\u01b9\05\146\00\u01b9\u01ba\05\154\00\u01ba"+
"\u01bb\05\157\00\u01bb\u01bc\05\141\00\u01bc\u01bd\05\164\00\u01bd"+
"\115\01\00\00\u01be\u01bf\05\144\00\u01bf\u01c0\05\157\00\u01c0\u01c1"+
"\05\165\00\u01c1\u01c2\05\142\00\u01c2\u01c3\05\154\00\u01c3\u01c4"+
"\05\145\00\u01c4\117\01\00\00\u01c5\u01c6\05\077\00\u01c6\121\01\00"+
"\00\u01c7\u01c8\05\163\00\u01c8\u01c9\05\165\00\u01c9\u01ca\05\160"+
"\00\u01ca\u01cb\05\145\00\u01cb\u01cc\05\162\00\u01cc\123\01\00\00"+
"\u01cd\u01ce\05\050\00\u01ce\125\01\00\00\u01cf\u01d0\05\051\00\u01d0"+
"\127\01\00\00\u01d1\u01d2\05\056\00\u01d2\u01d3\05\056\00\u01d3\u01d4"+
"\05\056\00\u01d4\131\01\00\00\u01d5\u01d6\05\164\00\u01d6\u01d7\05"+
"\150\00\u01d7\u01d8\05\151\00\u01d8\u01d9\05\163\00\u01d9\133\01\00"+
"\00\u01da\u01db\05\156\00\u01db\u01dc\05\165\00\u01dc\u01dd\05\154"+
"\00\u01dd\u01de\05\154\00\u01de\135\01\00\00\u01df\u01e0\05\164\00"+
"\u01e0\u01e1\05\162\00\u01e1\u01e2\05\165\00\u01e2\u01e3\05\145\00"+
"\u01e3\137\01\00\00\u01e4\u01e5\05\146\00\u01e5\u01e6\05\141\00\u01e6"+
"\u01e7\05\154\00\u01e7\u01e8\05\163\00\u01e8\u01e9\05\145\00\u01e9"+
"\141\01\00\00\u01ea\u01eb\05\100\00\u01eb\143\01\00\00\u01ec\u01ed"+
"\05\144\00\u01ed\u01ee\05\145\00\u01ee\u01ef\05\146\00\u01ef\u01f0"+
"\05\141\00\u01f0\u01f1\05\165\00\u01f1\u01f2\05\154\00\u01f2\u01f3"+
"\05\164\00\u01f3\145\01\00\00\u01f4\u01f5\05\072\00\u01f5\147\01\00"+
"\00\u01f6\u01f7\05\151\00\u01f7\u01f8\05\146\00\u01f8\151\01\00\00"+
"\u01f9\u01fa\05\145\00\u01fa\u01fb\05\154\00\u01fb\u01fc\05\163\00"+
"\u01fc\u01fd\05\145\00\u01fd\153\01\00\00\u01fe\u01ff\05\146\00\u01ff"+
"\u0200\05\157\00\u0200\u0201\05\162\00\u0201\155\01\00\00\u0202\u0203"+
"\05\167\00\u0203\u0204\05\150\00\u0204\u0205\05\151\00\u0205\u0206"+
"\05\154\00\u0206\u0207\05\145\00\u0207\157\01\00\00\u0208\u0209\05"+
"\144\00\u0209\u020a\05\157\00\u020a\161\01\00\00\u020b\u020c\05\164"+
"\00\u020c\u020d\05\162\00\u020d\u020e\05\171\00\u020e\163\01\00\00"+
"\u020f\u0210\05\146\00\u0210\u0211\05\151\00\u0211\u0212\05\156\00"+
"\u0212\u0213\05\141\00\u0213\u0214\05\154\00\u0214\u0215\05\154\00"+
"\u0215\u0216\05\171\00\u0216\165\01\00\00\u0217\u0218\05\163\00\u0218"+
"\u0219\05\167\00\u0219\u021a\05\151\00\u021a\u021b\05\164\00\u021b"+
"\u021c\05\143\00\u021c\u021d\05\150\00\u021d\167\01\00\00\u021e\u021f"+
"\05\162\00\u021f\u0220\05\145\00\u0220\u0221\05\164\00\u0221\u0222"+
"\05\165\00\u0222\u0223\05\162\00\u0223\u0224\05\156\00\u0224\171\01"+
"\00\00\u0225\u0226\05\164\00\u0226\u0227\05\150\00\u0227\u0228\05"+
"\162\00\u0228\u0229\05\157\00\u0229\u022a\05\167\00\u022a\173\01\00"+
"\00\u022b\u022c\05\142\00\u022c\u022d\05\162\00\u022d\u022e\05\145"+
"\00\u022e\u022f\05\141\00\u022f\u0230\05\153\00\u0230\175\01\00\00"+
"\u0231\u0232\05\143\00\u0232\u0233\05\157\00\u0233\u0234\05\156\00"+
"\u0234\u0235\05\164\00\u0235\u0236\05\151\00\u0236\u0237\05\156\00"+
"\u0237\u0238\05\165\00\u0238\u0239\05\145\00\u0239\177\01\00\00\u023a"+
"\u023b\05\143\00\u023b\u023c\05\141\00\u023c\u023d\05\164\00\u023d"+
"\u023e\05\143\00\u023e\u023f\05\150\00\u023f\u0081\01\00\00\u0240"+
"\u0241\05\143\00\u0241\u0242\05\141\00\u0242\u0243\05\163\00\u0243"+
"\u0244\05\145\00\u0244\u0083\01\00\00\u0245\u0246\05\053\00\u0246"+
"\u0247\05\075\00\u0247\u0085\01\00\00\u0248\u0249\05\055\00\u0249"+
"\u024a\05\075\00\u024a\u0087\01\00\00\u024b\u024c\05\052\00\u024c"+
"\u024d\05\075\00\u024d\u0089\01\00\00\u024e\u024f\05\057\00\u024f"+
"\u0250\05\075\00\u0250\u008b\01\00\00\u0251\u0252\05\046\00\u0252"+
"\u0253\05\075\00\u0253\u008d\01\00\00\u0254\u0255\05\174\00\u0255"+
"\u0256\05\075\00\u0256\u008f\01\00\00\u0257\u0258\05\136\00\u0258"+
"\u0259\05\075\00\u0259\u0091\01\00\00\u025a\u025b\05\045\00\u025b"+
"\u025c\05\075\00\u025c\u0093\01\00\00\u025d\u025e\05\174\00\u025e"+
"\u025f\05\174\00\u025f\u0095\01\00\00\u0260\u0261\05\046\00\u0261"+
"\u0262\05\046\00\u0262\u0097\01\00\00\u0263\u0264\05\174\00\u0264"+
"\u0099\01\00\00\u0265\u0266\05\136\00\u0266\u009b\01\00\00\u0267\u0268"+
"\05\075\00\u0268\u0269\05\075\00\u0269\u009d\01\00\00\u026a\u026b"+
"\05\041\00\u026b\u026c\05\075\00\u026c\u009f\01\00\00\u026d\u026e"+
"\05\151\00\u026e\u026f\05\156\00\u026f\u0270\05\163\00\u0270\u0271"+
"\05\164\00\u0271\u0272\05\141\00\u0272\u0273\05\156\00\u0273\u0274"+
"\05\143\00\u0274\u0275\05\145\00\u0275\u0276\05\157\00\u0276\u0277"+
"\05\146\00\u0277\u00a1\01\00\00\u0278\u0279\05\053\00\u0279\u00a3"+
"\01\00\00\u027a\u027b\05\055\00\u027b\u00a5\01\00\00\u027c\u027d\05"+
"\057\00\u027d\u00a7\01\00\00\u027e\u027f\05\045\00\u027f\u00a9\01"+
"\00\00\u0280\u0281\05\053\00\u0281\u0282\05\053\00\u0282\u00ab\01"+
"\00\00\u0283\u0284\05\055\00\u0284\u0285\05\055\00\u0285\u00ad\01"+
"\00\00\u0286\u0287\05\176\00\u0287\u00af\01\00\00\u0288\u0289\05\041"+
"\00\u0289\u00b1\01\00\00\u028a\u028b\05\156\00\u028b\u028c\05\145"+
"\00\u028c\u028d\05\167\00\u028d\u00b3\01\00\00\u028e\u028f\05\060"+
"\00\u028f\u0294\01\00\00\u0290\u0291\05\170\00\u0291\u0295\01\00\00"+
"\u0292\u0293\05\130\00\u0293\u0295\01\00\00\u0294\u0290\01\00\00\u0294"+
"\u0292\01\00\00\u0295\u0298\01\00\00\u0296\u0297\03\u00ba\135\u0297"+
"\u0299\01\00\00\u0298\u0296\01\00\00\u0299\u029a\01\00\00\u029a\u0296"+
"\01\00\00\u029a\u029b\01\00\00\u029b\u029e\01\00\00\u029c\u029d\03"+
"\u00bc\136\u029d\u029f\01\00\00\u029e\u029c\01\00\00\u029e\u029f\01"+
"\00\00\u029f\u00b5\01\00\00\u02a0\u02a1\05\060\00\u02a1\u02ab\01\00"+
"\00\u02a2\u02a3\02\061\071\u02a3\u02a6\01\00\00\u02a4\u02a5\02\060"+
"\071\u02a5\u02a7\01\00\00\u02a6\u02a4\01\00\00\u02a6\u02a9\01\00\00"+
"\u02a7\u02a8\01\00\00\u02a8\u02a6\01\00\00\u02a9\u02ab\01\00\00\u02aa"+
"\u02a0\01\00\00\u02aa\u02a2\01\00\00\u02ab\u02ae\01\00\00\u02ac\u02ad"+
"\03\u00bc\136\u02ad\u02af\01\00\00\u02ae\u02ac\01\00\00\u02ae\u02af"+
"\01\00\00\u02af\u00b7\01\00\00\u02b0\u02b1\05\060\00\u02b1\u02b4\01"+
"\00\00\u02b2\u02b3\02\060\067\u02b3\u02b5\01\00\00\u02b4\u02b2\01"+
"\00\00\u02b5\u02b6\01\00\00\u02b6\u02b2\01\00\00\u02b6\u02b7\01\00"+
"\00\u02b7\u02ba\01\00\00\u02b8\u02b9\03\u00bc\136\u02b9\u02bb\01\00"+
"\00\u02ba\u02b8\01\00\00\u02ba\u02bb\01\00\00\u02bb\u00b9\01\00\00"+
"\u02bc\u02bd\02\060\071\u02bd\u02c3\01\00\00\u02be\u02bf\02\141\146"+
"\u02bf\u02c3\01\00\00\u02c0\u02c1\02\101\106\u02c1\u02c3\01\00\00"+
"\u02c2\u02bc\01\00\00\u02c2\u02be\01\00\00\u02c2\u02c0\01\00\00\u02c3"+
"\u00bb\01\00\00\u02c4\u02c5\05\154\00\u02c5\u02c9\01\00\00\u02c6\u02c7"+
"\05\114\00\u02c7\u02c9\01\00\00\u02c8\u02c4\01\00\00\u02c8\u02c6\01"+
"\00\00\u02c9\u00bd\01\00\00\u02ca\u02cb\02\060\071\u02cb\u02cd\01"+
"\00\00\u02cc\u02ca\01\00\00\u02cd\u02ce\01\00\00\u02ce\u02ca\01\00"+
"\00\u02ce\u02cf\01\00\00\u02cf\u02d0\01\00\00\u02d0\u02d1\05\056\00"+
"\u02d1\u02d4\01\00\00\u02d2\u02d3\02\060\071\u02d3\u02d5\01\00\00"+
"\u02d4\u02d2\01\00\00\u02d4\u02d7\01\00\00\u02d5\u02d6\01\00\00\u02d6"+
"\u02d4\01\00\00\u02d7\u02da\01\00\00\u02d8\u02d9\03\u00c0\140\u02d9"+
"\u02db\01\00\00\u02da\u02d8\01\00\00\u02da\u02db\01\00\00\u02db\u02de"+
"\01\00\00\u02dc\u02dd\03\u00c2\141\u02dd\u02df\01\00\00\u02de\u02dc"+
"\01\00\00\u02de\u02df\01\00\00\u02df\u0333\01\00\00\u02e0\u02e1\05"+
"\056\00\u02e1\u02e4\01\00\00\u02e2\u02e3\02\060\071\u02e3\u02e5\01"+
"\00\00\u02e4\u02e2\01\00\00\u02e5\u02e6\01\00\00\u02e6\u02e2\01\00"+
"\00\u02e6\u02e7\01\00\00\u02e7\u02ea\01\00\00\u02e8\u02e9\03\u00c0"+
"\140\u02e9\u02eb\01\00\00\u02ea\u02e8\01\00\00\u02ea\u02eb\01\00\00"+
"\u02eb\u02ee\01\00\00\u02ec\u02ed\03\u00c2\141\u02ed\u02ef\01\00\00"+
"\u02ee\u02ec\01\00\00\u02ee\u02ef\01\00\00\u02ef\u0333\01\00\00\u02f0"+
"\u02f1\02\060\071\u02f1\u02f3\01\00\00\u02f2\u02f0\01\00\00\u02f3"+
"\u02f4\01\00\00\u02f4\u02f0\01\00\00\u02f4\u02f5\01\00\00\u02f5\u02f6"+
"\01\00\00\u02f6\u02f7\03\u00c0\140\u02f7\u02fa\01\00\00\u02f8\u02f9"+
"\03\u00c2\141\u02f9\u02fb\01\00\00\u02fa\u02f8\01\00\00\u02fa\u02fb"+
"\01\00\00\u02fb\u0333\01\00\00\u02fc\u02fd\02\060\071\u02fd\u02ff"+
"\01\00\00\u02fe\u02fc\01\00\00\u02ff\u0300\01\00\00\u0300\u02fc\01"+
"\00\00\u0300\u0301\01\00\00\u0301\u0302\01\00\00\u0302\u0303\03\u00c2"+
"\141\u0303\u0333\01\00\00\u0304\u0305\05\060\00\u0305\u0306\05\170"+
"\00\u0306\u030b\01\00\00\u0307\u0308\05\060\00\u0308\u0309\05\130"+
"\00\u0309\u030b\01\00\00\u030a\u0304\01\00\00\u030a\u0307\01\00\00"+
"\u030b\u030e\01\00\00\u030c\u030d\03\u00ba\135\u030d\u030f\01\00\00"+
"\u030e\u030c\01\00\00\u030e\u0311\01\00\00\u030f\u0310\01\00\00\u0310"+
"\u030e\01\00\00\u0311\u031a\01\00\00\u0312\u0313\05\056\00\u0313\u0316"+
"\01\00\00\u0314\u0315\03\u00ba\135\u0315\u0317\01\00\00\u0316\u0314"+
"\01\00\00\u0316\u0319\01\00\00\u0317\u0318\01\00\00\u0318\u0316\01"+
"\00\00\u0319\u031b\01\00\00\u031a\u0312\01\00\00\u031a\u031b\01\00"+
"\00\u031b\u0320\01\00\00\u031c\u031d\05\160\00\u031d\u0321\01\00\00"+
"\u031e\u031f\05\120\00\u031f\u0321\01\00\00\u0320\u031c\01\00\00\u0320"+
"\u031e\01\00\00\u0321\u0326\01\00\00\u0322\u0323\05\053\00\u0323\u0327"+
"\01\00\00\u0324\u0325\05\055\00\u0325\u0327\01\00\00\u0326\u0322\01"+
"\00\00\u0326\u0324\01\00\00\u0326\u0327\01\00\00\u0327\u032a\01\00"+
"\00\u0328\u0329\02\060\071\u0329\u032b\01\00\00\u032a\u0328\01\00"+
"\00\u032b\u032c\01\00\00\u032c\u0328\01\00\00\u032c\u032d\01\00\00"+
"\u032d\u0330\01\00\00\u032e\u032f\03\u00c2\141\u032f\u0331\01\00\00"+
"\u0330\u032e\01\00\00\u0330\u0331\01\00\00\u0331\u0333\01\00\00\u0332"+
"\u02cc\01\00\00\u0332\u02e0\01\00\00\u0332\u02f2\01\00\00\u0332\u02fe"+
"\01\00\00\u0332\u030a\01\00\00\u0333\u00bf\01\00\00\u0334\u0335\05"+
"\145\00\u0335\u0339\01\00\00\u0336\u0337\05\105\00\u0337\u0339\01"+
"\00\00\u0338\u0334\01\00\00\u0338\u0336\01\00\00\u0339\u033e\01\00"+
"\00\u033a\u033b\05\053\00\u033b\u033f\01\00\00\u033c\u033d\05\055"+
"\00\u033d\u033f\01\00\00\u033e\u033a\01\00\00\u033e\u033c\01\00\00"+
"\u033e\u033f\01\00\00\u033f\u0342\01\00\00\u0340\u0341\02\060\071"+
"\u0341\u0343\01\00\00\u0342\u0340\01\00\00\u0343\u0344\01\00\00\u0344"+
"\u0340\01\00\00\u0344\u0345\01\00\00\u0345\u00c1\01\00\00\u0346\u0347"+
"\05\146\00\u0347\u034f\01\00\00\u0348\u0349\05\106\00\u0349\u034f"+
"\01\00\00\u034a\u034b\05\144\00\u034b\u034f\01\00\00\u034c\u034d\05"+
"\104\00\u034d\u034f\01\00\00\u034e\u0346\01\00\00\u034e\u0348\01\00"+
"\00\u034e\u034a\01\00\00\u034e\u034c\01\00\00\u034f\u00c3\01\00\00"+
"\u0350\u0351\05\047\00\u0351\u0356\01\00\00\u0352\u0353\03\u00c8\144"+
"\u0353\u0357\01\00\00\u0354\u0355\012\00\00\u0355\u0357\01\00\00\u0356"+
"\u0352\01\00\00\u0356\u0354\01\00\00\u0357\u0358\01\00\00\u0358\u0359"+
"\05\047\00\u0359\u00c5\01\00\00\u035a\u035b\05\042\00\u035b\u0360"+
"\01\00\00\u035c\u035d\03\u00c8\144\u035d\u0361\01\00\00\u035e\u035f"+
"\012\01\00\u035f\u0361\01\00\00\u0360\u035c\01\00\00\u0360\u035e\01"+
"\00\00\u0360\u0363\01\00\00\u0361\u0362\01\00\00\u0362\u0360\01\00"+
"\00\u0363\u0364\01\00\00\u0364\u0365\05\042\00\u0365\u00c7\01\00\00"+
"\u0366\u0367\05\134\00\u0367\u0378\01\00\00\u0368\u0369\05\142\00"+
"\u0369\u0379\01\00\00\u036a\u036b\05\164\00\u036b\u0379\01\00\00\u036c"+
"\u036d\05\156\00\u036d\u0379\01\00\00\u036e\u036f\05\146\00\u036f"+
"\u0379\01\00\00\u0370\u0371\05\162\00\u0371\u0379\01\00\00\u0372\u0373"+
"\05\042\00\u0373\u0379\01\00\00\u0374\u0375\05\047\00\u0375\u0379"+
"\01\00\00\u0376\u0377\05\134\00\u0377\u0379\01\00\00\u0378\u0368\01"+
"\00\00\u0378\u036a\01\00\00\u0378\u036c\01\00\00\u0378\u036e\01\00"+
"\00\u0378\u0370\01\00\00\u0378\u0372\01\00\00\u0378\u0374\01\00\00"+
"\u0378\u0376\01\00\00\u0379\u037f\01\00\00\u037a\u037b\03\u00cc\146"+
"\u037b\u037f\01\00\00\u037c\u037d\03\u00ca\145\u037d\u037f\01\00\00"+
"\u037e\u0366\01\00\00\u037e\u037a\01\00\00\u037e\u037c\01\00\00\u037f"+
"\u00c9\01\00\00\u0380\u0381\05\134\00\u0381\u0382\01\00\00\u0382\u0383"+
"\02\060\063\u0383\u0384\01\00\00\u0384\u0385\02\060\067\u0385\u0386"+
"\01\00\00\u0386\u0387\02\060\067\u0387\u0393\01\00\00\u0388\u0389"+
"\05\134\00\u0389\u038a\01\00\00\u038a\u038b\02\060\067\u038b\u038c"+
"\01\00\00\u038c\u038d\02\060\067\u038d\u0393\01\00\00\u038e\u038f"+
"\05\134\00\u038f\u0390\01\00\00\u0390\u0391\02\060\067\u0391\u0393"+
"\01\00\00\u0392\u0380\01\00\00\u0392\u0388\01\00\00\u0392\u038e\01"+
"\00\00\u0393\u00cb\01\00\00\u0394\u0395\05\134\00\u0395\u0396\01\00"+
"\00\u0396\u0397\05\165\00\u0397\u0398\01\00\00\u0398\u0399\03\u00ba"+
"\135\u0399\u039a\01\00\00\u039a\u039b\03\u00ba\135\u039b\u039c\01"+
"\00\00\u039c\u039d\03\u00ba\135\u039d\u039e\01\00\00\u039e\u039f\03"+
"\u00ba\135\u039f\u00cd\01\00\00\u03a0\u03a1\05\145\00\u03a1\u03a2"+
"\05\156\00\u03a2\u03a3\05\165\00\u03a3\u03a4\05\155\00\u03a4\u03a5"+
"\01\00\00\u03a5\u00cf\01\00\00\u03a6\u03a7\05\141\00\u03a7\u03a8\05"+
"\163\00\u03a8\u03a9\05\163\00\u03a9\u03aa\05\145\00\u03aa\u03ab\05"+
"\162\00\u03ab\u03ac\05\164\00\u03ac\u03ad\01\00\00\u03ad\u00d1\01"+
"\00\00\u03ae\u03af\03\u00d4\152\u03af\u03b4\01\00\00\u03b0\u03b1\03"+
"\u00d4\152\u03b1\u03b5\01\00\00\u03b2\u03b3\03\u00d6\153\u03b3\u03b5"+
"\01\00\00\u03b4\u03b0\01\00\00\u03b4\u03b2\01\00\00\u03b4\u03b7\01"+
"\00\00\u03b5\u03b6\01\00\00\u03b6\u03b4\01\00\00\u03b7\u00d3\01\00"+
"\00\u03b8\u03b9\05\044\00\u03b9\u03d3\01\00\00\u03ba\u03bb\02\101"+
"\132\u03bb\u03d3\01\00\00\u03bc\u03bd\05\137\00\u03bd\u03d3\01\00"+
"\00\u03be\u03bf\02\141\172\u03bf\u03d3\01\00\00\u03c0\u03c1\02\u00c0"+
"\u00d6\u03c1\u03d3\01\00\00\u03c2\u03c3\02\u00d8\u00f6\u03c3\u03d3"+
"\01\00\00\u03c4\u03c5\02\u00f8\u00ff\u03c5\u03d3\01\00\00\u03c6\u03c7"+
"\02\u0100\u1fff\u03c7\u03d3\01\00\00\u03c8\u03c9\02\u3040\u318f\u03c9"+
"\u03d3\01\00\00\u03ca\u03cb\02\u3300\u337f\u03cb\u03d3\01\00\00\u03cc"+
"\u03cd\02\u3400\u3d2d\u03cd\u03d3\01\00\00\u03ce\u03cf\02\u4e00\u9fff"+
"\u03cf\u03d3\01\00\00\u03d0\u03d1\02\uf900\ufaff\u03d1\u03d3\01\00"+
"\00\u03d2\u03b8\01\00\00\u03d2\u03ba\01\00\00\u03d2\u03bc\01\00\00"+
"\u03d2\u03be\01\00\00\u03d2\u03c0\01\00\00\u03d2\u03c2\01\00\00\u03d2"+
"\u03c4\01\00\00\u03d2\u03c6\01\00\00\u03d2\u03c8\01\00\00\u03d2\u03ca"+
"\01\00\00\u03d2\u03cc\01\00\00\u03d2\u03ce\01\00\00\u03d2\u03d0\01"+
"\00\00\u03d3\u00d5\01\00\00\u03d4\u03d5\02\060\071\u03d5\u03f3\01"+
"\00\00\u03d6\u03d7\02\u0660\u0669\u03d7\u03f3\01\00\00\u03d8\u03d9"+
"\02\u06f0\u06f9\u03d9\u03f3\01\00\00\u03da\u03db\02\u0966\u096f\u03db"+
"\u03f3\01\00\00\u03dc\u03dd\02\u09e6\u09ef\u03dd\u03f3\01\00\00\u03de"+
"\u03df\02\u0a66\u0a6f\u03df\u03f3\01\00\00\u03e0\u03e1\02\u0ae6\u0aef"+
"\u03e1\u03f3\01\00\00\u03e2\u03e3\02\u0b66\u0b6f\u03e3\u03f3\01\00"+
"\00\u03e4\u03e5\02\u0be7\u0bef\u03e5\u03f3\01\00\00\u03e6\u03e7\02"+
"\u0c66\u0c6f\u03e7\u03f3\01\00\00\u03e8\u03e9\02\u0ce6\u0cef\u03e9"+
"\u03f3\01\00\00\u03ea\u03eb\02\u0d66\u0d6f\u03eb\u03f3\01\00\00\u03ec"+
"\u03ed\02\u0e50\u0e59\u03ed\u03f3\01\00\00\u03ee\u03ef\02\u0ed0\u0ed9"+
"\u03ef\u03f3\01\00\00\u03f0\u03f1\02\u1040\u1049\u03f1\u03f3\01\00"+
"\00\u03f2\u03d4\01\00\00\u03f2\u03d6\01\00\00\u03f2\u03d8\01\00\00"+
"\u03f2\u03da\01\00\00\u03f2\u03dc\01\00\00\u03f2\u03de\01\00\00\u03f2"+
"\u03e0\01\00\00\u03f2\u03e2\01\00\00\u03f2\u03e4\01\00\00\u03f2\u03e6"+
"\01\00\00\u03f2\u03e8\01\00\00\u03f2\u03ea\01\00\00\u03f2\u03ec\01"+
"\00\00\u03f2\u03ee\01\00\00\u03f2\u03f0\01\00\00\u03f3\u00d7\01\00"+
"\00\u03f4\u03f5\05\040\00\u03f5\u03ff\01\00\00\u03f6\u03f7\05\015"+
"\00\u03f7\u03ff\01\00\00\u03f8\u03f9\05\011\00\u03f9\u03ff\01\00\00"+
"\u03fa\u03fb\05\014\00\u03fb\u03ff\01\00\00\u03fc\u03fd\05\012\00"+
"\u03fd\u03ff\01\00\00\u03fe\u03f4\01\00\00\u03fe\u03f6\01\00\00\u03fe"+
"\u03f8\01\00\00\u03fe\u03fa\01\00\00\u03fe\u03fc\01\00\00\u03ff\u0400"+
"\01\00\00\u0400\u03f4\01\00\00\u0400\u03f6\01\00\00\u0400\u03f8\01"+
"\00\00\u0400\u03fa\01\00\00\u0400\u03fc\01\00\00\u0400\u0401\01\00"+
"\00\u0401\u0402\01\00\00\u0402\u00d9\01\00\00\u0403\u0404\05\057\00"+
"\u0404\u0405\05\057\00\u0405\u0408\01\00\00\u0406\u0407\012\02\00"+
"\u0407\u0409\01\00\00\u0408\u0406\01\00\00\u0408\u040b\01\00\00\u0409"+
"\u040a\01\00\00\u040a\u0408\01\00\00\u040b\u040e\01\00\00\u040c\u040d"+
"\05\015\00\u040d\u040f\01\00\00\u040e\u040c\01\00\00\u040e\u040f\01"+
"\00\00\u040f\u0410\01\00\00\u0410\u0411\05\012\00\u0411\u0412\01\00"+
"\00\u0412\u00db\01\00\00\u0413\u0414\05\057\00\u0414\u0415\05\052"+
"\00\u0415\u0416\01\00\00\u0416\u00dd\01\00\00\u0417\u0418\05\052\00"+
"\u0418\u0419\05\057\00\u0419\u041a\01\00\00\u041a\u00df\01\00\00\u041b"+
"\u041c\013\00\00\u041c\u041d\01\00\00\u041d\u00e1\01\00\00\100\00"+
"\01\u0294\u0298\u0298\u029a\u029e\u02a6\u02aa\u02ae\u02b4\u02b4\u02b6"+
"\u02ba\u02c2\u02c8\u02cc\u02cc\u02ce\u02d4\u02da\u02de\u02e4\u02e4"+
"\u02e6\u02ea\u02ee\u02f2\u02f2\u02f4\u02fa\u02fe\u02fe\u0300\u030a"+
"\u030e\u0316\u031a\u0320\u0326\u032a\u032a\u032c\u0330\u0332\u0338"+
"\u033e\u0342\u0342\u0344\u034e\u0356\u0360\u0378\u037e\u0392\u03b4"+
"\u03d2\u03f2\u03fe\u03fe\u0400\u0408\u040e";
// Deserialize the generated ATN (augmented transition network) from the
// serialized string form above; done once at class-load time.
public static final ATN _ATN =
ATNInterpreter.deserialize(_serializedATN.toCharArray());
// Debug hook: constructs a DOTGenerator so the ATN can be dumped via the
// commented-out println. NOTE(review): the generator instance is otherwise
// unused -- presumably left in for interactive debugging of the generated ATN.
static {
org.antlr.v4.tool.DOTGenerator dot = new org.antlr.v4.tool.DOTGenerator(null);
//System.out.println(dot.getDOT(_ATN.decisionToATNState.get(0)));
}
}

View File

@ -0,0 +1,756 @@
parser grammar JavaParser;
options {backtrack=true; memoize=true; tokenVocab=JavaLexer;}
// starting point for parsing a java file
/* The annotations are separated out to make parsing faster, but must be associated with
a packageDeclaration or a typeDeclaration (and not an empty one). */
compilationUnit
: annotations
( packageDeclaration importDeclaration* typeDeclaration*
| classOrInterfaceDeclaration typeDeclaration*
)
| packageDeclaration? importDeclaration* typeDeclaration*
;
packageDeclaration
: 'package' qualifiedName ';'
;
importDeclaration
: 'import' 'static'? qualifiedName ('.' '*')? ';'
;
typeDeclaration
: classOrInterfaceDeclaration
| ';'
;
classOrInterfaceDeclaration
: classOrInterfaceModifiers (classDeclaration | interfaceDeclaration)
;
classOrInterfaceModifiers
: classOrInterfaceModifier*
;
classOrInterfaceModifier
: annotation // class or interface
| 'public' // class or interface
| 'protected' // class or interface
| 'private' // class or interface
| 'abstract' // class or interface
| 'static' // class or interface
| 'final' // class only -- does not apply to interfaces
| 'strictfp' // class or interface
;
modifiers
: modifier*
;
classDeclaration
: normalClassDeclaration
| enumDeclaration
;
normalClassDeclaration
: 'class' Identifier typeParameters?
('extends' type)?
('implements' typeList)?
classBody
;
typeParameters
: '<' typeParameter (',' typeParameter)* '>'
;
typeParameter
: Identifier ('extends' typeBound)?
;
typeBound
: type ('&' type)*
;
// ENUM is a named token rather than the literal 'enum' so the lexer can
// choose to treat "enum" as a plain identifier for pre-1.5 source
// (see the enumIsKeyword flag in JavaLexer).
enumDeclaration
: ENUM Identifier ('implements' typeList)? enumBody
;
enumBody
: '{' enumConstants? ','? enumBodyDeclarations? '}'
;
enumConstants
: enumConstant (',' enumConstant)*
;
enumConstant
: annotations? Identifier arguments? classBody?
;
enumBodyDeclarations
: ';' (classBodyDeclaration)*
;
interfaceDeclaration
: normalInterfaceDeclaration
| annotationTypeDeclaration
;
normalInterfaceDeclaration
: 'interface' Identifier typeParameters? ('extends' typeList)? interfaceBody
;
typeList
: type (',' type)*
;
classBody
: '{' classBodyDeclaration* '}'
;
interfaceBody
: '{' interfaceBodyDeclaration* '}'
;
classBodyDeclaration
: ';'
| 'static'? block
| modifiers memberDecl
;
memberDecl
: genericMethodOrConstructorDecl
| memberDeclaration
| 'void' Identifier voidMethodDeclaratorRest
| Identifier constructorDeclaratorRest
| interfaceDeclaration
| classDeclaration
;
memberDeclaration
: type (methodDeclaration | fieldDeclaration)
;
genericMethodOrConstructorDecl
: typeParameters genericMethodOrConstructorRest
;
genericMethodOrConstructorRest
: (type | 'void') Identifier methodDeclaratorRest
| Identifier constructorDeclaratorRest
;
methodDeclaration
: Identifier methodDeclaratorRest
;
fieldDeclaration
: variableDeclarators ';'
;
interfaceBodyDeclaration
: modifiers interfaceMemberDecl
| ';'
;
interfaceMemberDecl
: interfaceMethodOrFieldDecl
| interfaceGenericMethodDecl
| 'void' Identifier voidInterfaceMethodDeclaratorRest
| interfaceDeclaration
| classDeclaration
;
interfaceMethodOrFieldDecl
: type Identifier interfaceMethodOrFieldRest
;
interfaceMethodOrFieldRest
: constantDeclaratorsRest ';'
| interfaceMethodDeclaratorRest
;
methodDeclaratorRest
: formalParameters ('[' ']')*
('throws' qualifiedNameList)?
( methodBody
| ';'
)
;
voidMethodDeclaratorRest
: formalParameters ('throws' qualifiedNameList)?
( methodBody
| ';'
)
;
interfaceMethodDeclaratorRest
: formalParameters ('[' ']')* ('throws' qualifiedNameList)? ';'
;
interfaceGenericMethodDecl
: typeParameters (type | 'void') Identifier
interfaceMethodDeclaratorRest
;
voidInterfaceMethodDeclaratorRest
: formalParameters ('throws' qualifiedNameList)? ';'
;
constructorDeclaratorRest
: formalParameters ('throws' qualifiedNameList)? constructorBody
;
constantDeclarator
: Identifier constantDeclaratorRest
;
variableDeclarators
: variableDeclarator (',' variableDeclarator)*
;
variableDeclarator
: variableDeclaratorId ('=' variableInitializer)?
;
constantDeclaratorsRest
: constantDeclaratorRest (',' constantDeclarator)*
;
constantDeclaratorRest
: ('[' ']')* '=' variableInitializer
;
variableDeclaratorId
: Identifier ('[' ']')*
;
variableInitializer
: arrayInitializer
| expression
;
arrayInitializer
: '{' (variableInitializer (',' variableInitializer)* (',')? )? '}'
;
modifier
: annotation
| 'public'
| 'protected'
| 'private'
| 'static'
| 'abstract'
| 'final'
| 'native'
| 'synchronized'
| 'transient'
| 'volatile'
| 'strictfp'
;
packageOrTypeName
: qualifiedName
;
enumConstantName
: Identifier
;
typeName
: qualifiedName
;
type
: classOrInterfaceType ('[' ']')*
| primitiveType ('[' ']')*
;
classOrInterfaceType
: Identifier typeArguments? ('.' Identifier typeArguments? )*
;
primitiveType
: 'boolean'
| 'char'
| 'byte'
| 'short'
| 'int'
| 'long'
| 'float'
| 'double'
;
variableModifier
: 'final'
| annotation
;
typeArguments
: '<' typeArgument (',' typeArgument)* '>'
;
typeArgument
: type
| '?' (('extends' | 'super') type)?
;
qualifiedNameList
: qualifiedName (',' qualifiedName)*
;
formalParameters
: '(' formalParameterDecls? ')'
;
formalParameterDecls
: variableModifiers type formalParameterDeclsRest
;
formalParameterDeclsRest
: variableDeclaratorId (',' formalParameterDecls)?
| '...' variableDeclaratorId
;
methodBody
: block
;
constructorBody
: '{' explicitConstructorInvocation? blockStatement* '}'
;
explicitConstructorInvocation
: nonWildcardTypeArguments? ('this' | 'super') arguments ';'
| primary '.' nonWildcardTypeArguments? 'super' arguments ';'
;
qualifiedName
: Identifier ('.' Identifier)*
;
literal
: integerLiteral
| FloatingPointLiteral
| CharacterLiteral
| StringLiteral
| booleanLiteral
| 'null'
;
integerLiteral
: HexLiteral
| OctalLiteral
| DecimalLiteral
;
booleanLiteral
: 'true'
| 'false'
;
// ANNOTATIONS
annotations
: annotation+
;
annotation
: '@' annotationName ( '(' ( elementValuePairs | elementValue )? ')' )?
;
annotationName
: Identifier ('.' Identifier)*
;
elementValuePairs
: elementValuePair (',' elementValuePair)*
;
elementValuePair
: Identifier '=' elementValue
;
elementValue
: conditionalExpression
| annotation
| elementValueArrayInitializer
;
elementValueArrayInitializer
: '{' (elementValue (',' elementValue)*)? (',')? '}'
;
annotationTypeDeclaration
: '@' 'interface' Identifier annotationTypeBody
;
annotationTypeBody
: '{' (annotationTypeElementDeclaration)* '}'
;
annotationTypeElementDeclaration
: modifiers annotationTypeElementRest
;
annotationTypeElementRest
: type annotationMethodOrConstantRest ';'
| normalClassDeclaration ';'?
| normalInterfaceDeclaration ';'?
| enumDeclaration ';'?
| annotationTypeDeclaration ';'?
;
annotationMethodOrConstantRest
: annotationMethodRest
| annotationConstantRest
;
annotationMethodRest
: Identifier '(' ')' defaultValue?
;
annotationConstantRest
: variableDeclarators
;
defaultValue
: 'default' elementValue
;
// STATEMENTS / BLOCKS
block
: '{' blockStatement* '}'
;
blockStatement
: localVariableDeclarationStatement
| classOrInterfaceDeclaration
| statement
;
localVariableDeclarationStatement
: localVariableDeclaration ';'
;
localVariableDeclaration
: variableModifiers type variableDeclarators
;
variableModifiers
: variableModifier*
;
// Java statement: block, assert, control flow, try/catch, switch,
// synchronized, jump statements, expression statement, or labeled statement.
statement
: block
// ASSERT is a named token (not the literal 'assert') so the lexer can treat
// "assert" as an identifier for pre-1.4 source -- see assertIsKeyword in JavaLexer.
| ASSERT expression (':' expression)? ';'
// options {k=1;} resolves the classic dangling-else ambiguity by binding
// an 'else' to the nearest enclosing 'if' (match it as soon as it is seen).
| 'if' parExpression statement (options {k=1;}:'else' statement)?
| 'for' '(' forControl ')' statement
| 'while' parExpression statement
| 'do' statement 'while' parExpression ';'
// try requires at least one of: catches, finally, or both.
| 'try' block
( catches 'finally' block
| catches
| 'finally' block
)
| 'switch' parExpression '{' switchBlockStatementGroups '}'
| 'synchronized' parExpression block
| 'return' expression? ';'
| 'throw' expression ';'
| 'break' Identifier? ';'
| 'continue' Identifier? ';'
| ';'
| statementExpression ';'
| Identifier ':' statement
;
catches
: catchClause (catchClause)*
;
catchClause
: 'catch' '(' formalParameter ')' block
;
formalParameter
: variableModifiers type variableDeclaratorId
;
switchBlockStatementGroups
: (switchBlockStatementGroup)*
;
/* The change here (switchLabel -> switchLabel+) technically makes this grammar
ambiguous; but with appropriately greedy parsing it yields the most
appropriate AST, one in which each group, except possibly the last one, has
labels and statements. */
switchBlockStatementGroup
: switchLabel+ blockStatement*
;
switchLabel
: 'case' constantExpression ':'
| 'case' enumConstantName ':'
| 'default' ':'
;
// Header of a for statement: either the enhanced form "for (T x : e)" or the
// classic init/condition/update triple (each part optional).
forControl
options {k=3;} // be efficient for common case: for (ID ID : ID) ...
: enhancedForControl
| forInit? ';' expression? ';' forUpdate?
;
forInit
: localVariableDeclaration
| expressionList
;
enhancedForControl
: variableModifiers type Identifier ':' expression
;
forUpdate
: expressionList
;
// EXPRESSIONS
parExpression
: '(' expression ')'
;
expressionList
: expression (',' expression)*
;
statementExpression
: expression
;
constantExpression
: expression
;
expression
: conditionalExpression (assignmentOperator expression)?
;
// Assignment operators. The compound shift assignments ('<<=', '>>=', '>>>=')
// are matched as sequences of single-character tokens so that '>' remains a
// standalone token and can close nested generic type arguments such as
// List<List<String>>. The commented-out alternatives below preserve the
// predicate-based version that additionally verified the characters were
// adjacent on the same line. NOTE(review): with the active alternatives the
// adjacency is unchecked, so e.g. '> > =' with spaces would also match --
// presumably an accepted tradeoff; confirm intended.
assignmentOperator
: '='
| '+='
| '-='
| '*='
| '/='
| '&='
| '|='
| '^='
| '%='
| '<' '<' '='
| '>' '>' '>' '='
| '>' '>' '='
/*
| ('<' '<' '=')=> t1='<' t2='<' t3='='
{ $t1.getLine() == $t2.getLine() &&
$t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() &&
$t2.getLine() == $t3.getLine() &&
$t2.getCharPositionInLine() + 1 == $t3.getCharPositionInLine() }?
| ('>' '>' '>' '=')=> t1='>' t2='>' t3='>' t4='='
{ $t1.getLine() == $t2.getLine() &&
$t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() &&
$t2.getLine() == $t3.getLine() &&
$t2.getCharPositionInLine() + 1 == $t3.getCharPositionInLine() &&
$t3.getLine() == $t4.getLine() &&
$t3.getCharPositionInLine() + 1 == $t4.getCharPositionInLine() }?
| ('>' '>' '=')=> t1='>' t2='>' t3='='
{ $t1.getLine() == $t2.getLine() &&
$t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() &&
$t2.getLine() == $t3.getLine() &&
$t2.getCharPositionInLine() + 1 == $t3.getCharPositionInLine() }?
*/
;
conditionalExpression
: conditionalOrExpression ( '?' conditionalExpression ':' conditionalExpression )?
;
conditionalOrExpression
: conditionalAndExpression ( '||' conditionalAndExpression )*
;
conditionalAndExpression
: inclusiveOrExpression ( '&&' inclusiveOrExpression )*
;
inclusiveOrExpression
: exclusiveOrExpression ( '|' exclusiveOrExpression )*
;
exclusiveOrExpression
: andExpression ( '^' andExpression )*
;
andExpression
: equalityExpression ( '&' equalityExpression )*
;
equalityExpression
: instanceOfExpression ( ('==' | '!=') instanceOfExpression )*
;
instanceOfExpression
: relationalExpression ('instanceof' type)?
;
relationalExpression
: shiftExpression ( relationalOp shiftExpression )*
;
// '<=' and '>=' are matched as two single-character tokens so that '<' and '>'
// stay available to delimit generic type arguments; the commented-out variant
// following this rule shows the adjacency predicates once used.
relationalOp
: '<' '='
| '>' '='
| '<'
| '>'
;
/*
relationalOp
: ('<' '=')=> t1='<' t2='='
{ $t1.getLine() == $t2.getLine() &&
$t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() }?
| ('>' '=')=> t1='>' t2='='
{ $t1.getLine() == $t2.getLine() &&
$t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() }?
| '<'
| '>'
;
*/
shiftExpression
: additiveExpression ( shiftOp additiveExpression )*
;
// Shift operators assembled from single '<'/'>' tokens (not fused lexer
// tokens) so '>>' can still close nested generics; '>' '>' '>' is listed
// before '>' '>' so the longer operator is preferred.
shiftOp
: '<' '<'
| '>' '>' '>'
| '>' '>'
;
/*
shiftOp
: ('<' '<')=> t1='<' t2='<'
{ $t1.getLine() == $t2.getLine() &&
$t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() }?
| ('>' '>' '>')=> t1='>' t2='>' t3='>'
{ $t1.getLine() == $t2.getLine() &&
$t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() &&
$t2.getLine() == $t3.getLine() &&
$t2.getCharPositionInLine() + 1 == $t3.getCharPositionInLine() }?
| ('>' '>')=> t1='>' t2='>'
{ $t1.getLine() == $t2.getLine() &&
$t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() }?
;
*/
additiveExpression
: multiplicativeExpression ( ('+' | '-') multiplicativeExpression )*
;
multiplicativeExpression
: unaryExpression ( ( '*' | '/' | '%' ) unaryExpression )*
;
unaryExpression
: '+' unaryExpression
| '-' unaryExpression
| '++' unaryExpression
| '--' unaryExpression
| unaryExpressionNotPlusMinus
;
unaryExpressionNotPlusMinus
: '~' unaryExpression
| '!' unaryExpression
| castExpression
| primary selector* ('++'|'--')?
;
// Casts: a primitive-type cast binds any unary expression, while a reference
// cast binds only unaryExpressionNotPlusMinus so '(T)+x' parses as an
// additive expression rather than a cast of '+x'. The '(type | expression)'
// alternative relies on backtracking to disambiguate '(Type)' from '(expr)'.
castExpression
: '(' primitiveType ')' unaryExpression
| '(' (type | expression) ')' unaryExpressionNotPlusMinus
;
primary
: parExpression
| 'this' ('.' Identifier)* identifierSuffix?
| 'super' superSuffix
| literal
| 'new' creator
| Identifier ('.' Identifier)* identifierSuffix?
| primitiveType ('[' ']')* '.' 'class'
| 'void' '.' 'class'
;
identifierSuffix
: ('[' ']')+ '.' 'class'
// | ('[' expression ']')+ // can also be matched by selector, but do here
| arguments
| '.' 'class'
| '.' explicitGenericInvocation
| '.' 'this'
| '.' 'super' arguments
| '.' 'new' innerCreator
;
creator
: nonWildcardTypeArguments createdName classCreatorRest
| createdName (arrayCreatorRest | classCreatorRest)
;
createdName
: classOrInterfaceType
| primitiveType
;
innerCreator
: nonWildcardTypeArguments? Identifier classCreatorRest
;
arrayCreatorRest
: '['
( ']' ('[' ']')* arrayInitializer
| expression ']' ('[' expression ']')* ('[' ']')*
)
;
classCreatorRest
: arguments classBody?
;
explicitGenericInvocation
: nonWildcardTypeArguments Identifier arguments
;
nonWildcardTypeArguments
: '<' typeList '>'
;
selector
: '.' Identifier arguments?
| '.' 'this'
| '.' 'super' superSuffix
| '.' 'new' innerCreator
| '[' expression ']'
;
superSuffix
: arguments
| '.' Identifier arguments?
;
arguments
: '(' expressionList? ')'
;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,756 @@
parser grammar JavaParser;
options {backtrack=true; memoize=true; tokenVocab=JavaLexer;}
// starting point for parsing a java file
/* The annotations are separated out to make parsing faster, but must be associated with
a packageDeclaration or a typeDeclaration (and not an empty one). */
compilationUnit
: annotations
( packageDeclaration importDeclaration* typeDeclaration*
| classOrInterfaceDeclaration typeDeclaration*
)
| packageDeclaration? importDeclaration* typeDeclaration*
;
packageDeclaration
: 'package' qualifiedName ';'
;
importDeclaration
: 'import' 'static'? qualifiedName ('.' '*')? ';'
;
typeDeclaration
: classOrInterfaceDeclaration
| ';'
;
classOrInterfaceDeclaration
: classOrInterfaceModifiers (classDeclaration | interfaceDeclaration)
;
classOrInterfaceModifiers
: classOrInterfaceModifier*
;
classOrInterfaceModifier
: annotation // class or interface
| 'public' // class or interface
| 'protected' // class or interface
| 'private' // class or interface
| 'abstract' // class or interface
| 'static' // class or interface
| 'final' // class only -- does not apply to interfaces
| 'strictfp' // class or interface
;
modifiers
: modifier*
;
classDeclaration
: normalClassDeclaration
| enumDeclaration
;
normalClassDeclaration
: 'class' Identifier typeParameters?
('extends' type)?
('implements' typeList)?
classBody
;
typeParameters
: '<' typeParameter (',' typeParameter)* '>'
;
typeParameter
: Identifier ('extends' typeBound)?
;
typeBound
: type ('&' type)*
;
enumDeclaration
: ENUM Identifier ('implements' typeList)? enumBody
;
enumBody
: '{' enumConstants? ','? enumBodyDeclarations? '}'
;
enumConstants
: enumConstant (',' enumConstant)*
;
enumConstant
: annotations? Identifier arguments? classBody?
;
enumBodyDeclarations
: ';' (classBodyDeclaration)*
;
interfaceDeclaration
: normalInterfaceDeclaration
| annotationTypeDeclaration
;
normalInterfaceDeclaration
: 'interface' Identifier typeParameters? ('extends' typeList)? interfaceBody
;
typeList
: type (',' type)*
;
classBody
: '{' classBodyDeclaration* '}'
;
interfaceBody
: '{' interfaceBodyDeclaration* '}'
;
classBodyDeclaration
: ';'
| 'static'? block
| modifiers memberDecl
;
memberDecl
: genericMethodOrConstructorDecl
| memberDeclaration
| 'void' Identifier voidMethodDeclaratorRest
| Identifier constructorDeclaratorRest
| interfaceDeclaration
| classDeclaration
;
memberDeclaration
: type (methodDeclaration | fieldDeclaration)
;
genericMethodOrConstructorDecl
: typeParameters genericMethodOrConstructorRest
;
genericMethodOrConstructorRest
: (type | 'void') Identifier methodDeclaratorRest
| Identifier constructorDeclaratorRest
;
methodDeclaration
: Identifier methodDeclaratorRest
;
fieldDeclaration
: variableDeclarators ';'
;
interfaceBodyDeclaration
: modifiers interfaceMemberDecl
| ';'
;
interfaceMemberDecl
: interfaceMethodOrFieldDecl
| interfaceGenericMethodDecl
| 'void' Identifier voidInterfaceMethodDeclaratorRest
| interfaceDeclaration
| classDeclaration
;
interfaceMethodOrFieldDecl
: type Identifier interfaceMethodOrFieldRest
;
interfaceMethodOrFieldRest
: constantDeclaratorsRest ';'
| interfaceMethodDeclaratorRest
;
methodDeclaratorRest
: formalParameters ('[' ']')*
('throws' qualifiedNameList)?
( methodBody
| ';'
)
;
voidMethodDeclaratorRest
: formalParameters ('throws' qualifiedNameList)?
( methodBody
| ';'
)
;
interfaceMethodDeclaratorRest
: formalParameters ('[' ']')* ('throws' qualifiedNameList)? ';'
;
interfaceGenericMethodDecl
: typeParameters (type | 'void') Identifier
interfaceMethodDeclaratorRest
;
voidInterfaceMethodDeclaratorRest
: formalParameters ('throws' qualifiedNameList)? ';'
;
constructorDeclaratorRest
: formalParameters ('throws' qualifiedNameList)? constructorBody
;
constantDeclarator
: Identifier constantDeclaratorRest
;
variableDeclarators
: variableDeclarator (',' variableDeclarator)*
;
variableDeclarator
: variableDeclaratorId ('=' variableInitializer)?
;
constantDeclaratorsRest
: constantDeclaratorRest (',' constantDeclarator)*
;
constantDeclaratorRest
: ('[' ']')* '=' variableInitializer
;
variableDeclaratorId
: Identifier ('[' ']')*
;
variableInitializer
: arrayInitializer
| expression
;
arrayInitializer
: '{' (variableInitializer (',' variableInitializer)* (',')? )? '}'
;
modifier
: annotation
| 'public'
| 'protected'
| 'private'
| 'static'
| 'abstract'
| 'final'
| 'native'
| 'synchronized'
| 'transient'
| 'volatile'
| 'strictfp'
;
packageOrTypeName
: qualifiedName
;
enumConstantName
: Identifier
;
typeName
: qualifiedName
;
type
: classOrInterfaceType ('[' ']')*
| primitiveType ('[' ']')*
;
classOrInterfaceType
: Identifier typeArguments? ('.' Identifier typeArguments? )*
;
primitiveType
: 'boolean'
| 'char'
| 'byte'
| 'short'
| 'int'
| 'long'
| 'float'
| 'double'
;
variableModifier
: 'final'
| annotation
;
typeArguments
: '<' typeArgument (',' typeArgument)* '>'
;
typeArgument
: type
| '?' (('extends' | 'super') type)?
;
qualifiedNameList
: qualifiedName (',' qualifiedName)*
;
formalParameters
: '(' formalParameterDecls? ')'
;
formalParameterDecls
: variableModifiers type formalParameterDeclsRest
;
formalParameterDeclsRest
: variableDeclaratorId (',' formalParameterDecls)?
| '...' variableDeclaratorId
;
methodBody
: block
;
constructorBody
: '{' explicitConstructorInvocation? blockStatement* '}'
;
explicitConstructorInvocation
: nonWildcardTypeArguments? ('this' | 'super') arguments ';'
| primary '.' nonWildcardTypeArguments? 'super' arguments ';'
;
qualifiedName
: Identifier ('.' Identifier)*
;
literal
: integerLiteral
| FloatingPointLiteral
| CharacterLiteral
| StringLiteral
| booleanLiteral
| 'null'
;
integerLiteral
: HexLiteral
| OctalLiteral
| DecimalLiteral
;
booleanLiteral
: 'true'
| 'false'
;
// ANNOTATIONS

annotations
    : annotation+
    ;

// '@Name', optionally with '(...)' holding either key=value pairs or a
// single value (the shorthand for a lone 'value' element).
annotation
    : '@' annotationName ( '(' ( elementValuePairs | elementValue )? ')' )?
    ;

annotationName
    : Identifier ('.' Identifier)*
    ;

elementValuePairs
    : elementValuePair (',' elementValuePair)*
    ;

elementValuePair
    : Identifier '=' elementValue
    ;

// Annotation element values exclude assignments, hence conditionalExpression
// rather than expression.
elementValue
    : conditionalExpression
    | annotation
    | elementValueArrayInitializer
    ;

elementValueArrayInitializer
    : '{' (elementValue (',' elementValue)*)? (',')? '}'
    ;

// '@interface Name { ... }' declaration of an annotation type.
annotationTypeDeclaration
    : '@' 'interface' Identifier annotationTypeBody
    ;

annotationTypeBody
    : '{' (annotationTypeElementDeclaration)* '}'
    ;

annotationTypeElementDeclaration
    : modifiers annotationTypeElementRest
    ;

// Members of an annotation type: element methods/constants, or nested type
// declarations (which may carry an optional trailing ';').
annotationTypeElementRest
    : type annotationMethodOrConstantRest ';'
    | normalClassDeclaration ';'?
    | normalInterfaceDeclaration ';'?
    | enumDeclaration ';'?
    | annotationTypeDeclaration ';'?
    ;

annotationMethodOrConstantRest
    : annotationMethodRest
    | annotationConstantRest
    ;

// Element method: zero-arg, with an optional 'default' value clause.
annotationMethodRest
    : Identifier '(' ')' defaultValue?
    ;

annotationConstantRest
    : variableDeclarators
    ;

defaultValue
    : 'default' elementValue
    ;
// STATEMENTS / BLOCKS

block
    : '{' blockStatement* '}'
    ;

blockStatement
    : localVariableDeclarationStatement
    | classOrInterfaceDeclaration
    | statement
    ;

localVariableDeclarationStatement
    : localVariableDeclaration ';'
    ;

localVariableDeclaration
    : variableModifiers type variableDeclarators
    ;

variableModifiers
    : variableModifier*
    ;

// All Java statement forms. The dangling-else ambiguity is resolved toward
// the nearest 'if' by the k=1 subrule option (v3-style syntax) so 'else'
// binds greedily.
statement
    : block
    | ASSERT expression (':' expression)? ';'
    | 'if' parExpression statement (options {k=1;}:'else' statement)?
    | 'for' '(' forControl ')' statement
    | 'while' parExpression statement
    | 'do' statement 'while' parExpression ';'
    | 'try' block
      ( catches 'finally' block
      | catches
      | 'finally' block
      )
    | 'switch' parExpression '{' switchBlockStatementGroups '}'
    | 'synchronized' parExpression block
    | 'return' expression? ';'
    | 'throw' expression ';'
    | 'break' Identifier? ';'
    | 'continue' Identifier? ';'
    | ';'
    | statementExpression ';'
    | Identifier ':' statement
    ;

catches
    : catchClause (catchClause)*
    ;

catchClause
    : 'catch' '(' formalParameter ')' block
    ;

formalParameter
    : variableModifiers type variableDeclaratorId
    ;

switchBlockStatementGroups
    : (switchBlockStatementGroup)*
    ;

/* The change here (switchLabel -> switchLabel+) technically makes this grammar
   ambiguous; but with appropriately greedy parsing it yields the most
   appropriate AST, one in which each group, except possibly the last one, has
   labels and statements. */
switchBlockStatementGroup
    : switchLabel+ blockStatement*
    ;

switchLabel
    : 'case' constantExpression ':'
    | 'case' enumConstantName ':'
    | 'default' ':'
    ;

// NOTE(review): rule-level options{k=3;} is ANTLR v3 syntax; confirm it is
// still honored (or harmlessly ignored) by the v4 tool this playground targets.
forControl
options {k=3;} // be efficient for common case: for (ID ID : ID) ...
    : enhancedForControl
    | forInit? ';' expression? ';' forUpdate?
    ;

forInit
    : localVariableDeclaration
    | expressionList
    ;

// for (mods Type name : expr) — the enhanced for loop.
enhancedForControl
    : variableModifiers type Identifier ':' expression
    ;

forUpdate
    : expressionList
    ;
// EXPRESSIONS

parExpression
    : '(' expression ')'
    ;

expressionList
    : expression (',' expression)*
    ;

statementExpression
    : expression
    ;

constantExpression
    : expression
    ;

// Standard precedence-cascade expression grammar; assignment is
// right-associative via the tail recursion on expression.
expression
    : conditionalExpression (assignmentOperator expression)?
    ;

// Compound shift-assignments are matched as separate '<'/'>'/'=' tokens so
// the lexer never has to decide between '>>' and nested generics ('List<List<T>>').
// The commented-out predicated alts below show the stricter adjacency checks.
assignmentOperator
    : '='
    | '+='
    | '-='
    | '*='
    | '/='
    | '&='
    | '|='
    | '^='
    | '%='
    | '<' '<' '='
    | '>' '>' '>' '='
    | '>' '>' '='
/*
    | ('<' '<' '=')=> t1='<' t2='<' t3='='
      { $t1.getLine() == $t2.getLine() &&
        $t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() &&
        $t2.getLine() == $t3.getLine() &&
        $t2.getCharPositionInLine() + 1 == $t3.getCharPositionInLine() }?
    | ('>' '>' '>' '=')=> t1='>' t2='>' t3='>' t4='='
      { $t1.getLine() == $t2.getLine() &&
        $t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() &&
        $t2.getLine() == $t3.getLine() &&
        $t2.getCharPositionInLine() + 1 == $t3.getCharPositionInLine() &&
        $t3.getLine() == $t4.getLine() &&
        $t3.getCharPositionInLine() + 1 == $t4.getCharPositionInLine() }?
    | ('>' '>' '=')=> t1='>' t2='>' t3='='
      { $t1.getLine() == $t2.getLine() &&
        $t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() &&
        $t2.getLine() == $t3.getLine() &&
        $t2.getCharPositionInLine() + 1 == $t3.getCharPositionInLine() }?
*/
    ;

conditionalExpression
    : conditionalOrExpression ( '?' conditionalExpression ':' conditionalExpression )?
    ;

conditionalOrExpression
    : conditionalAndExpression ( '||' conditionalAndExpression )*
    ;

conditionalAndExpression
    : inclusiveOrExpression ( '&&' inclusiveOrExpression )*
    ;

inclusiveOrExpression
    : exclusiveOrExpression ( '|' exclusiveOrExpression )*
    ;

exclusiveOrExpression
    : andExpression ( '^' andExpression )*
    ;

andExpression
    : equalityExpression ( '&' equalityExpression )*
    ;

equalityExpression
    : instanceOfExpression ( ('==' | '!=') instanceOfExpression )*
    ;

instanceOfExpression
    : relationalExpression ('instanceof' type)?
    ;

relationalExpression
    : shiftExpression ( relationalOp shiftExpression )*
    ;

// '<='/'>=' as two tokens for the same generics reason as assignmentOperator.
relationalOp
    : '<' '='
    | '>' '='
    | '<'
    | '>'
    ;

/*
relationalOp
    : ('<' '=')=> t1='<' t2='='
      { $t1.getLine() == $t2.getLine() &&
        $t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() }?
    | ('>' '=')=> t1='>' t2='='
      { $t1.getLine() == $t2.getLine() &&
        $t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() }?
    | '<'
    | '>'
    ;
*/

shiftExpression
    : additiveExpression ( shiftOp additiveExpression )*
    ;

// Shift operators likewise split into single '<'/'>' tokens.
shiftOp
    : '<' '<'
    | '>' '>' '>'
    | '>' '>'
    ;

/*
shiftOp
    : ('<' '<')=> t1='<' t2='<'
      { $t1.getLine() == $t2.getLine() &&
        $t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() }?
    | ('>' '>' '>')=> t1='>' t2='>' t3='>'
      { $t1.getLine() == $t2.getLine() &&
        $t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() &&
        $t2.getLine() == $t3.getLine() &&
        $t2.getCharPositionInLine() + 1 == $t3.getCharPositionInLine() }?
    | ('>' '>')=> t1='>' t2='>'
      { $t1.getLine() == $t2.getLine() &&
        $t1.getCharPositionInLine() + 1 == $t2.getCharPositionInLine() }?
    ;
*/

additiveExpression
    : multiplicativeExpression ( ('+' | '-') multiplicativeExpression )*
    ;

multiplicativeExpression
    : unaryExpression ( ( '*' | '/' | '%' ) unaryExpression )*
    ;

unaryExpression
    : '+' unaryExpression
    | '-' unaryExpression
    | '++' unaryExpression
    | '--' unaryExpression
    | unaryExpressionNotPlusMinus
    ;

// Postfix ++/-- attach here, after any selector chain (a.b[i].c++).
unaryExpressionNotPlusMinus
    : '~' unaryExpression
    | '!' unaryExpression
    | castExpression
    | primary selector* ('++'|'--')?
    ;

castExpression
    : '(' primitiveType ')' unaryExpression
    | '(' (type | expression) ')' unaryExpressionNotPlusMinus
    ;

// Atomic expression heads; suffixes/selectors handle the dotted tails.
primary
    : parExpression
    | 'this' ('.' Identifier)* identifierSuffix?
    | 'super' superSuffix
    | literal
    | 'new' creator
    | Identifier ('.' Identifier)* identifierSuffix?
    | primitiveType ('[' ']')* '.' 'class'
    | 'void' '.' 'class'
    ;

identifierSuffix
    : ('[' ']')+ '.' 'class'
    | ('[' expression ']')+ // can also be matched by selector, but do here
    | arguments
    | '.' 'class'
    | '.' explicitGenericInvocation
    | '.' 'this'
    | '.' 'super' arguments
    | '.' 'new' innerCreator
    ;

// 'new' target: explicit type args force a class creation; otherwise either
// an array or class creation depending on the rest.
creator
    : nonWildcardTypeArguments createdName classCreatorRest
    | createdName (arrayCreatorRest | classCreatorRest)
    ;

createdName
    : classOrInterfaceType
    | primitiveType
    ;

innerCreator
    : nonWildcardTypeArguments? Identifier classCreatorRest
    ;

// new T[]{...} (initializer form) vs. new T[expr]... (sized form).
arrayCreatorRest
    : '['
      ( ']' ('[' ']')* arrayInitializer
      | expression ']' ('[' expression ']')* ('[' ']')*
      )
    ;

// Constructor args plus optional anonymous-class body.
classCreatorRest
    : arguments classBody?
    ;

explicitGenericInvocation
    : nonWildcardTypeArguments Identifier arguments
    ;

nonWildcardTypeArguments
    : '<' typeList '>'
    ;

// Trailing member/index accesses chained after a primary.
selector
    : '.' Identifier arguments?
    | '.' 'this'
    | '.' 'super' superSuffix
    | '.' 'new' innerCreator
    | '[' expression ']'
    ;

superSuffix
    : arguments
    | '.' Identifier arguments?
    ;

arguments
    : '(' expressionList? ')'
    ;

7
tool/playground/L.g Normal file
View File

@ -0,0 +1,7 @@
// Tiny playground lexer: skip runs of blanks; match double-quoted strings.
lexer grammar L;

WS : ' '+ {skip();} ;

// No escape-sequence support: backslash and quote are simply excluded from
// the string body, so "a\"b" will NOT lex as one string.
StringLiteral
    : '"' ( ~('\\'|'"') )* '"'
    ;

14
tool/playground/LL1.g Normal file
View File

@ -0,0 +1,14 @@
// Playground grammar exercising the LL(1) decision kinds the code generator
// must handle: plain alt, optional (?), star (*), and plus (+) subrules.
grammar LL1;

b : B | C ;              // two-alt LL(1) decision
c1 : A? B ;              // optional, single-token look
c2 : (B|C)? D ;          // optional, set look
d1 : A* B ;              // star loop, single-token look
// star loop whose alts are gated by always-true semantic predicates;
// the action prints when the whole rule matches.
d2 : ({true}? B | {true}? A)* D {System.out.println("works!");} ;
e1 : A+ B ;              // plus loop, single-token look
e2 : (B|A)+ D ;          // plus loop, set look

11
tool/playground/T.g Normal file
View File

@ -0,0 +1,11 @@
// Playground lexer demonstrating lexer modes: '"' pushes STRING_MODE and
// accumulates characters with more() until the closing '"' pops back.
lexer grammar T;

STRING_START : '"' {pushMode(STRING_MODE); more();} ;

// NOTE(review): the {skip();} action is attached only to the '\n' alternative;
// a bare space would be emitted as a WS token — confirm that is intended.
WS : ' ' | '\n' {skip();} ;

mode STRING_MODE;

STRING : '"' {popMode();} ;   // closing quote ends the accumulated token
ANY : . {more();} ;           // fold every other char into the pending token

79
tool/playground/T.java Normal file
View File

@ -0,0 +1,79 @@
/*
* @(#)SerializationTester.java 1.5 03/12/19
*
* Copyright 2004 Sun Microsystems, Inc. All rights reserved.
* SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.LexerSharedState;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
import org.antlr.runtime.*;
/**
 * ANTLR-generated lexer for the playground grammar T (string-mode demo).
 * Token matching is driven by the serialized ATN below, interpreted at
 * runtime by LexerInterpreter; embedded grammar actions are dispatched
 * through {@link #action}.
 */
public class T extends Lexer {
	// Token types; 2 and 3 are unused placeholders (see "<INVALID>" names).
	public static final int
		EOR=1, STRING_START=4, WS=5, STRING=6, ANY=7;
	// Lexer modes: default and the string-accumulation mode.
	public static final int DEFAULT_MODE = 0;
	public static final int STRING_MODE = 1;
	public static final String[] tokenNames = {
		"<INVALID>", "<INVALID>", "<INVALID>",
		"EOR", "STRING_START", "WS", "STRING", "ANY"
	};
	public static final String[] ruleNames = {
		"<INVALID>",
		"STRING_START", "WS", "STRING", "ANY"
	};

	public T(CharStream input) {
		this(input, new LexerSharedState());
	}
	public T(CharStream input, LexerSharedState state) {
		super(input,state);
		_interp = new LexerInterpreter(this,_ATN);
	}

	// NOTE(review): returns the generated .java name, not "T.g"; looks like
	// the code-gen template substituted the output filename — confirm.
	public String getGrammarFileName() { return "T.java"; }
	@Override
	public String[] getTokenNames() { return tokenNames; }
	@Override
	public String[] getRuleNames() { return ruleNames; }
	@Override
	public ATN getATN() { return _ATN; }

	/**
	 * Dispatch for the actions embedded in T.g; actionIndex is assigned by
	 * the code generator (index 0 is unused).
	 */
	public void action(int ruleIndex, int actionIndex) {
		switch ( actionIndex ) {
			case 1 : pushMode(STRING_MODE); more();  break;   // STRING_START
			case 2 : skip();  break;                          // WS
			case 3 : popMode();  break;                       // STRING
			case 4 : more();  break;                          // ANY
		}
	}

	// Machine-generated serialized ATN; do not edit by hand.
	public static final String _serializedATN =
		"\030\012\032\06\00\06\00\02\01\07\01\02\02\07\02\02\03\07\03\02\04"+
		"\07\04\01\01\01\01\01\01\01\02\01\02\01\02\01\02\01\02\03\02\010\02"+
		"\01\03\01\03\01\03\01\04\01\04\01\04\04\02\04\01\04\05\02\06\06\03"+
		"\010\07\04\02\00\01\00\031\00\02\01\00\00\00\04\01\00\00\01\06\01"+
		"\00\00\01\010\01\00\00\02\012\01\00\00\04\022\01\00\00\06\024\01\00"+
		"\00\010\027\01\00\00\012\013\05\042\00\013\014\01\00\00\014\03\01"+
		"\00\00\015\016\05\040\00\016\023\01\00\00\017\020\05\012\00\020\021"+
		"\01\00\00\021\023\01\00\00\022\015\01\00\00\022\017\01\00\00\023\05"+
		"\01\00\00\024\025\05\042\00\025\026\01\00\00\026\07\01\00\00\027\030"+
		"\013\00\00\030\031\01\00\00\031\011\01\00\00\03\00\01\022";
	public static final ATN _ATN =
		ATNInterpreter.deserialize(_serializedATN.toCharArray());
	static {
		// DOT generator kept handy for debugging the deserialized ATN.
		org.antlr.v4.tool.DOTGenerator dot = new org.antlr.v4.tool.DOTGenerator(null);
		//System.out.println(dot.getDOT(_ATN.decisionToATNState.get(0)));
	}
}

120
tool/playground/Test.java Normal file
View File

@ -0,0 +1,120 @@
import org.antlr.v4.Tool;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.Grammar;
import java.util.List;
/**
 * Scratch harness for the ANTLR v4 tool: {@link #dump()} builds a Grammar
 * in memory, runs semantic analysis, constructs its ATN, and prints the DOT
 * for rule "d". The token-stream stub {@link IntTokenStream} feeds raw token
 * types to a parser in tests.
 */
public class Test {
	public static void main(String[] args) throws Exception {
		// Disabled driver: lex args[0] with generated lexer T and dump tokens.
//		T t = new T(new ANTLRFileStream(args[0]));
//		CommonTokenStream tokens = new CommonTokenStream(t);
//		tokens.fill();
//		for (Object tok : tokens.getTokens()) {
//			System.out.println(tok);
//		}
	}

	/** Build a small grammar from a string, analyze it, and dump rule d's ATN. */
	public static void dump() throws Exception {
		Grammar g = new Grammar(
			"grammar T;\n" +
			"\n" +
			"a : A | b ;\n" +
			"\n" +
			"b : B C | B D ;\n" +
			"\n" +
			"c : (B C)? B D ;\n" +
			"\n" +
			"d : (B C|B A)* B D ;\n" +
			"\n" +
			"e : (B C|B A)+ B D ;"
			);
		if ( g.ast!=null && !g.ast.hasErrors ) {
			System.out.println(g.ast.toStringTree());
			Tool antlr = new Tool();
			SemanticPipeline sem = new SemanticPipeline(g);
			sem.process();
			if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
				for (Grammar imp : g.getImportedGrammars()) {
					antlr.process(imp);
				}
			}
		}
		// NOTE(review): ATN is built even when the grammar had errors (this
		// code is outside the if above) — confirm that is intentional.
		ParserATNFactory f = new ParserATNFactory(g);
		ATN atn = f.createATN();
		DOTGenerator dot = new DOTGenerator(g);
		System.out.println(dot.getDOT(atn.ruleToStartState.get(g.getRule("d"))));
	}

	/** Minimal TokenStream over a list of raw token types, for driving a parser. */
	public static class IntTokenStream implements TokenStream {
		List<Integer> types;   // token types, in order
		int p=0;               // index of the current (next-to-consume) token

		public IntTokenStream(List<Integer> types) { this.types = types; }

		public void consume() { p++; }

		public int LA(int i) { return LT(i).getType(); }

		// Marks are just stream positions; nothing to allocate or free.
		public int mark() {
			return index();
		}

		public int index() { return p; }

		public void rewind(int marker) {
		}

		public void rewind() {
		}

		// NOTE(review): release() repositions the stream to the marker
		// (via seek) instead of merely discarding the mark — unusual for a
		// TokenStream; confirm callers rely on this.
		public void release(int marker) {
			seek(marker);
		}

		public void seek(int index) {
			p = index;
		}

		public int size() {
			return types.size();
		}

		public String getSourceName() {
			return null;
		}

		// Past-the-end lookahead yields an EOF token (type -1).
		public Token LT(int i) {
			if ( (p+i-1)>=types.size() ) return new CommonToken(-1);
			return new CommonToken(types.get(p+i-1));
		}

		public int range() {
			return 0;
		}

		public Token get(int i) {
			return new CommonToken(types.get(i));
		}

		public TokenSource getTokenSource() {
			return null;
		}

		public String toString(int start, int stop) {
			return null;
		}

		public String toString(Token start, Token stop) {
			return null;
		}
	}
}

View File

@ -0,0 +1,117 @@
import org.antlr.runtime.debug.BlankDebugEventListener;
import org.antlr.v4.runtime.ANTLRFileStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.atn.LexerInterpreter;
import org.antlr.v4.runtime.atn.ParserInterpreter;
import java.io.File;
/** Parse a java file or directory of java files using the generated parser
* ANTLR builds from java.g
*/
class TestJava {
	/** Accumulated milliseconds spent filling the token stream, all files. */
	public static long lexerTime = 0;
	/** When true, print the decision count collected by the debug listener. */
	public static boolean profile = false;
	// Lexer/parser are created once and reused for every file (see parseFile).
	public static JavaLexer lexer;
	public static JavaParser parser = null;

	public static void main(String[] args) {
		try {
			long start = System.currentTimeMillis();
			if (args.length > 0 ) {
				// for each directory/file specified on the command line
				for(int i=0; i< args.length;i++) {
					doFile(new File(args[i])); // parse it
				}
			}
			else {
				System.err.println("Usage: java Main <directory or file name>");
			}
			long stop = System.currentTimeMillis();
			System.out.println("Lexer total time " + lexerTime + "ms.");
			System.out.println("Total time " + (stop - start) + "ms.");
			System.out.println("finished parsing OK");
			// Global counters maintained by the ATN interpreters.
			System.out.println(LexerInterpreter.ATN_failover+" lexer failovers");
			System.out.println(LexerInterpreter.match_calls+" lexer match calls");
			System.out.println(ParserInterpreter.ATN_failover+" parser failovers");
			System.out.println(ParserInterpreter.predict_calls +" parser predict calls");
			if ( profile ) {
				// NOTE(review): profiler is never registered with the parser
				// anywhere in this file, so numDecisions stays 0 unless it is
				// wired up elsewhere — confirm.
				System.out.println("num decisions "+profiler.numDecisions);
			}
		}
		catch(Exception e) {
			System.err.println("exception: "+e);
			e.printStackTrace(System.err);   // so we can get stack trace
		}
	}

	// This method decides what action to take based on the type of
	// file we are looking at
	public static void doFile(File f) throws Exception {
		// If this is a directory, walk each file/dir in that directory
		if (f.isDirectory()) {
			String files[] = f.list();
			for(int i=0; i < files.length; i++)
				doFile(new File(f, files[i]));
		}
		// otherwise, if this is a java file, parse it!
		else if ( ((f.getName().length()>5) &&
				f.getName().substring(f.getName().length()-5).equals(".java"))
			|| f.getName().equals("input") )
		{
			System.err.println("parsing "+f.getAbsolutePath());
			parseFile(f.getAbsolutePath());
		}
	}

	/** Debug listener that just counts parser decision events. */
	static class CountDecisions extends BlankDebugEventListener {
		public int numDecisions = 0;
		public void enterDecision(int decisionNumber) {
			numDecisions++;
		}
	}
	static CountDecisions profiler = new CountDecisions();

	// Here's where we do the real work...
	public static void parseFile(String f) throws Exception {
		try {
			// Create a scanner that reads from the input stream passed to us
			if ( lexer==null ) {
				lexer = new JavaLexer(null);
			}
			lexer.setCharStream(new ANTLRFileStream(f));
			CommonTokenStream tokens = new CommonTokenStream(lexer);
			// Time only the lexing phase (tokens.fill pulls every token).
			long start = System.currentTimeMillis();
			tokens.fill();
			// System.out.println(tokens.getTokens());
			long stop = System.currentTimeMillis();
			lexerTime += stop-start;
			// for (Object t : tokens.getTokens()) {
			// 	System.out.println(t);
			// }
			if ( true ) {
				// Create a parser that reads from the scanner
				if ( parser==null ) {
					parser = new JavaParser(tokens);
				}
				parser.setTokenStream(tokens);
				// start parsing at the compilationUnit rule
				parser.compilationUnit(null);
				//System.err.println("finished "+f);
			}
		}
		catch (Exception e) {
			System.err.println("parser exception: "+e);
			e.printStackTrace();   // so we can get stack trace
		}
	}
}

View File

@ -0,0 +1,8 @@
/**
 * Placeholder harness for the playground L lexer. The lexing code is
 * parked in comments until the generated L class is available; running
 * main is currently a no-op.
 */
public class TestL {
	public static void main(String[] args) throws Exception {
		// Pending: lex the file named by args[0] and dump its tokens, i.e.
		//   L lexer = new L(new ANTLRFileStream(args[0]));
		//   CommonTokenStream tokens = new CommonTokenStream(lexer);
		//   tokens.fill();
		//   System.out.println(tokens.getTokens());
	}
}

View File

@ -0,0 +1,117 @@
import org.antlr.runtime.debug.BlankDebugEventListener;
import org.antlr.v4.runtime.ANTLRFileStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.atn.LexerInterpreter;
import org.antlr.v4.runtime.atn.ParserInterpreter;
import java.io.File;
/** Parse a java file or directory of java files using the generated parser
* ANTLR builds from java.g
*/
class TestYang {
	/** Accumulated milliseconds spent filling the token stream, all files. */
	public static long lexerTime = 0;
	/** When true, print the decision count collected by the debug listener. */
	public static boolean profile = false;
	// Lexer/parser are created once and reused for every file (see parseFile).
	public static YangJavaLexer lexer;
	public static YangJavaParser parser = null;

	public static void main(String[] args) {
		try {
			long start = System.currentTimeMillis();
			if (args.length > 0 ) {
				// for each directory/file specified on the command line
				for(int i=0; i< args.length;i++) {
					doFile(new File(args[i])); // parse it
				}
			}
			else {
				System.err.println("Usage: java Main <directory or file name>");
			}
			long stop = System.currentTimeMillis();
			System.out.println("Lexer total time " + lexerTime + "ms.");
			System.out.println("Total time " + (stop - start) + "ms.");
			System.out.println("finished parsing OK");
			// Global counters maintained by the ATN interpreters.
			System.out.println(LexerInterpreter.ATN_failover+" lexer failovers");
			System.out.println(LexerInterpreter.match_calls+" lexer match calls");
			System.out.println(ParserInterpreter.ATN_failover+" parser failovers");
			System.out.println(ParserInterpreter.predict_calls +" parser predict calls");
			if ( profile ) {
				// NOTE(review): profiler is never registered with the parser
				// anywhere in this file, so numDecisions stays 0 unless it is
				// wired up elsewhere — confirm.
				System.out.println("num decisions "+profiler.numDecisions);
			}
		}
		catch(Exception e) {
			System.err.println("exception: "+e);
			e.printStackTrace(System.err);   // so we can get stack trace
		}
	}

	// This method decides what action to take based on the type of
	// file we are looking at
	public static void doFile(File f) throws Exception {
		// If this is a directory, walk each file/dir in that directory
		if (f.isDirectory()) {
			String files[] = f.list();
			for(int i=0; i < files.length; i++)
				doFile(new File(f, files[i]));
		}
		// otherwise, if this is a java file, parse it!
		else if ( ((f.getName().length()>5) &&
				f.getName().substring(f.getName().length()-5).equals(".java"))
			|| f.getName().equals("input") )
		{
			System.err.println("parsing "+f.getAbsolutePath());
			parseFile(f.getAbsolutePath());
		}
	}

	/** Debug listener that just counts parser decision events. */
	static class CountDecisions extends BlankDebugEventListener {
		public int numDecisions = 0;
		public void enterDecision(int decisionNumber) {
			numDecisions++;
		}
	}
	static CountDecisions profiler = new CountDecisions();

	// Here's where we do the real work...
	public static void parseFile(String f) throws Exception {
		try {
			// Create a scanner that reads from the input stream passed to us
			if ( lexer==null ) {
				lexer = new YangJavaLexer(null);
			}
			lexer.setCharStream(new ANTLRFileStream(f));
			CommonTokenStream tokens = new CommonTokenStream(lexer);
			// Time only the lexing phase (tokens.fill pulls every token).
			long start = System.currentTimeMillis();
			tokens.fill();
			// System.out.println(tokens.getTokens());
			long stop = System.currentTimeMillis();
			lexerTime += stop-start;
			// Unlike TestJava, this harness prints every token it lexed.
			for (Object t : tokens.getTokens()) {
				System.out.println(t);
			}
			if ( true ) {
				// Create a parser that reads from the scanner
				if ( parser==null ) {
					parser = new YangJavaParser(tokens);
				}
				parser.setTokenStream(tokens);
				// start parsing at the compilationUnit rule
				parser.compilationUnit(null);
				//System.err.println("finished "+f);
			}
		}
		catch (Exception e) {
			System.err.println("parser exception: "+e);
			e.printStackTrace();   // so we can get stack trace
		}
	}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1 @@
int int int

View File

@ -0,0 +1,491 @@
// Default initializer per Java type, used when generating decl code for
// attributes; non-primitive types fall through to "null".
javaTypeInitMap ::= [
	"int":"0",
	"long":"0",
	"float":"0.0f",
	"double":"0.0",
	"boolean":"false",
	"byte":"0",
	"short":"0",
	"char":"0",
	default:"null" // anything other than an atomic type
]

// args must be <object-model-object>, <fields-resulting-in-STs>
// Outermost template for a generated parser .java file: header action,
// runtime imports, then the Parser class itself.
ParserFile(file, parser, namedActions) ::= <<
// $ANTLR ANTLRVersion> <file.fileName> generatedTimestamp>
<namedActions.header>
import org.antlr.v4.runtime.NoViableAltException;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.EarlyExitException;
import org.antlr.v4.runtime.ParserSharedState;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.FailedPredicateException;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.runtime.*;
<parser>
>>

// Parser class body: token constants, name tables, ctors, rule functions,
// action/sempred dispatchers, and the serialized ATN.
Parser(parser, scopes, funcs, atn, actions, sempreds) ::= <<
public class <parser.name> extends Parser {
	public static final int
		<parser.tokens:{k | <k>=<parser.tokens.(k)>}; separator=", ", wrap, anchor>;
	public static final String[] tokenNames = {
		"\<INVALID>", "\<INVALID>", "\<INVALID>",
		<parser.tokenNames:{k | "<k>"}; separator=", ", wrap, anchor>
	};
	public static final String[] ruleNames = {
		"\<INVALID>",
		<parser.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor>
	};
	<scopes>
	<namedActions.members>
	<parser:ctor()>
	<funcs; separator="\n">

	@Override
	public String[] getTokenNames() { return tokenNames; }
	@Override
	public String[] getRuleNames() { return ruleNames; }
	@Override
	public ATN getATN() { return _ATN; }
	<dumpActions(actions,sempreds)>
	<atn>
}
>>

// Emits the sempred(...) and action(...) switch dispatchers, each only
// when the grammar actually declared predicates/actions.
dumpActions(actions,sempreds) ::= <<
<if(sempreds)>
public boolean sempred(int ruleIndex, int predIndex) {
	switch ( predIndex ) {
		<sempreds:{index|
case <index> : return <sempreds.(index)>;}; separator="\n">
	}
	return true;
}
<endif>
<if(actions)>
public void action(int ruleIndex, int actionIndex) {
	switch ( actionIndex ) {
		<actions:{index|
case <index> : <actions.(index)> break;}; separator="\n">
	}
}
<endif>
>>

// Two constructors: convenience (fresh shared state) and explicit-state.
ctor(p) ::= <<
public <p.name>(TokenStream input) {
	this(input, new ParserSharedState());
}
public <p.name>(TokenStream input, ParserSharedState state) {
	super(input, state);
	_interp = new ParserInterpreter(this,_ATN);
}
>>

/*
  // S.g:5:1: b returns [String q, float x] : A ;
  public final S.b_return b() throws RecognitionException {
    b_stack.push(new b_scope());
    S.b_return retval = new S.b_return();
*/
// One generated method per rule: pushes a new rule context (and any dynamic
// scopes), runs the rule body, reports/recovers on error, and pops scopes in
// finally before restoring the parent context.
RuleFunction(currentRule,code,decls,context,scope,namedActions,finallyAction) ::= <<
<context>
<scope>

<if(currentRule.modifiers)><currentRule.modifiers:{f | <f> }><else>public final <endif><currentRule.ctxType> <currentRule.name>(ParserRuleContext _ctx) throws RecognitionException {
	_ctx = new ParserRuleContext(_ctx, <currentRule.startState>);
	state.ctx = _ctx;
	<if(currentRule.scope)>
	<currentRule.scope.name>_stack.push(new <currentRule.scope.name>());
	<endif>
	//System.out.println("enter "+ruleNames[<currentRule.index>]);
	<currentRule.globalScopesUsed:{s | <s>_stack.push(new <s>());}; separator="\n">
	<namedActions.init>
	<decls; separator="\n">
	try {
		<code>
	}
	catch (RecognitionException re) {
		reportError(re);
		recover();
	}
	finally {
		<namedActions.after>
		<currentRule.globalScopesUsed:{s | <s>_stack.pop();}; separator="\n">
		<if(currentRule.scope)><currentRule.scope.name>_stack.pop();<endif>
		<finallyAction>
		state.ctx = (ParserRuleContext)_ctx.parent;
		//System.out.println("exit "+ruleNames[<currentRule.index>]);
	}
	return _ctx;
}
>>

/** Convenience method to call from outside */
StartRuleFunction(f) ::= <<
<if(f.modifiers)><f.modifiers:{f | <f> }><else>public final <endif><f.ctxType> <f.name>(<f.args; separator=", ">) throws RecognitionException {
	return <f.name>(new <f.ctxType>(<f.args:{a | <a.name>, }>LABitSet.EOF_SET));
}
>>
// A sequence of generated operations, one per line.
CodeBlock(c, ops) ::= <<
<ops; separator="\n">
>>

// ---- LL(1) decision templates: decisions resolvable with one token of
// lookahead compile to plain switch/if constructs, no adaptive prediction. ----

LL1AltBlock(choice, alts, error) ::= <<
switch ( state.input.LA(1) ) {
	<choice.altLook,alts:{look,alt| <cases(ttypes=look)>
	<alt>
	break;}; separator="\n">
	default :
		<error>
}
>>

LL1OptionalBlock(choice, alts, error) ::= <<
switch ( state.input.LA(1) ) {
	<choice.altLook,alts:{look,alt| <cases(ttypes=look)>
	<alt>
	break;}; separator="\n">
	default :
		<error>
}
>>

// (x)? with a single alt: a simple if on the lookahead expression.
LL1OptionalBlockSingleAlt(choice, expr, alts, preamble, error, followExpr) ::= <<
<preamble; separator="\n">
if ( <expr> ) {
	<alts; separator="\n">
}
<!else if ( !(<followExpr>) ) <error>!>
>>

// (x)* loop: labeled while(true) with a switch; the exit-look cases break
// out of the labeled loop.
LL1StarBlock(choice, alts, sync) ::= <<
<choice.loopLabel>:
while (true) {
	switch ( state.input.LA(1) ) {
		<choice.altLook,alts:{look,alt| <cases(look)>
		<alt>
		break;}; separator="\n">
		<cases(choice.exitLook)>
			break <choice.loopLabel>;
	}
	//<sync>
}
>>

LL1StarBlockSingleAlt(choice, loopExpr, alts, preamble, iteration, sync) ::= <<
<preamble; separator="\n">
while ( <loopExpr> ) {
	<alts; separator="\n">
	<iteration>
	//<sync>
}
>>

// (x)+ loop: do/while so the body runs at least once; unmatched first token
// falls to the error default.
LL1PlusBlock(choice, alts, iteration, loopExpr, sync, error, iterationSync) ::= <<
//<sync>
do {
	switch ( state.input.LA(1) ) {
		<choice.altLook,alts:{look,alt| <cases(look)>
		<alt>
		break;}; separator="\n">
		default :
			<error>
	}
	<iteration>
	//<iterationSync>
} while ( <loopExpr> );
>>

LL1PlusBlockSingleAlt(choice, loopExpr, alts, preamble, iteration,
					  sync, iterationSync) ::=
<<
//<sync>
<preamble; separator="\n">
do {
	<alts; separator="\n">
	<iteration>
	// <iterationSync>
} while ( <loopExpr> );
>>

// LL(*) stuff
// ---- Adaptive-prediction templates: the ATN interpreter picks the alt. ----

AltBlock(choice, alts, error) ::= <<
switch ( _interp.adaptivePredict(state.input,<choice.decision>,_ctx) ) {
	<alts:{alt |
case <i>:
	<alt>
	break;}; separator="\n">
	default :
		<error>
}
>>

OptionalBlock(choice, alts, error) ::= <<
switch ( _interp.adaptivePredict(state.input,<choice.decision>,_ctx) ) {
	<alts:{alt |
case <i>:
	<alt>
	break;}; separator="\n">
}
>>

// Predict before entering and again after each iteration; exitAlt ends loop.
StarBlock(choice, alts, sync) ::= <<
int _alt<choice.uniqueID> = _interp.adaptivePredict(state.input,<choice.decision>,_ctx);
while ( _alt<choice.uniqueID>!=<choice.exitAlt> ) {
	switch ( _alt<choice.uniqueID> ) {
		<alts:{alt|
case <i>:
	<alt>
	break;}; separator="\n">
	}
	_alt<choice.uniqueID> = _interp.adaptivePredict(state.input,<choice.decision>,_ctx);
}
>>

PlusBlock(choice, alts, error) ::= <<
int _alt<choice.uniqueID> = _interp.adaptivePredict(state.input,<choice.decision>,_ctx);
do {
	switch ( _alt<choice.uniqueID> ) {
		<alts:{alt|
case <i>:
	<alt>
	break;}; separator="\n">
		default :
			<error>
	}
	_alt<choice.uniqueID> = _interp.adaptivePredict(state.input,<choice.decision>,_ctx);
} while ( _alt<choice.uniqueID>!=<choice.exitAlt> );
>>

Sync(s) ::= "sync(<s.expecting.name>);"

ThrowNoViableAlt(t) ::= "throw new NoViableAltException(this,_ctx);"

// Record the ATN state then test set membership of the lookahead token.
TestSet(s) ::= <<
_ctx.s = <s.stateNumber>;
<s.set.name>.member(state.input.LA(1))
>>

// Inline small sets as an ||-chain of type comparisons.
TestSetInline(s) ::= <<
<s.ttypes:{ttype | <s.varName>==<ttype>}; separator=" || ">
>>

cases(ttypes) ::= <<
<ttypes:{t | case <t>:}; separator="\n">
>>

// Invoke another rule, capturing its context into any labels.
InvokeRule(r) ::= <<
_ctx.s = <r.stateNumber>;
<if(r.labels)><r.labels:{l | <l> = }><endif><r.name>(_ctx<r.argExprs:{e| ,<e>}>);
>>

MatchToken(m) ::= <<
_ctx.s = <m.stateNumber>;
<if(m.labels)><m.labels:{l | <l> = }>(Token)<endif>match(<m.name>);
>>

// ACTION STUFF

Action(a, chunks) ::= "<chunks>"

ForcedAction(a, chunks) ::= "<chunks>"

SemPred(p, chunks) ::= <<
if (!(<chunks>)) throw new FailedPredicateException(this, state.input, "<currentRule.name>", ""<!"<chunks>"!>);
>>

ActionText(t) ::= "<t.text>"

// Attribute references resolve against the current rule context or a
// named scope dictionary.
ArgRef(a) ::= "_ctx.<a.name>"
RetValueRef(a) ::= "_ctx.<a.name>"
QRetValueRef(a) ::= "<a.dict>.<a.name>"

/** How to translate $tokenLabel */
TokenRef(t) ::= "<t.name>"

SetAttr(s,rhsChunks) ::= "_ctx.<s.name> = <rhsChunks>;"
SetQAttr(s,rhsChunks) ::= "<s.dict>.<s.name> = <rhsChunks>;"

// $label.text/.type/... translations; all null-guarded.
TokenPropertyRef_text(t) ::= "(<t.label>!=null?<t.label>.getText():null)"
TokenPropertyRef_type(t) ::= "(<t.label>!=null?<t.label>.getType():0)"
TokenPropertyRef_line(t) ::= "(<t.label>!=null?<t.label>.getLine():0)"
TokenPropertyRef_pos(t) ::= "(<t.label>!=null?<t.label>.getCharPositionInLine():0)"
TokenPropertyRef_channel(t) ::= "(<t.label>!=null?<t.label>.getChannel():0)"
TokenPropertyRef_index(t) ::= "(<t.label>!=null?<t.label>.getTokenIndex():0)"
TokenPropertyRef_tree(t) ::= "<t.label>_tree"
TokenPropertyRef_int(t) ::= "(<t.label>!=null?Integer.valueOf(<t.label>.getText()):0)"

// $rulelabel.start/.stop/... translations.
RulePropertyRef_start(r) ::= "(<r.label>!=null?((<file.TokenLabelType>)<r.label>.start):null)"
RulePropertyRef_stop(r) ::= "(<r.label>!=null?((<file.TokenLabelType>)<r.label>.stop):null)"
RulePropertyRef_tree(r) ::= "(<r.label>!=null?((<file.ASTLabelType>)<r.label>.tree):null)"
RulePropertyRef_text(r) ::= "(<r.label>!=null?((TokenStream)state.input).toString(<r.label>.start,<r.label>.stop):null)"
RulePropertyRef_st(r) ::= "(<r.label>!=null?<r.label>.st:null)"

// Dynamic-scope access: peek for $scope::attr, negative index counts back
// from the top of the stack.
DynScopeRef(s) ::= "<s.scope>_stack"
DynScopeAttrRef(s) ::= "<s.scope>_stack.peek().<s.attr>"
DynScopeAttrRef_negIndex(s, indexChunks) ::=
	"<s.scope>_stack.get(<s.scope>_stack.size()-<indexChunks>-1).<s.attr>"
DynScopeAttrRef_index(s, indexChunks) ::=
	"<s.scope>_stack.get(<indexChunks>).<s.attr>"
SetDynScopeAttr(s, rhsChunks) ::=
	"<s.scope>_stack.peek().<s.attr> =<rhsChunks>;"
SetDynScopeAttr_negIndex(s, indexChunks, rhsChunks) ::=
	"<s.scope>_stack.get(<s.scope>_stack.size()-<indexChunks>-1).<s.attr> =<rhsChunks>;"
SetDynScopeAttr_index(s, indexChunks, rhsChunks) ::=
	"<s.scope>_stack.get(<indexChunks>).<s.attr> =<rhsChunks>;"

AddToList(a) ::= "<a.listName>.add(<first(a.opWithResultToAdd.labels)>);"

// Local declarations for labels and lookahead captures.
TokenDecl(t) ::= "Token <t.name>;"
TokenTypeDecl(t) ::= "int <t.name>;"
TokenListDecl(t) ::= "List\<Token> <t.name> = new ArrayList\<Token>();"
RuleContextDecl(r) ::= "<r.ctxName> <r.name>;"

CaptureNextToken(d) ::= "<d.varName> = state.input.LT(1);"
CaptureNextTokenType(d) ::= "<d.varName> = state.input.LA(1);"
StructDecl(s,attrs) ::= <<
public static class <s.name> extends ParserRuleContext {
<attrs:{a | <a>;}; separator="\n">
<if(s.ctorAttrs)>
public <s.name>(<s.ctorAttrs:{a | <a>,}> LABitSet follow) {
super(follow);
<s.ctorAttrs:{a | this.<a.name> = <a.name>;}; separator="\n">
}
<endif>
};
>>
DynamicScopeStruct(d,attrs) ::= <<
public static class <d.name> {
<attrs:{a | <a>;}; separator="\n">
};
public QStack\<<d.name>\> <d.name>_stack = new QStack\<<d.name>\>();
>>
AttributeDecl(d) ::= "<d.decl>"
DFADecl(dfa) ::= <<
// define <dfa.name>
>>
BitSetDecl(b) ::= <<
public static final LABitSet <b.name>=new LABitSet(new long[]{<b.hexWords:{it|<it>L};separator=",">}<if(b.fset.EOF)>, true<endif>);
>>
LexerFile(lexerFile, lexer, namedActions) ::= <<
// $ANTLR ANTLRVersion> <lexerFile.fileName> generatedTimestamp>
<namedActions.header>
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.LexerSharedState;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
import org.antlr.runtime.*;
<lexer>
>>
// Main lexer class template: token-type constants, mode constants, display
// names, rule names, constructors wiring a LexerInterpreter over the ATN,
// standard accessors, action/sempred dispatch methods, and the serialized ATN.
// NOTE(review): named-action members appear twice — <namedActions.members>
// (dynamic lookup from the enclosing LexerFile) and
// <lexer.namedActions.members>; one of the two looks redundant — confirm
// against the output model before removing either.
Lexer(lexer, atn, actions, sempreds) ::= <<
public class <lexer.name> extends Lexer {
	public static final int
		<lexer.tokens:{k | <k>=<lexer.tokens.(k)>}; separator=", ", wrap, anchor>;
	<lexer.modes:{m| public static final int <m> = <i0>;}; separator="\n">
	public static final String[] tokenNames = {
		"\<INVALID>", "\<INVALID>", "\<INVALID>",
		<lexer.tokenNames:{k | "<k>"}; separator=", ", wrap, anchor>
	};
	public static final String[] ruleNames = {
		"\<INVALID>",
		<lexer.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor>
	};
	<namedActions.members>
	public <lexer.name>(CharStream input) {
		this(input, new LexerSharedState());
	}
	public <lexer.name>(CharStream input, LexerSharedState state) {
		super(input,state);
		_interp = new LexerInterpreter(this,_ATN);
	}
	public String getGrammarFileName() { return "<lexerFile.fileName>"; }
	@Override
	public String[] getTokenNames() { return tokenNames; }
	@Override
	public String[] getRuleNames() { return ruleNames; }
	@Override
	public ATN getATN() { return _ATN; }
	<lexer.namedActions.members>
	<dumpActions(actions,sempreds)>
	<atn>
}
>>
// Emits the ATN as a wrapped string constant plus the deserialized _ATN field.
// NOTE(review): the static block only constructs a DOTGenerator next to a
// commented-out println — looks like debug leftover; confirm it can be removed.
SerializedATN(model) ::= <<
public static final String _serializedATN =
	"<model.serialized; wrap={"+<\n>"}, anchor>";
public static final ATN _ATN =
	ATNInterpreter.deserialize(_serializedATN.toCharArray());
static {
	org.antlr.v4.tool.DOTGenerator dot = new org.antlr.v4.tool.DOTGenerator(null);
	//System.out.println(dot.getDOT(_ATN.decisionToATNState.get(0)));
}
>>
// Emits the per-rule action dispatcher: a switch over the action index that
// runs the matching embedded action (with a debug println on every call).
actionMethod(name, ruleIndex, actions) ::= <<
public void <name>_actions(int action) {
	System.out.println("exec action "+action);
	switch ( action ) {
	<actions:{a |
	case <i0> :
		<a>
		break;
}>
	}
}<\n>
>>
// Emits the per-rule semantic-predicate dispatcher: a switch over the
// predicate index returning each predicate's value; unknown indexes yield false.
sempredMethod(name, ruleIndex, preds) ::= <<
public boolean <name>_sempreds(int pred) {
	switch ( pred ) {
	<preds:{p |
	case <i0> :
		return <p>;
}>
	default : return false;
	}
}<\n>
>>
/** Using a type to init value map, try to init a type; if not in table
 *  must be an object, default value is "null".
 */
initValue(typeName) ::= <<
<javaTypeInitMap.(typeName)>
>>
// File extension for generated Java sources.
codeFileExtension() ::= ".java"
// Target-language spellings of the boolean literals.
true() ::= "true"
false() ::= "false"

View File

@ -0,0 +1,12 @@
/** templates used to generate make-compatible dependencies */
group depend;
// NOTE(review): legacy ST3-style "group" header retained above — confirm the
// ST4 group loader tolerates it before removing.
/** Generate "f : x, y, z" dependencies for input
 *  dependencies and generated files.  in and out
 *  are File objects.  For example, you can say
 *  <f.canonicalPath>
 */
dependencies(grammarFileName,in,out) ::= <<
<if(in)><grammarFileName>: <in; separator=", "><endif>
<out:{f | <f> : <grammarFileName>}; separator="\n">
>>

View File

@ -0,0 +1,3 @@
// DOT edge for an action transition; arrowhead shape is optional.
action-edge(src,target,label,arrowhead) ::= <<
<src> -> <target> [fontsize=11, fontname="Courier", arrowsize=.7, label = "<label>"<if(arrowhead)>, arrowhead = <arrowhead><endif>];
>>

View File

@ -0,0 +1,8 @@
// Whole-ATN DOT graph, laid out left-to-right; states and edges are
// pre-rendered strings supplied by the DOT generator.
atn(startState, states, edges, rankdir, decisionRanks, useBox) ::= <<
digraph ATN {
rankdir=LR;
<decisionRanks; separator="\n">
<states; separator="\n">
<edges; separator="\n">
}
>>

View File

@ -0,0 +1,4 @@
// Forces the given decision states onto the same DOT rank (s-prefixed names).
decision-rank(states) ::= <<
{rank=same; rankdir=TB; <states:{s | s<s>}; separator="; ">}
>>

View File

@ -0,0 +1,8 @@
// Whole-DFA DOT graph; rankdir is emitted only when supplied.
dfa(name, startState, states, edges, rankdir, decisionRanks, useBox) ::= <<
digraph <name> {
<if(rankdir)>rankdir=<rankdir>;<endif>
<decisionRanks; separator="\n">
<states; separator="\n">
<edges; separator="\n">
}
>>

View File

@ -0,0 +1,4 @@
// DOT edge for a labeled (token/set) transition; arrowhead shape is optional.
edge(src,target,label,arrowhead) ::= <<
<src> -> <target> [fontsize=11, fontname="Courier", arrowsize=.7, label = "<label>"<if(arrowhead)>, arrowhead = <arrowhead><endif>];
>>

View File

@ -0,0 +1,3 @@
// DOT edge for an epsilon transition, rendered with a fixed italic "e" label.
// NOTE(review): the label and arrowhead formal args are declared but unused
// in the body — presumably kept for call-site symmetry with edge(); confirm.
epsilon-edge(src,label,target,arrowhead) ::= <<
<src> -> <target> [fontname="Times-Italic", label = "e"];
>>

View File

@ -0,0 +1,3 @@
// DOT node for an ordinary state; useBox selects box vs. circle shape.
// Fix: the body referenced <useBox> but it was missing from the formal
// argument list, so callers could never set it (ST rejects setting attributes
// a template does not declare); sibling stopstate() already declares it.
state(state, label, name, useBox) ::= <<
node [fontsize=11, label="<label>", <if(useBox)>shape=box, fixedsize=false<else>shape=circle, fixedsize=true, width=.4<endif>, peripheries=1]; <name>
>>

View File

@ -0,0 +1,3 @@
// DOT node for an accept/stop state (double border); shows the action index
// when present, and renders as a 4-sided polygon when useBox is set.
stopstate(name,label,actionIndex,useBox) ::= <<
node [fontsize=11, label="<label><if(actionIndex)>,\naction:<actionIndex><endif>", <if(useBox)>shape=polygon,sides=4,peripheries=2,fixedsize=false<else>shape=doublecircle, fixedsize=true, width=.6<endif>]; <name>
>>

View File

@ -0,0 +1,40 @@
/*
[The "BSD licence"]
Copyright (c) 2006 Kay Roepke
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
This file contains the actual layout of the messages emitted by ANTLR.
The text itself is coming out of the languages/*stg files, according to the chosen locale.
This file contains the default format ANTLR uses.
*/
// Default ANTLR message layout: "file:line:column:" locations, "(id) text"
// messages, and multi-line output allowed.
location(file, line, column) ::= "<file>:<line>:<column>:"
message(id, text) ::= "(<id>) <text>"
report(location, message, type) ::= "<type>(<message.id>): <location> <message.text>"
wantsSingleLineMessage() ::= "false"

View File

@ -0,0 +1,40 @@
/*
[The "BSD licence"]
Copyright (c) 2006 Kay Roepke
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
This file contains the actual layout of the messages emitted by ANTLR.
The text itself is coming out of the languages/*stg files, according to the chosen locale.
This file contains the format that mimics GCC output.
*/
// GCC-style message layout: "file:line:" locations (column omitted) and
// single-line output, so editors/IDEs can parse the diagnostics.
location(file, line, column) ::= "<file>:<line>:"
message(id, text) ::= "<text> (<id>)"
report(location, message, type) ::= "<location> <type>: <message>"
wantsSingleLineMessage() ::= "true"

View File

@ -0,0 +1,40 @@
/*
[The "BSD licence"]
Copyright (c) 2006 Kay Roepke
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
This file contains the actual layout of the messages emitted by ANTLR.
The text itself is coming out of the languages/*stg files, according to the chosen locale.
This file contains the format that mimics Visual Studio (vs2005) output.
*/
// Visual Studio-style message layout: "file(line,column)" locations,
// "error <id> : <text>" messages, single-line output.
location(file, line, column) ::= "<file>(<line>,<column>)"
message(id, text) ::= "error <id> : <text>"
report(location, message, type) ::= "<location> : <type> <message.id> : <message.text>"
wantsSingleLineMessage() ::= "true"

View File

@ -0,0 +1,316 @@
/*
[The "BSD licence"]
Copyright (c) 2005-2006 Terence Parr
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
This file only contains the messages in English, but no
information about which file, line, or column it occurred in.
The location and message ids are taken out of the formats directory.
Kay Roepke
*/
// Sentinel for the unused slot 0 of the message table; never emitted.
INVALID() ::= <<this can never be printed>>
// TOOL ERRORS
// file errors
// arg = file name; exception/stackTrace are attached by the error manager.
// Fix: the body referenced <exception>/<stackTrace> but the formal args were
// only (arg,arg2), so those attributes could never be set or rendered;
// siblings such as CANNOT_CLOSE_FILE declare them. arg2 kept for
// backward compatibility with existing callers.
CANNOT_WRITE_FILE(arg,arg2,exception,stackTrace) ::= <<
cannot write file <arg>: <exception>
<stackTrace; separator="\n">
>>
// File/tool error messages. Named attributes (arg, arg2, exception,
// stackTrace) are filled in by the tool's error manager.
CANNOT_CLOSE_FILE(arg,exception,stackTrace) ::= "cannot close file <arg>: <exception>"
CANNOT_FIND_TOKENS_FILE(arg) ::= "cannot find tokens file <arg>"
ERROR_READING_TOKENS_FILE(arg,exception,stackTrace) ::= <<
problem reading token vocabulary file <arg>: <exception>
<stackTrace; separator="\n">
>>
DIR_NOT_FOUND(arg,exception,stackTrace) ::= "directory not found: <arg>"
OUTPUT_DIR_IS_FILE(arg,arg2) ::= "output directory is a file: <arg>"
CANNOT_OPEN_FILE(arg,arg2) ::= "cannot find or open file: <arg><if(arg2)>; reason: <arg2><endif>"
CIRCULAR_DEPENDENCY() ::= "your grammars contain a circular dependency and cannot be sorted into a valid build order."
INTERNAL_ERROR(arg,arg2,exception,stackTrace) ::= <<
internal error: <arg> <arg2><if(exception)>: <exception><endif>
<stackTrace; separator="\n">
>>
INTERNAL_WARNING(arg) ::= "internal warning: <arg>"
ERROR_CREATING_ARTIFICIAL_RULE(arg,exception,stackTrace) ::= <<
problems creating lexer rule listing all tokens: <exception>
<stackTrace; separator="\n">
>>
TOKENS_FILE_SYNTAX_ERROR(arg,arg2) ::=
	"problems parsing token vocabulary file <arg> on line <arg2>"
CANNOT_GEN_DOT_FILE(arg,exception,stackTrace) ::=
	"cannot write DFA DOT file <arg>: <exception>"
// Fix: the body references <arg> (the action text) but 'arg' was not a
// declared formal argument, so it always rendered empty; declared here to
// match the arg/exception/stackTrace convention of the sibling messages.
BAD_ACTION_AST_STRUCTURE(arg,exception,stackTrace) ::=
	"bad internal tree structure for action '<arg>': <exception>"
// Internal tree-structure and file-naming diagnostics.
BAD_AST_STRUCTURE(arg,msg) ::= <<
bad internal tree structure '<arg>': <msg>
>>
FILE_AND_GRAMMAR_NAME_DIFFER(arg,arg2) ::=
	"file <arg2> contains grammar <arg>; names must be identical"
FILENAME_EXTENSION_ERROR(arg) ::=
	"file <arg> must end in a file extension, normally .g"
// code gen errors
MISSING_CODE_GEN_TEMPLATES(arg) ::=
	"cannot find code generation templates for language <arg>"
// Fix: the body references <arg> (the target language) but the template
// declared no formal arguments, so <arg> could never be set; mirrors
// MISSING_CODE_GEN_TEMPLATES(arg) above.
MISSING_CYCLIC_DFA_CODE_GEN_TEMPLATES(arg) ::=
	"cannot find code generation cyclic DFA templates for language <arg>"
// Remaining code-generation diagnostics.
CODE_GEN_TEMPLATES_INCOMPLETE(arg) ::=
	"missing code generation template <arg>"
CANNOT_CREATE_TARGET_GENERATOR(arg,exception,stackTrace) ::=
	"cannot create target <arg> code generator: <exception>"
CODE_TEMPLATE_ARG_ISSUE(arg,arg2) ::=
	"code generation template <arg> has missing, misnamed, or incomplete arg list: <arg2>"
NO_MODEL_TO_TEMPLATE_MAPPING(arg) ::=
	"no mapping to template name for output model class <arg>"
CANNOT_COMPUTE_SAMPLE_INPUT_SEQ() ::=
	"cannot generate a sample input sequence from lookahead DFA"
// grammar interpretation errors
/*
NO_VIABLE_DFA_ALT(arg,arg2) ::=
	"no viable transition from state <arg> on <arg2> while interpreting DFA"
*/
// GRAMMAR ERRORS
// Messages produced by semantic analysis of the user's grammar; <arg>,
// <arg2>, <arg3> are positional message arguments set by the error manager.
SYNTAX_ERROR(arg) ::= "<arg>"
RULE_REDEFINITION(arg) ::= "rule <arg> redefinition"
SCOPE_REDEFINITION(arg) ::= "scope <arg> redefinition"
LEXER_RULES_NOT_ALLOWED(arg) ::= "lexer rule <arg> not allowed in parser"
PARSER_RULES_NOT_ALLOWED(arg) ::= "parser rule <arg> not allowed in lexer"
MODE_NOT_IN_LEXER(arg,arg2) ::= "lexical modes are only allowed in lexer grammars"
TOKEN_NAMES_MUST_START_UPPER(arg) ::=
	"token names must start with an uppercase letter: <arg>"
CANNOT_FIND_ATTRIBUTE_NAME_IN_DECL(arg) ::=
	"cannot find an attribute name in attribute declaration"
NO_TOKEN_DEFINITION(arg) ::=
	"no lexer rule corresponding to token: <arg>"
UNDEFINED_RULE_REF(arg) ::=
	"reference to undefined rule: <arg>"
LITERAL_NOT_ASSOCIATED_WITH_LEXER_RULE(arg) ::=
	"literal has no associated lexer rule: <arg>"
CANNOT_ALIAS_TOKENS(arg) ::=
	"can't assign string value to token name <arg> in non-combined grammar"
// Attribute- and label-resolution diagnostics.
ATTRIBUTE_REF_NOT_IN_RULE(arg,arg2) ::=
	"reference to attribute outside of a rule: <arg><if(arg2)>.<arg2><endif>"
UNKNOWN_ATTRIBUTE_IN_SCOPE(arg,arg2) ::=
	"attribute <arg> isn't a valid property in <arg2>"
UNKNOWN_RULE_ATTRIBUTE(arg,arg2,arg3) ::=
	"unknown attribute <arg> for rule <arg2> in <arg3>"
UNKNOWN_SIMPLE_ATTRIBUTE(arg,arg2) ::=
	"unknown attribute reference <arg> in <arg2>"
ISOLATED_RULE_REF(arg,arg2) ::=
	"missing attribute access on rule reference <arg> in <arg2>"
INVALID_RULE_PARAMETER_REF(arg,arg2) ::=
	"cannot access rule <arg>'s parameter: <arg2>"
INVALID_RULE_SCOPE_ATTRIBUTE_REF(arg,arg2) ::=
	"cannot access rule <arg>'s dynamically-scoped attribute: <arg2>"
SYMBOL_CONFLICTS_WITH_GLOBAL_SCOPE(arg) ::=
	"symbol <arg> conflicts with global dynamic scope with same name"
WRITE_TO_READONLY_ATTR(arg,arg2,arg3) ::=
	"cannot write to read only attribute: $<arg><if(arg2)>.<arg2><endif>"
LABEL_CONFLICTS_WITH_RULE(arg) ::=
	"label <arg> conflicts with rule with same name"
LABEL_CONFLICTS_WITH_TOKEN(arg) ::=
	"label <arg> conflicts with token with same name"
LABEL_CONFLICTS_WITH_RULE_SCOPE_ATTRIBUTE(arg,arg2) ::=
	"label <arg> conflicts with rule <arg2>'s dynamically-scoped attribute with same name"
LABEL_CONFLICTS_WITH_RULE_ARG_RETVAL(arg,arg2) ::=
	"label <arg> conflicts with rule <arg2>'s return value or parameter with same name"
ATTRIBUTE_CONFLICTS_WITH_RULE(arg,arg2) ::=
	"rule <arg2>'s dynamically-scoped attribute <arg> conflicts with the rule name"
ATTRIBUTE_CONFLICTS_WITH_RULE_ARG_RETVAL(arg,arg2) ::=
	"rule <arg2>'s dynamically-scoped attribute <arg> conflicts with <arg2>'s return value or parameter with same name"
LABEL_TYPE_CONFLICT(arg,arg2) ::=
	"label <arg> type mismatch with previous definition: <arg2>"
ARG_RETVAL_CONFLICT(arg,arg2) ::=
	"rule <arg2>'s argument <arg> conflicts a return value with same name"
NONUNIQUE_REF(arg) ::=
	"<arg> is a non-unique reference"
FORWARD_ELEMENT_REF(arg) ::=
	"illegal forward reference: <arg>"
MISSING_RULE_ARGS(arg) ::=
	"missing parameter(s) on rule reference: <arg>"
RULE_HAS_NO_ARGS(arg) ::=
	"rule <arg> has no defined parameters"
ARGS_ON_TOKEN_REF(arg) ::=
	"token reference <arg> may not have parameters"
/*
NONCHAR_RANGE() ::=
	"range operator can only be used in the lexer"
*/
ILLEGAL_OPTION(arg) ::=
	"illegal option <arg>"
LIST_LABEL_INVALID_UNLESS_RETVAL_STRUCT(arg) ::=
	"rule '+=' list labels are not allowed w/o output option: <arg>"
REWRITE_ELEMENT_NOT_PRESENT_ON_LHS(arg) ::=
	"reference to rewrite element <arg> not found to left of ->"
//UNDEFINED_TOKEN_REF_IN_REWRITE(arg) ::=
//  "reference to undefined token in rewrite rule: <arg>"
//UNDEFINED_LABEL_REF_IN_REWRITE(arg) ::=
//  "reference to undefined label in rewrite rule: $<arg>"
// Consistency fix: removed the stray space between the template name and its
// argument list; every other definition in this file uses name(args).
NO_GRAMMAR_START_RULE(arg) ::=
	"grammar <arg>: no start rule (no rule can obviously be followed by EOF)"
// Set, prequel, scope, import, and rewrite/AST diagnostics.
EMPTY_COMPLEMENT(arg) ::= <<
<if(arg)>
set complement ~<arg> is empty
<else>
set complement is empty
<endif>
>>
REPEATED_PREQUEL(arg) ::=
	"repeated grammar prequel spec (option, token, or import); please merge"
UNKNOWN_DYNAMIC_SCOPE(arg, arg2) ::=
	"unknown dynamic scope: <arg> in <arg2>"
UNKNOWN_DYNAMIC_SCOPE_ATTRIBUTE(arg,arg2,arg3) ::=
	"unknown dynamically-scoped attribute for scope <arg>: <arg2> in <arg3>"
RULE_REF_AMBIG_WITH_RULE_IN_ALT(arg) ::=
	"reference $<arg> is ambiguous; rule <arg> is enclosing rule and referenced in the production (assuming enclosing rule)"
ISOLATED_RULE_ATTRIBUTE(arg) ::=
	"reference to locally-defined rule scope attribute without rule name: <arg>"
INVALID_ACTION_SCOPE(arg,arg2) ::=
	"unknown or invalid action scope for <arg2> grammar: <arg>"
ACTION_REDEFINITION(arg) ::=
	"redefinition of <arg> action"
DOUBLE_QUOTES_ILLEGAL(arg) ::=
	"string literals must use single quotes (such as \'begin\'): <arg>"
INVALID_TEMPLATE_ACTION(arg) ::=
	"invalid StringTemplate % shorthand syntax: '<arg>'"
ARG_INIT_VALUES_ILLEGAL(arg) ::=
	"rule parameters may not have init values: <arg>"
REWRITE_OR_OP_WITH_NO_OUTPUT_OPTION(arg) ::=
	"<if(arg)>rule <arg> uses <endif>rewrite syntax or operator with no output option"
AST_OP_WITH_NON_AST_OUTPUT_OPTION(arg) ::=
	"AST operator with non-AST output option: <arg>"
NO_RULES(arg,arg2) ::= "<if(arg2.implicitLexerOwner)>implicitly generated <endif>grammar <arg> has no rules"
MISSING_AST_TYPE_IN_TREE_GRAMMAR(arg) ::=
	"tree grammar <arg> has no ASTLabelType option"
REWRITE_FOR_MULTI_ELEMENT_ALT(arg) ::=
	"with rewrite=true, alt <arg> not simple node or obvious tree element; text attribute for rule not guaranteed to be correct"
RULE_INVALID_SET(arg) ::=
	"Cannot complement rule <arg>; not a simple set or element"
HETERO_ILLEGAL_IN_REWRITE_ALT(arg) ::=
	"alts with rewrites can't use heterogeneous types left of ->"
NO_SUCH_GRAMMAR_SCOPE(arg,arg2) ::=
	"reference to undefined grammar in rule reference: <arg>.<arg2>"
NO_SUCH_RULE_IN_SCOPE(arg,arg2) ::=
	"rule <arg2> is not defined in grammar <arg>"
TOKEN_ALIAS_CONFLICT(arg,arg2) ::=
	"cannot alias <arg>; string already assigned to <arg2>"
TOKEN_ALIAS_REASSIGNMENT(arg,arg2) ::=
	"cannot alias <arg>; token name already <if(arg2)>assigned to <arg2><else>defined<endif>"
TOKEN_VOCAB_IN_DELEGATE(arg,arg2) ::=
	"tokenVocab option ignored in imported grammar <arg>"
TOKEN_ALIAS_IN_DELEGATE(arg,arg2) ::=
	"can't assign string to token name <arg> to string in imported grammar <arg2>"
INVALID_IMPORT(arg,arg2) ::=
	"<arg.typeString> grammar <arg.name> cannot import <arg2.typeString> grammar <arg2.name>"
IMPORTED_TOKENS_RULE_EMPTY(arg,arg2) ::=
	"no lexer rules contributed to <arg> from imported grammar <arg2>"
IMPORT_NAME_CLASH(arg,arg2) ::=
	"<arg.typeString> grammar <arg.name> and imported <arg2.typeString> grammar <arg2.name> both generate <arg2.recognizerName>"
AST_OP_IN_ALT_WITH_REWRITE(arg,arg2) ::=
	"rule <arg> alt <arg2> uses rewrite syntax and also an AST operator"
WILDCARD_AS_ROOT(arg) ::= "Wildcard invalid as root; wildcard can itself be a tree"
CONFLICTING_OPTION_IN_TREE_FILTER(arg,arg2) ::=
	"option <arg>=<arg2> conflicts with tree grammar filter mode"
// GRAMMAR WARNINGS
// Analysis warnings; <arg> here is a structured message object whose
// properties (input, conflictingPaths, alts, ...) are rendered below.
AMBIGUITY(arg) ::= <<
Decision can match input such as "<arg.input>" using multiple alternatives:
<arg.conflictingPaths.keys:{alt | alt <alt> via token <arg.conflictingPaths.(alt):{t | <t.text> at line <t.line>:<t.charPositionInLine>}; separator=" then ">}; separator=" and ">
<if(arg.hasPredicateBlockedByAction)><\n>At least one possibly relevant semantic predicate was hidden by action(s).<endif>
>>
/*
GRAMMAR_NONDETERMINISM(input,conflictingAlts,paths,disabled,hasPredicateBlockedByAction) ::=
<<
<if(paths)>
Decision can match input such as "<input>" using multiple alternatives:
	<paths:{ alt <it.alt> via ATN path <it.states; separator=","><\n>}>
<else>
Decision can match input such as "<input>" using multiple alternatives: <conflictingAlts; separator=", ">
<endif>
<if(disabled)><\n>As a result, alternative(s) <disabled; separator=","> were disabled for that input<endif><if(hasPredicateBlockedByAction)><\n>Semantic predicates were present but were hidden by actions.<endif>
>>
*/
DANGLING_STATE(danglingAlts,input) ::= <<
the decision cannot distinguish between alternative(s) <danglingAlts; separator=","> for input such as "<input>"
>>
UNREACHABLE_ALTS(arg) ::= <<
The following alternatives can never be matched: <arg.alts; separator=","><\n>
>>
INSUFFICIENT_PREDICATES(arg) ::= <<
Input such as "<arg.input>" is insufficiently covered with predicates at locations: <arg.altToLocations.keys:{alt|alt <alt>: <arg.altToLocations.(alt):{loc| line <loc.line>:<loc.charPositionInLine> at <loc.text>}; separator=", ">}; separator=", ">
<if(arg.hasPredicateBlockedByAction)><\n>At least one possibly relevant semantic predicate was hidden by action(s).<endif>
>>
ANALYSIS_TIMEOUT(arg) ::= <<
ANTLR could not analyze this decision in rule <arg.enclosingRule>; often this is because of recursive rule references visible from the left edge of alternatives. ANTLR will re-analyze the decision with a fixed lookahead of k=1. Consider using "options {k=1;}" for that decision and possibly adding a syntactic predicate
>>
/*
RECURSION_OVERFLOW(arg) ::= <<
Recursion overflow to <arg.targetRule.name> from alternative <arg.alt> of <arg.sourceRule.name> after matching input such as <arg.input>
>>
*/
LEFT_RECURSION_CYCLES(arg) ::= <<
The following sets of rules are mutually left-recursive <arg:{c| [<c:{r|<r.name>}; separator=", ">]}; separator=" and ">
>>
/*
MULTIPLE_RECURSIVE_ALTS(arg) ::= <<
[fatal] rule <arg.ruleName> has non-LL(*) decision due to recursive rule invocations reachable from alts <arg.alts; separator=",">.  Resolve by left-factoring or using syntactic predicates or using backtrack=true option.
>>
*/
UNREACHABLE_TOKENS(tokens) ::= <<
The following token definitions can never be matched because prior tokens match the same input: <tokens; separator=",">
>>
DUPLICATE_SET_ENTRY(arg) ::=
	"duplicate token type <arg> when collapsing subrule into set"
TOKEN_NONDETERMINISM(input,conflictingTokens,paths,disabled,hasPredicateBlockedByAction) ::=
<<
<if(paths)>
Decision can match input such as "<input>" using multiple alternatives:
	<paths:{it | alt <it.alt> via ATN path <it.states; separator=","><\n>}>
<else>
Multiple token rules can match input such as "<input>": <conflictingTokens; separator=", "><\n>
<endif>
<if(disabled)><\n>As a result, token(s) <disabled; separator=","> were disabled for that input<endif><if(hasPredicateBlockedByAction)><\n>Semantic predicates were present but were hidden by actions.<endif>
>>
/* l10n for message levels */
WARNING() ::= "warning"
ERROR() ::= "error"