From 6990edd4d418b60c36af7fba07a5a43883ece2ee Mon Sep 17 00:00:00 2001
From: Alex Mykyta
Date: Mon, 28 Dec 2020 20:56:20 -0800
Subject: [PATCH] Improve python3 performance via addition of __slots__

---
 runtime/Python3/src/antlr4/BufferedTokenStream.py   |  1 +
 runtime/Python3/src/antlr4/CommonTokenFactory.py    |  4 +++-
 runtime/Python3/src/antlr4/CommonTokenStream.py     |  1 +
 runtime/Python3/src/antlr4/FileStream.py            |  1 +
 runtime/Python3/src/antlr4/InputStream.py           |  1 +
 runtime/Python3/src/antlr4/IntervalSet.py           |  1 +
 runtime/Python3/src/antlr4/LL1Analyzer.py           |  1 +
 runtime/Python3/src/antlr4/Lexer.py                 |  6 +++++-
 runtime/Python3/src/antlr4/ListTokenSource.py       |  3 ++-
 runtime/Python3/src/antlr4/Parser.py                |  5 +++++
 runtime/Python3/src/antlr4/ParserInterpreter.py     |  5 +++++
 runtime/Python3/src/antlr4/ParserRuleContext.py     |  2 +-
 runtime/Python3/src/antlr4/Recognizer.py            |  1 +
 runtime/Python3/src/antlr4/RuleContext.py           |  3 +--
 runtime/Python3/src/antlr4/Token.py                 |  2 +-
 runtime/Python3/src/antlr4/TokenStreamRewriter.py   | 10 +++++++---
 runtime/Python3/src/antlr4/atn/ATN.py               |  9 +++++++--
 runtime/Python3/src/antlr4/atn/ATNConfig.py         |  5 +++++
 runtime/Python3/src/antlr4/atn/ATNConfigSet.py      |  9 ++++++---
 .../src/antlr4/atn/ATNDeserializationOptions.py     |  2 +-
 runtime/Python3/src/antlr4/atn/ATNDeserializer.py   |  1 +
 runtime/Python3/src/antlr4/atn/ATNSimulator.py      |  2 +-
 runtime/Python3/src/antlr4/atn/ATNState.py          | 12 +++++++++++-
 runtime/Python3/src/antlr4/atn/LexerATNSimulator.py | 10 ++++++----
 runtime/Python3/src/antlr4/atn/LexerAction.py       |  9 ++++++++-
 .../Python3/src/antlr4/atn/LexerActionExecutor.py   |  3 ++-
 .../Python3/src/antlr4/atn/ParserATNSimulator.py    |  5 ++++-
 runtime/Python3/src/antlr4/atn/SemanticContext.py   |  5 ++++-
 runtime/Python3/src/antlr4/atn/Transition.py        | 13 ++++++++++++-
 runtime/Python3/src/antlr4/dfa/DFA.py               |  2 +-
 runtime/Python3/src/antlr4/dfa/DFASerializer.py     |  1 +
 runtime/Python3/src/antlr4/dfa/DFAState.py          |  6 ++++++
 runtime/Python3/src/antlr4/tree/Chunk.py            |  3 ++-
 runtime/Python3/src/antlr4/tree/ParseTreeMatch.py   |  2 +-
 runtime/Python3/src/antlr4/tree/ParseTreePattern.py |  1 +
 .../src/antlr4/tree/ParseTreePatternMatcher.py      |  1 +
 runtime/Python3/src/antlr4/tree/RuleTagToken.py     |  3 ++-
 runtime/Python3/src/antlr4/tree/TokenTagToken.py    |  2 +-
 runtime/Python3/src/antlr4/tree/Tree.py             |  3 ++-
 .../v4/tool/templates/codegen/Python3/Python3.stg   |  1 +
 40 files changed, 124 insertions(+), 33 deletions(-)

diff --git a/runtime/Python3/src/antlr4/BufferedTokenStream.py b/runtime/Python3/src/antlr4/BufferedTokenStream.py
index 341800abc..2c49de233 100644
--- a/runtime/Python3/src/antlr4/BufferedTokenStream.py
+++ b/runtime/Python3/src/antlr4/BufferedTokenStream.py
@@ -27,6 +27,7 @@ class TokenStream(object):


 class BufferedTokenStream(TokenStream):
+    __slots__ = ('tokenSource', 'tokens', 'index', 'fetchedEOF')

     def __init__(self, tokenSource:Lexer):
         # The {@link TokenSource} from which tokens for this stream are fetched.
diff --git a/runtime/Python3/src/antlr4/CommonTokenFactory.py b/runtime/Python3/src/antlr4/CommonTokenFactory.py
index 17296fab1..0d09ad785 100644
--- a/runtime/Python3/src/antlr4/CommonTokenFactory.py
+++ b/runtime/Python3/src/antlr4/CommonTokenFactory.py
@@ -15,6 +15,8 @@ class TokenFactory(object):
     pass

 class CommonTokenFactory(TokenFactory):
+    __slots__ = 'copyText'
+
     #
     # The default {@link CommonTokenFactory} instance.
     #
@@ -56,4 +58,4 @@ class CommonTokenFactory(TokenFactory):
             t.text = text
         return t

-CommonTokenFactory.DEFAULT = CommonTokenFactory()
\ No newline at end of file
+CommonTokenFactory.DEFAULT = CommonTokenFactory()
diff --git a/runtime/Python3/src/antlr4/CommonTokenStream.py b/runtime/Python3/src/antlr4/CommonTokenStream.py
index f08374422..dd0a984e3 100644
--- a/runtime/Python3/src/antlr4/CommonTokenStream.py
+++ b/runtime/Python3/src/antlr4/CommonTokenStream.py
@@ -35,6 +35,7 @@ from antlr4.Token import Token


 class CommonTokenStream(BufferedTokenStream):
+    __slots__ = 'channel'

     def __init__(self, lexer:Lexer, channel:int=Token.DEFAULT_CHANNEL):
         super().__init__(lexer)
diff --git a/runtime/Python3/src/antlr4/FileStream.py b/runtime/Python3/src/antlr4/FileStream.py
index b89238907..1c6ce9fbc 100644
--- a/runtime/Python3/src/antlr4/FileStream.py
+++ b/runtime/Python3/src/antlr4/FileStream.py
@@ -14,6 +14,7 @@ from antlr4.InputStream import InputStream


 class FileStream(InputStream):
+    __slots__ = 'fileName'

     def __init__(self, fileName:str, encoding:str='ascii', errors:str='strict'):
         super().__init__(self.readDataFrom(fileName, encoding, errors))
diff --git a/runtime/Python3/src/antlr4/InputStream.py b/runtime/Python3/src/antlr4/InputStream.py
index ca63d083a..5d077d135 100644
--- a/runtime/Python3/src/antlr4/InputStream.py
+++ b/runtime/Python3/src/antlr4/InputStream.py
@@ -12,6 +12,7 @@ from antlr4.Token import Token


 class InputStream (object):
+    __slots__ = ('name', 'strdata', '_index', 'data', '_size')

     def __init__(self, data: str):
         self.name = "<empty>"
diff --git a/runtime/Python3/src/antlr4/IntervalSet.py b/runtime/Python3/src/antlr4/IntervalSet.py
index 9742426c9..5c1a68725 100644
--- a/runtime/Python3/src/antlr4/IntervalSet.py
+++ b/runtime/Python3/src/antlr4/IntervalSet.py
@@ -11,6 +11,7 @@ from antlr4.Token import Token
 IntervalSet = None

 class IntervalSet(object):
+    __slots__ = ('intervals', 'readOnly')

     def __init__(self):
         self.intervals = None
diff --git a/runtime/Python3/src/antlr4/LL1Analyzer.py b/runtime/Python3/src/antlr4/LL1Analyzer.py
index 6b398fcd9..a200a5d0e 100644
--- a/runtime/Python3/src/antlr4/LL1Analyzer.py
+++ b/runtime/Python3/src/antlr4/LL1Analyzer.py
@@ -14,6 +14,7 @@ from antlr4.atn.Transition import WildcardTransition, NotSetTransition, Abstract


 class LL1Analyzer (object):
+    __slots__ = 'atn'

     #* Special value added to the lookahead sets to indicate that we hit
     #  a predicate during analysis if {@code seeThruPreds==false}.
diff --git a/runtime/Python3/src/antlr4/Lexer.py b/runtime/Python3/src/antlr4/Lexer.py
index 0a96b70af..82accadcb 100644
--- a/runtime/Python3/src/antlr4/Lexer.py
+++ b/runtime/Python3/src/antlr4/Lexer.py
@@ -28,6 +28,11 @@ class TokenSource(object):


 class Lexer(Recognizer, TokenSource):
+    __slots__ = (
+        '_input', '_output', '_factory', '_tokenFactorySourcePair', '_token',
+        '_tokenStartCharIndex', '_tokenStartLine', '_tokenStartColumn',
+        '_hitEOF', '_channel', '_type', '_modeStack', '_mode', '_text'
+    )

     DEFAULT_MODE = 0
     MORE = -2
@@ -322,4 +327,3 @@ class Lexer(Recognizer, TokenSource):
             else:
                 # TODO: Do we lose character or line position information?
                 self._input.consume()
-
diff --git a/runtime/Python3/src/antlr4/ListTokenSource.py b/runtime/Python3/src/antlr4/ListTokenSource.py
index eebc75641..40bcaf94a 100644
--- a/runtime/Python3/src/antlr4/ListTokenSource.py
+++ b/runtime/Python3/src/antlr4/ListTokenSource.py
@@ -18,6 +18,7 @@ from antlr4.Token import Token


 class ListTokenSource(TokenSource):
+    __slots__ = ('tokens', 'sourceName', 'pos', 'eofToken', '_factory')

     # Constructs a new {@link ListTokenSource} instance from the specified
     # collection of {@link Token} objects and source name.
@@ -140,4 +141,4 @@ class ListTokenSource(TokenSource):
         if inputStream is not None:
             return inputStream.getSourceName()
         else:
-            return "List"
\ No newline at end of file
+            return "List"
diff --git a/runtime/Python3/src/antlr4/Parser.py b/runtime/Python3/src/antlr4/Parser.py
index 11bf41796..081af14ea 100644
--- a/runtime/Python3/src/antlr4/Parser.py
+++ b/runtime/Python3/src/antlr4/Parser.py
@@ -23,6 +23,7 @@ from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher
 from antlr4.tree.Tree import ParseTreeListener, TerminalNode, ErrorNode

 class TraceListener(ParseTreeListener):
+    __slots__ = '_parser'

     def __init__(self, parser):
         self._parser = parser
@@ -44,7 +45,11 @@ class TraceListener(ParseTreeListener):

 # self is all the parsing support code essentially; most of it is error recovery stuff.#
 class Parser (Recognizer):
+    __slots__ = (
+        '_input', '_output', '_errHandler', '_precedenceStack', '_ctx',
+        'buildParseTrees', '_tracer', '_parseListeners', '_syntaxErrors'
+    )

     # self field maps from the serialized ATN string to the deserialized {@link ATN} with
     # bypass alternatives.
     #
diff --git a/runtime/Python3/src/antlr4/ParserInterpreter.py b/runtime/Python3/src/antlr4/ParserInterpreter.py
index 117f67bab..4de43623c 100644
--- a/runtime/Python3/src/antlr4/ParserInterpreter.py
+++ b/runtime/Python3/src/antlr4/ParserInterpreter.py
@@ -32,6 +32,11 @@ from antlr4.error.Errors import RecognitionException, UnsupportedOperationExcept


 class ParserInterpreter(Parser):
+    __slots__ = (
+        'grammarFileName', 'atn', 'tokenNames', 'ruleNames', 'decisionToDFA',
+        'sharedContextCache', '_parentContextStack',
+        'pushRecursionContextStates'
+    )

     def __init__(self, grammarFileName:str, tokenNames:list, ruleNames:list, atn:ATN, input:TokenStream):
         super().__init__(input)
diff --git a/runtime/Python3/src/antlr4/ParserRuleContext.py b/runtime/Python3/src/antlr4/ParserRuleContext.py
index e4ec5983a..f945c5ba9 100644
--- a/runtime/Python3/src/antlr4/ParserRuleContext.py
+++ b/runtime/Python3/src/antlr4/ParserRuleContext.py
@@ -34,7 +34,7 @@ from antlr4.tree.Tree import ParseTreeListener, ParseTree, TerminalNodeImpl, Err
 ParserRuleContext = None

 class ParserRuleContext(RuleContext):
-
+    __slots__ = ('children', 'start', 'stop', 'exception')
     def __init__(self, parent:ParserRuleContext = None, invokingStateNumber:int = None ):
         super().__init__(parent, invokingStateNumber)
         #* If we are debugging or building a parse tree for a visitor,
diff --git a/runtime/Python3/src/antlr4/Recognizer.py b/runtime/Python3/src/antlr4/Recognizer.py
index c98228847..28e84f199 100644
--- a/runtime/Python3/src/antlr4/Recognizer.py
+++ b/runtime/Python3/src/antlr4/Recognizer.py
@@ -11,6 +11,7 @@ from antlr4.error.ErrorListener import ProxyErrorListener, ConsoleErrorListener
 RecognitionException = None

 class Recognizer(object):
+    __slots__ = ('_listeners', '_interp', '_stateNumber')

     tokenTypeMapCache = dict()
     ruleIndexMapCache = dict()
diff --git a/runtime/Python3/src/antlr4/RuleContext.py b/runtime/Python3/src/antlr4/RuleContext.py
index 7f6dd9143..7812ba3b1 100644
--- a/runtime/Python3/src/antlr4/RuleContext.py
+++ b/runtime/Python3/src/antlr4/RuleContext.py
@@ -33,7 +33,7 @@ RuleContext = None
 Parser = None

 class RuleContext(RuleNode):
-
+    __slots__ = ('parentCtx', 'invokingState')
     EMPTY = None

     def __init__(self, parent:RuleContext=None, invokingState:int=-1):
@@ -225,4 +225,3 @@ class RuleContext(RuleNode):
             buf.write("]")
         return buf.getvalue()

-
diff --git a/runtime/Python3/src/antlr4/Token.py b/runtime/Python3/src/antlr4/Token.py
index 6f4d5e262..10a68a8c2 100644
--- a/runtime/Python3/src/antlr4/Token.py
+++ b/runtime/Python3/src/antlr4/Token.py
@@ -10,6 +10,7 @@ from io import StringIO


 class Token (object):
+    __slots__ = ('source', 'type', 'channel', 'start', 'stop', 'tokenIndex', 'line', 'column', '_text')

     INVALID_TYPE = 0

@@ -68,7 +69,6 @@ class Token (object):

 class CommonToken(Token):

-
     # An empty {@link Pair} which is used as the default value of
     # {@link #source} for tokens that do not have a source.
     EMPTY_SOURCE = (None, None)
diff --git a/runtime/Python3/src/antlr4/TokenStreamRewriter.py b/runtime/Python3/src/antlr4/TokenStreamRewriter.py
index 04a3af657..59baf8f47 100644
--- a/runtime/Python3/src/antlr4/TokenStreamRewriter.py
+++ b/runtime/Python3/src/antlr4/TokenStreamRewriter.py
@@ -11,6 +11,8 @@ from antlr4.CommonTokenStream import CommonTokenStream


 class TokenStreamRewriter(object):
+    __slots__ = ('tokens', 'programs', 'lastRewriteTokenIndexes')
+
     DEFAULT_PROGRAM_NAME = "default"
     PROGRAM_INIT_SIZE = 100
     MIN_TOKEN_INDEX = 0
@@ -99,7 +101,7 @@ class TokenStreamRewriter(object):

     def getProgram(self, program_name):
         return self.programs.setdefault(program_name, [])
-    
+
     def getDefaultText(self):
         return self.getText(self.DEFAULT_PROGRAM_NAME, 0, len(self.tokens.tokens) - 1)

@@ -195,6 +197,7 @@ class TokenStreamRewriter(object):
         return reduced

     class RewriteOperation(object):
+        __slots__ = ('tokens', 'index', 'text', 'instructionIndex')

         def __init__(self, tokens, index, text=""):
             """
@@ -233,8 +236,9 @@ class TokenStreamRewriter(object):

     class InsertAfterOp(InsertBeforeOp):
         pass
-    
+
     class ReplaceOp(RewriteOperation):
+        __slots__ = 'last_index'

         def __init__(self, from_idx, to_idx, tokens, text):
             super(TokenStreamRewriter.ReplaceOp, self).__init__(tokens, from_idx, text)
@@ -244,7 +248,7 @@ class TokenStreamRewriter(object):
             if self.text:
                 buf.write(self.text)
             return self.last_index + 1
-    
+
         def __str__(self):
             if self.text:
                 return '<ReplaceOp@{}..{}:"{}">'.format(self.tokens.get(self.index), self.tokens.get(self.last_index),
diff --git a/runtime/Python3/src/antlr4/atn/ATN.py b/runtime/Python3/src/antlr4/atn/ATN.py
index 4ef5640b1..2639e3c1e 100644
--- a/runtime/Python3/src/antlr4/atn/ATN.py
+++ b/runtime/Python3/src/antlr4/atn/ATN.py
@@ -12,6 +12,11 @@ from antlr4.atn.ATNState import ATNState, DecisionState


 class ATN(object):
+    __slots__ = (
+        'grammarType', 'maxTokenType', 'states', 'decisionToState',
+        'ruleToStartState', 'ruleToStopState', 'modeNameToStartState',
+        'ruleToTokenType', 'lexerActions', 'modeToStartState'
+    )

     INVALID_ALT_NUMBER = 0

@@ -58,7 +63,7 @@ class ATN(object):
         if s.nextTokenWithinRule is not None:
             return s.nextTokenWithinRule
         s.nextTokenWithinRule = self.nextTokensInContext(s, None)
-        s.nextTokenWithinRule.readonly = True
+        s.nextTokenWithinRule.readOnly = True
         return s.nextTokenWithinRule

     def nextTokens(self, s:ATNState, ctx:RuleContext = None):
@@ -124,4 +129,4 @@ class ATN(object):
             ctx = ctx.parentCtx
         if Token.EPSILON in following:
             expected.addOne(Token.EOF)
-        return expected
\ No newline at end of file
+        return expected
diff --git a/runtime/Python3/src/antlr4/atn/ATNConfig.py b/runtime/Python3/src/antlr4/atn/ATNConfig.py
index e2d8b99cc..e008fb2ef 100644
--- a/runtime/Python3/src/antlr4/atn/ATNConfig.py
+++ b/runtime/Python3/src/antlr4/atn/ATNConfig.py
@@ -21,6 +21,10 @@ from antlr4.atn.SemanticContext import SemanticContext
 ATNConfig = None

 class ATNConfig(object):
+    __slots__ = (
+        'state', 'alt', 'context', 'semanticContext', 'reachesIntoOuterContext',
+        'precedenceFilterSuppressed'
+    )

     def __init__(self, state:ATNState=None, alt:int=None, context:PredictionContext=None, semantic:SemanticContext=None, config:ATNConfig=None):
         if config is not None:
@@ -110,6 +114,7 @@ class ATNConfig(object):
 LexerATNConfig = None

 class LexerATNConfig(ATNConfig):
+    __slots__ = ('lexerActionExecutor', 'passedThroughNonGreedyDecision')

     def __init__(self, state:ATNState, alt:int=None, context:PredictionContext=None, semantic:SemanticContext=SemanticContext.NONE,
                  lexerActionExecutor:LexerActionExecutor=None, config:LexerATNConfig=None):
diff --git a/runtime/Python3/src/antlr4/atn/ATNConfigSet.py b/runtime/Python3/src/antlr4/atn/ATNConfigSet.py
index b240c25dc..9e9a512a6 100644
--- a/runtime/Python3/src/antlr4/atn/ATNConfigSet.py
+++ b/runtime/Python3/src/antlr4/atn/ATNConfigSet.py
@@ -20,6 +20,12 @@ from antlr4.error.Errors import UnsupportedOperationException, IllegalStateExcep
 ATNSimulator = None

 class ATNConfigSet(object):
+    __slots__ = (
+        'configLookup', 'fullCtx', 'readonly', 'configs', 'uniqueAlt',
+        'conflictingAlts', 'hasSemanticContext', 'dipsIntoOuterContext',
+        'cachedHashCode'
+    )
+
     #
     # The reason that we need this is because we don't want the hash map to use
     # the standard hash code and equals. We need all configurations with the same
@@ -204,6 +210,3 @@ class OrderedATNConfigSet(ATNConfigSet):

     def __init__(self):
         super().__init__()
-
-
-
diff --git a/runtime/Python3/src/antlr4/atn/ATNDeserializationOptions.py b/runtime/Python3/src/antlr4/atn/ATNDeserializationOptions.py
index 9c4e23dec..a2af6b4d0 100644
--- a/runtime/Python3/src/antlr4/atn/ATNDeserializationOptions.py
+++ b/runtime/Python3/src/antlr4/atn/ATNDeserializationOptions.py
@@ -6,6 +6,7 @@ ATNDeserializationOptions = None


 class ATNDeserializationOptions(object):
+    __slots__ = ('readOnly', 'verifyATN', 'generateRuleBypassTransitions')

     defaultOptions = None

@@ -21,4 +22,3 @@ class ATNDeserializationOptions(object):

 ATNDeserializationOptions.defaultOptions = ATNDeserializationOptions()
 ATNDeserializationOptions.defaultOptions.readOnly = True
-
diff --git a/runtime/Python3/src/antlr4/atn/ATNDeserializer.py b/runtime/Python3/src/antlr4/atn/ATNDeserializer.py
index cd0bb661a..cc100d05a 100644
--- a/runtime/Python3/src/antlr4/atn/ATNDeserializer.py
+++ b/runtime/Python3/src/antlr4/atn/ATNDeserializer.py
@@ -31,6 +31,7 @@ SERIALIZED_VERSION = 3
 SERIALIZED_UUID = ADDED_UNICODE_SMP

 class ATNDeserializer (object):
+    __slots__ = ('deserializationOptions', 'data', 'pos', 'uuid')

     def __init__(self, options : ATNDeserializationOptions = None):
         if options is None:
diff --git a/runtime/Python3/src/antlr4/atn/ATNSimulator.py b/runtime/Python3/src/antlr4/atn/ATNSimulator.py
index 26c0b94af..4f6f53f48 100644
--- a/runtime/Python3/src/antlr4/atn/ATNSimulator.py
+++ b/runtime/Python3/src/antlr4/atn/ATNSimulator.py
@@ -10,6 +10,7 @@ from antlr4.dfa.DFAState import DFAState


 class ATNSimulator(object):
+    __slots__ = ('atn', 'sharedContextCache', '__dict__')

     # Must distinguish between missing edge and edge we know leads nowhere#/
     ERROR = DFAState(configs=ATNConfigSet())
@@ -44,4 +45,3 @@ class ATNSimulator(object):
             return context
         visited = dict()
         return getCachedPredictionContext(context, self.sharedContextCache, visited)
-
diff --git a/runtime/Python3/src/antlr4/atn/ATNState.py b/runtime/Python3/src/antlr4/atn/ATNState.py
index 97ade9559..fbf6a7b94 100644
--- a/runtime/Python3/src/antlr4/atn/ATNState.py
+++ b/runtime/Python3/src/antlr4/atn/ATNState.py
@@ -69,6 +69,10 @@ from antlr4.atn.Transition import Transition
 INITIAL_NUM_TRANSITIONS = 4

 class ATNState(object):
+    __slots__ = (
+        'atn', 'stateNumber', 'stateType', 'ruleIndex', 'epsilonOnlyTransitions',
+        'transitions', 'nextTokenWithinRule',
+    )

     # constants for serialization
     INVALID_TYPE = 0
@@ -148,7 +152,7 @@ class BasicState(ATNState):


 class DecisionState(ATNState):
-
+    __slots__ = ('decision', 'nonGreedy')
     def __init__(self):
         super().__init__()
         self.decision = -1
@@ -156,6 +160,7 @@ class DecisionState(ATNState):

 #  The start of a regular {@code (...)} block.
 class BlockStartState(DecisionState):
+    __slots__ = 'endState'

     def __init__(self):
         super().__init__()
@@ -169,6 +174,7 @@ class BasicBlockStartState(BlockStartState):

 # Terminal node of a simple {@code (a|b|c)} block.
 class BlockEndState(ATNState):
+    __slots__ = 'startState'

     def __init__(self):
         super().__init__()
@@ -187,6 +193,7 @@ class RuleStopState(ATNState):
         self.stateType = self.RULE_STOP

 class RuleStartState(ATNState):
+    __slots__ = ('stopState', 'isPrecedenceRule')

     def __init__(self):
         super().__init__()
@@ -209,6 +216,7 @@ class PlusLoopbackState(DecisionState):
 #  real decision-making note for {@code A+}.
 #
 class PlusBlockStartState(BlockStartState):
+    __slots__ = 'loopBackState'

     def __init__(self):
         super().__init__()
@@ -230,6 +238,7 @@ class StarLoopbackState(ATNState):


 class StarLoopEntryState(DecisionState):
+    __slots__ = ('loopBackState', 'isPrecedenceDecision')

     def __init__(self):
         super().__init__()
@@ -240,6 +249,7 @@ class StarLoopEntryState(DecisionState):

 # Mark the end of a * or + loop.
 class LoopEndState(ATNState):
+    __slots__ = 'loopBackState'

     def __init__(self):
         super().__init__()
diff --git a/runtime/Python3/src/antlr4/atn/LexerATNSimulator.py b/runtime/Python3/src/antlr4/atn/LexerATNSimulator.py
index 4c4468bb6..71201ff5f 100644
--- a/runtime/Python3/src/antlr4/atn/LexerATNSimulator.py
+++ b/runtime/Python3/src/antlr4/atn/LexerATNSimulator.py
@@ -34,6 +34,7 @@ from antlr4.dfa.DFAState import DFAState
 from antlr4.error.Errors import LexerNoViableAltException, UnsupportedOperationException

 class SimState(object):
+    __slots__ = ('index', 'line', 'column', 'dfaState')

     def __init__(self):
         self.reset()
@@ -49,6 +50,10 @@ Lexer = None
 LexerATNSimulator = None

 class LexerATNSimulator(ATNSimulator):
+    __slots__ = (
+        'decisionToDFA', 'recog', 'startIndex', 'line', 'column', 'mode',
+        'DEFAULT_MODE', 'MAX_CHAR_VALUE', 'prevAccept'
+    )

     debug = False
     dfa_debug = False
@@ -58,8 +63,6 @@ class LexerATNSimulator(ATNSimulator):

     ERROR = None

-    match_calls = 0
-
     def __init__(self, recog:Lexer, atn:ATN, decisionToDFA:list, sharedContextCache:PredictionContextCache):
         super().__init__(atn, sharedContextCache)
         self.decisionToDFA = decisionToDFA
@@ -89,7 +92,6 @@ class LexerATNSimulator(ATNSimulator):
         self.startIndex = simulator.startIndex

     def match(self, input:InputStream , mode:int):
-        self.match_calls += 1
         self.mode = mode
         mark = input.mark()
         try:
@@ -565,4 +567,4 @@ class LexerATNSimulator(ATNSimulator):

 LexerATNSimulator.ERROR = DFAState(0x7FFFFFFF, ATNConfigSet())

-del Lexer
\ No newline at end of file
+del Lexer
diff --git a/runtime/Python3/src/antlr4/atn/LexerAction.py b/runtime/Python3/src/antlr4/atn/LexerAction.py
index 5d11f21ae..0fa7a895f 100644
--- a/runtime/Python3/src/antlr4/atn/LexerAction.py
+++ b/runtime/Python3/src/antlr4/atn/LexerAction.py
@@ -22,6 +22,7 @@ class LexerActionType(IntEnum):
     TYPE = 7 #The type of a {@link LexerTypeAction} action.

 class LexerAction(object):
+    __slots__ = ('actionType', 'isPositionDependent')

     def __init__(self, action:LexerActionType):
         self.actionType = action
@@ -39,7 +40,7 @@ class LexerAction(object):
 #
 # <p>The {@code skip} command does not have any parameters, so this action is
 # implemented as a singleton instance exposed by {@link #INSTANCE}.</p>
-class LexerSkipAction(LexerAction ):
+class LexerSkipAction(LexerAction):

     # Provides a singleton instance of this parameterless lexer action.
     INSTANCE = None
@@ -58,6 +59,7 @@ LexerSkipAction.INSTANCE = LexerSkipAction()
 # Implements the {@code type} lexer action by calling {@link Lexer#setType}
 # with the assigned type.
 class LexerTypeAction(LexerAction):
+    __slots__ = 'type'

     def __init__(self, type:int):
         super().__init__(LexerActionType.TYPE)
@@ -84,6 +86,7 @@ class LexerTypeAction(LexerAction):
 # Implements the {@code pushMode} lexer action by calling
 # {@link Lexer#pushMode} with the assigned mode.
 class LexerPushModeAction(LexerAction):
+    __slots__ = 'mode'

     def __init__(self, mode:int):
         super().__init__(LexerActionType.PUSH_MODE)
@@ -152,6 +155,7 @@ LexerMoreAction.INSTANCE = LexerMoreAction()
 # Implements the {@code mode} lexer action by calling {@link Lexer#mode} with
 # the assigned mode.
 class LexerModeAction(LexerAction):
+    __slots__ = 'mode'

     def __init__(self, mode:int):
         super().__init__(LexerActionType.MODE)
@@ -186,6 +190,7 @@ class LexerModeAction(LexerAction):
 # command argument could not be evaluated when the grammar was compiled.</p>
 class LexerCustomAction(LexerAction):
+    __slots__ = ('ruleIndex', 'actionIndex')

     # Constructs a custom lexer action with the specified rule and action
     # indexes.
@@ -220,6 +225,7 @@ class LexerCustomAction(LexerAction):
 # Implements the {@code channel} lexer action by calling
 # {@link Lexer#setChannel} with the assigned channel.
 class LexerChannelAction(LexerAction):
+    __slots__ = 'channel'

     # Constructs a new {@code channel} action with the specified channel value.
     # @param channel The channel value to pass to {@link Lexer#setChannel}.
@@ -255,6 +261,7 @@ class LexerChannelAction(LexerAction):
 # lexer actions, see {@link LexerActionExecutor#append} and
 # {@link LexerActionExecutor#fixOffsetBeforeMatch}.</p>
 class LexerIndexedCustomAction(LexerAction):
+    __slots__ = ('offset', 'action')

     # Constructs a new indexed custom action by associating a character offset
     # with a {@link LexerAction}.
diff --git a/runtime/Python3/src/antlr4/atn/LexerActionExecutor.py b/runtime/Python3/src/antlr4/atn/LexerActionExecutor.py
index df125169f..5c6462c3a 100644
--- a/runtime/Python3/src/antlr4/atn/LexerActionExecutor.py
+++ b/runtime/Python3/src/antlr4/atn/LexerActionExecutor.py
@@ -20,6 +20,7 @@ Lexer = None
 LexerActionExecutor = None

 class LexerActionExecutor(object):
+    __slots__ = ('lexerActions', 'hashCode')

     def __init__(self, lexerActions:list=list()):
         self.lexerActions = lexerActions
@@ -139,4 +140,4 @@ class LexerActionExecutor(object):
         return self.hashCode == other.hashCode \
             and self.lexerActions == other.lexerActions

-del Lexer
\ No newline at end of file
+del Lexer
diff --git a/runtime/Python3/src/antlr4/atn/ParserATNSimulator.py b/runtime/Python3/src/antlr4/atn/ParserATNSimulator.py
index 9948f4be3..d1fb3d7ed 100644
--- a/runtime/Python3/src/antlr4/atn/ParserATNSimulator.py
+++ b/runtime/Python3/src/antlr4/atn/ParserATNSimulator.py
@@ -255,6 +255,10 @@ from antlr4.error.Errors import NoViableAltException


 class ParserATNSimulator(ATNSimulator):
+    __slots__ = (
+        'parser', 'decisionToDFA', 'predictionMode', '_input', '_startIndex',
+        '_outerContext', '_dfa', 'mergeCache'
+    )

     debug = False
     debug_list_atn_decisions = False
@@ -1643,4 +1647,3 @@ class ParserATNSimulator(ATNSimulator):
                         ", input=" + self.parser.getTokenStream().getText(startIndex, stopIndex))
         if self.parser is not None:
             self.parser.getErrorListenerDispatch().reportAmbiguity(self.parser, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
-
diff --git a/runtime/Python3/src/antlr4/atn/SemanticContext.py b/runtime/Python3/src/antlr4/atn/SemanticContext.py
index d4593195e..8f4dc3108 100644
--- a/runtime/Python3/src/antlr4/atn/SemanticContext.py
+++ b/runtime/Python3/src/antlr4/atn/SemanticContext.py
@@ -95,6 +95,7 @@ def filterPrecedencePredicates(collection:set):


 class Predicate(SemanticContext):
+    __slots__ = ('ruleIndex', 'predIndex', 'isCtxDependent')

     def __init__(self, ruleIndex:int=-1, predIndex:int=-1, isCtxDependent:bool=False):
         self.ruleIndex = ruleIndex
@@ -153,6 +154,7 @@ class PrecedencePredicate(SemanticContext):
 # is false.
 del AND
 class AND(SemanticContext):
+    __slots__ = 'opnds'

     def __init__(self, a:SemanticContext, b:SemanticContext):
         operands = set()
@@ -238,6 +240,7 @@ class AND(SemanticContext):
 # contexts is true.
 del OR
 class OR (SemanticContext):
+    __slots__ = 'opnds'

     def __init__(self, a:SemanticContext, b:SemanticContext):
         operands = set()
@@ -317,4 +320,4 @@ class OR (SemanticContext):
         return buf.getvalue()


-SemanticContext.NONE = Predicate()
\ No newline at end of file
+SemanticContext.NONE = Predicate()
diff --git a/runtime/Python3/src/antlr4/atn/Transition.py b/runtime/Python3/src/antlr4/atn/Transition.py
index 0ed042cda..2e4c99717 100644
--- a/runtime/Python3/src/antlr4/atn/Transition.py
+++ b/runtime/Python3/src/antlr4/atn/Transition.py
@@ -26,6 +26,8 @@ ATNState = None
 RuleStartState = None

 class Transition (object):
+    __slots__ = ('target','isEpsilon','label')
+
     # constants for serialization
     EPSILON = 1
     RANGE = 2
@@ -66,6 +68,7 @@ class Transition (object):

 # TODO: make all transitions sets? no, should remove set edges
 class AtomTransition(Transition):
+    __slots__ = ('label_', 'serializationType')

     def __init__(self, target:ATNState, label:int):
         super().__init__(target)
@@ -85,6 +88,7 @@ class AtomTransition(Transition):
         return str(self.label_)

 class RuleTransition(Transition):
+    __slots__ = ('ruleIndex', 'precedence', 'followState', 'serializationType')

     def __init__(self, ruleStart:RuleStartState, ruleIndex:int, precedence:int, followState:ATNState):
         super().__init__(ruleStart)
@@ -99,6 +103,7 @@ class RuleTransition(Transition):


 class EpsilonTransition(Transition):
+    __slots__ = ('serializationType', 'outermostPrecedenceReturn')

     def __init__(self, target, outermostPrecedenceReturn=-1):
         super(EpsilonTransition, self).__init__(target)
@@ -113,6 +118,7 @@ class EpsilonTransition(Transition):
         return "epsilon"

 class RangeTransition(Transition):
+    __slots__ = ('serializationType', 'start', 'stop')

     def __init__(self, target:ATNState, start:int, stop:int):
         super().__init__(target)
@@ -139,6 +145,7 @@ class AbstractPredicateTransition(Transition):


 class PredicateTransition(AbstractPredicateTransition):
+    __slots__ = ('serializationType', 'ruleIndex', 'predIndex', 'isCtxDependent')

     def __init__(self, target:ATNState, ruleIndex:int, predIndex:int, isCtxDependent:bool):
         super().__init__(target)
@@ -158,6 +165,7 @@ class PredicateTransition(AbstractPredicateTransition):
         return "pred_" + str(self.ruleIndex) + ":" + str(self.predIndex)

 class ActionTransition(Transition):
+    __slots__ = ('serializationType', 'ruleIndex', 'actionIndex', 'isCtxDependent')

     def __init__(self, target:ATNState, ruleIndex:int, actionIndex:int=-1, isCtxDependent:bool=False):
         super().__init__(target)
@@ -175,6 +183,7 @@ class ActionTransition(Transition):

 # A transition containing a set of values.
 class SetTransition(Transition):
+    __slots__ = 'serializationType'

     def __init__(self, target:ATNState, set:IntervalSet):
         super().__init__(target)
@@ -207,6 +216,7 @@ class NotSetTransition(SetTransition):


 class WildcardTransition(Transition):
+    __slots__ = 'serializationType'

     def __init__(self, target:ATNState):
         super().__init__(target)
@@ -220,6 +230,7 @@ class WildcardTransition(Transition):


 class PrecedencePredicateTransition(AbstractPredicateTransition):
+    __slots__ = ('serializationType', 'precedence')

     def __init__(self, target:ATNState, precedence:int):
         super().__init__(target)
@@ -254,4 +265,4 @@ Transition.serializationTypes = {

 del ATNState
 del RuleStartState
-from antlr4.atn.ATNState import *
\ No newline at end of file
+from antlr4.atn.ATNState import *
diff --git a/runtime/Python3/src/antlr4/dfa/DFA.py b/runtime/Python3/src/antlr4/dfa/DFA.py
index af6839ca0..d80589a68 100644
--- a/runtime/Python3/src/antlr4/dfa/DFA.py
+++ b/runtime/Python3/src/antlr4/dfa/DFA.py
@@ -11,6 +11,7 @@ from antlr4.error.Errors import IllegalStateException


 class DFA(object):
+    __slots__ = ('atnStartState', 'decision', '_states', 's0', 'precedenceDfa')

     def __init__(self, atnStartState:DecisionState, decision:int=0):
         # From which ATN state did we create this DFA?
@@ -130,4 +131,3 @@ class DFA(object):
         from antlr4.dfa.DFASerializer import LexerDFASerializer
         serializer = LexerDFASerializer(self)
         return str(serializer)
-
diff --git a/runtime/Python3/src/antlr4/dfa/DFASerializer.py b/runtime/Python3/src/antlr4/dfa/DFASerializer.py
index eeb6e366c..bca0727b7 100644
--- a/runtime/Python3/src/antlr4/dfa/DFASerializer.py
+++ b/runtime/Python3/src/antlr4/dfa/DFASerializer.py
@@ -12,6 +12,7 @@ from antlr4.dfa.DFAState import DFAState


 class DFASerializer(object):
+    __slots__ = ('dfa', 'literalNames', 'symbolicNames')

     def __init__(self, dfa:DFA, literalNames:list=None, symbolicNames:list=None):
         self.dfa = dfa
diff --git a/runtime/Python3/src/antlr4/dfa/DFAState.py b/runtime/Python3/src/antlr4/dfa/DFAState.py
index d7af5a172..51955a448 100644
--- a/runtime/Python3/src/antlr4/dfa/DFAState.py
+++ b/runtime/Python3/src/antlr4/dfa/DFAState.py
@@ -11,6 +11,8 @@ from antlr4.atn.SemanticContext import SemanticContext


 class PredPrediction(object):
+    __slots__ = ('alt', 'pred')
+
     def __init__(self, pred:SemanticContext, alt:int):
         self.alt = alt
         self.pred = pred
@@ -43,6 +45,10 @@ class PredPrediction(object):
 # meaning that state was reached via a different set of rule invocations.
 #/
 class DFAState(object):
+    __slots__ = (
+        'stateNumber', 'configs', 'edges', 'isAcceptState', 'prediction',
+        'lexerActionExecutor', 'requiresFullContext', 'predicates'
+    )

     def __init__(self, stateNumber:int=-1, configs:ATNConfigSet=ATNConfigSet()):
         self.stateNumber = stateNumber
diff --git a/runtime/Python3/src/antlr4/tree/Chunk.py b/runtime/Python3/src/antlr4/tree/Chunk.py
index a2fd16c97..081419a34 100644
--- a/runtime/Python3/src/antlr4/tree/Chunk.py
+++ b/runtime/Python3/src/antlr4/tree/Chunk.py
@@ -8,6 +8,7 @@ class Chunk(object):
     pass

 class TagChunk(Chunk):
+    __slots__ = ('tag', 'label')

     def __init__(self, tag:str, label:str=None):
         self.tag = tag
@@ -20,10 +21,10 @@ class TagChunk(Chunk):
             return self.label + ":" + self.tag

 class TextChunk(Chunk):
+    __slots__ = 'text'

     def __init__(self, text:str):
         self.text = text

     def __str__(self):
         return "'" + self.text + "'"
-
diff --git a/runtime/Python3/src/antlr4/tree/ParseTreeMatch.py b/runtime/Python3/src/antlr4/tree/ParseTreeMatch.py
index bbda73e8f..c02bc0357 100644
--- a/runtime/Python3/src/antlr4/tree/ParseTreeMatch.py
+++ b/runtime/Python3/src/antlr4/tree/ParseTreeMatch.py
@@ -14,7 +14,7 @@ from antlr4.tree.Tree import ParseTree


 class ParseTreeMatch(object):
-
+    __slots__ = ('tree', 'pattern', 'labels', 'mismatchedNode')
     #
     # Constructs a new instance of {@link ParseTreeMatch} from the specified
     # parse tree and pattern.
diff --git a/runtime/Python3/src/antlr4/tree/ParseTreePattern.py b/runtime/Python3/src/antlr4/tree/ParseTreePattern.py
index 1abb880d3..37fd0bf09 100644
--- a/runtime/Python3/src/antlr4/tree/ParseTreePattern.py
+++ b/runtime/Python3/src/antlr4/tree/ParseTreePattern.py
@@ -14,6 +14,7 @@ from antlr4.xpath.XPath import XPath


 class ParseTreePattern(object):
+    __slots__ = ('matcher', 'patternRuleIndex', 'pattern', 'patternTree')

     # Construct a new instance of the {@link ParseTreePattern} class.
     #
diff --git a/runtime/Python3/src/antlr4/tree/ParseTreePatternMatcher.py b/runtime/Python3/src/antlr4/tree/ParseTreePatternMatcher.py
index 07b96408c..62fd197b0 100644
--- a/runtime/Python3/src/antlr4/tree/ParseTreePatternMatcher.py
+++ b/runtime/Python3/src/antlr4/tree/ParseTreePatternMatcher.py
@@ -89,6 +89,7 @@ class StartRuleDoesNotConsumeFullPattern(Exception):


 class ParseTreePatternMatcher(object):
+    __slots__ = ('lexer', 'parser', 'start', 'stop', 'escape')

     # Constructs a {@link ParseTreePatternMatcher} or from a {@link Lexer} and
     # {@link Parser} object. The lexer input stream is altered for tokenizing
diff --git a/runtime/Python3/src/antlr4/tree/RuleTagToken.py b/runtime/Python3/src/antlr4/tree/RuleTagToken.py
index 7b2018fe7..a198f7da1 100644
--- a/runtime/Python3/src/antlr4/tree/RuleTagToken.py
+++ b/runtime/Python3/src/antlr4/tree/RuleTagToken.py
@@ -13,7 +13,8 @@ from antlr4.Token import Token


 class RuleTagToken(Token):
-    #
+    __slots__ = ('label', 'ruleName')
+    #
     # Constructs a new instance of {@link RuleTagToken} with the specified rule
     # name, bypass token type, and label.
     #
diff --git a/runtime/Python3/src/antlr4/tree/TokenTagToken.py b/runtime/Python3/src/antlr4/tree/TokenTagToken.py
index d00327ae9..b7beeb876 100644
--- a/runtime/Python3/src/antlr4/tree/TokenTagToken.py
+++ b/runtime/Python3/src/antlr4/tree/TokenTagToken.py
@@ -13,7 +13,7 @@ from antlr4.Token import CommonToken

 class TokenTagToken(CommonToken):
-
+    __slots__ = ('tokenName', 'label')
     #
     # Constructs a new instance of {@link TokenTagToken} with the specified
     # token name, type, and label.
     #
diff --git a/runtime/Python3/src/antlr4/tree/Tree.py b/runtime/Python3/src/antlr4/tree/Tree.py
index 68660b48b..812acc96b 100644
--- a/runtime/Python3/src/antlr4/tree/Tree.py
+++ b/runtime/Python3/src/antlr4/tree/Tree.py
@@ -80,6 +80,7 @@ class ParseTreeListener(object):
 del ParserRuleContext

 class TerminalNodeImpl(TerminalNode):
+    __slots__ = ('parentCtx', 'symbol')

     def __init__(self, symbol:Token):
         self.parentCtx = None
@@ -187,4 +188,4 @@ class ParseTreeWalker(object):
             ctx.exitRule(listener)
         listener.exitEveryRule(ctx)

-ParseTreeWalker.DEFAULT = ParseTreeWalker()
\ No newline at end of file
+ParseTreeWalker.DEFAULT = ParseTreeWalker()
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/Python3/Python3.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/Python3/Python3.stg
index 1fdceee02..d140615cf 100644
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/Python3/Python3.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/Python3/Python3.stg
@@ -652,6 +652,7 @@ CaptureNextTokenType(d) ::= "<d.varName> = self._input.LA(1)"

 StructDecl(struct,ctorAttrs,attrs,getters,dispatchMethods,interfaces,extensionMembers) ::= <<
 class <struct.name>(ParserRuleContext):
+    __slots__ = 'parser'

     def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1<struct.ctorAttrs:{a | , <a.name>=None}>):
         super().__init__(parent, invokingState)
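
Background on the performance claim in the subject line: __slots__ replaces the
per-instance __dict__ with fixed attribute descriptors, so the many small objects
a parse allocates (tokens, ATN configs, DFA states) get smaller, and attribute
reads become fixed-offset loads instead of hash-table probes. A minimal sketch of
both effects; Plain and Slotted are hypothetical stand-ins, not runtime classes:

import sys
import timeit

class Plain:                     # hypothetical stand-in without __slots__
    def __init__(self):
        self.state = None
        self.alt = 0
        self.context = None

class Slotted:                   # same shape, with __slots__
    __slots__ = ('state', 'alt', 'context')
    def __init__(self):
        self.state = None
        self.alt = 0
        self.context = None

p, s = Plain(), Slotted()

# Memory: the plain instance drags a per-instance dict along; the slotted
# one stores its attributes in fixed descriptors on the class.
print(sys.getsizeof(p) + sys.getsizeof(p.__dict__), "bytes: plain + its dict")
print(sys.getsizeof(s), "bytes: slotted")
print(hasattr(s, '__dict__'))    # False: no per-instance dict at all

# Speed: attribute reads skip the instance-dict probe.
print(timeit.timeit('o.alt', globals={'o': p}))
print(timeit.timeit('o.alt', globals={'o': s}))

Exact byte counts and timings vary by CPython version, but the slotted class
reliably avoids the per-instance dict and reads attributes faster.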
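
Two subtleties in the hunks above are worth spelling out. First, ATNSimulator
lists '__dict__' inside its __slots__, which re-enables dynamic attributes,
presumably so subclasses outside the runtime that attach ad-hoc fields keep
working (an inference; the patch does not state the motivation). Second, slots
only pay off when every class in the hierarchy declares them, which is why the
change reaches into the Python3.stg template so that generated rule contexts
declare __slots__ = 'parser' as well. A short sketch of both behaviors, using
hypothetical classes:

class Base:
    # '__dict__' in __slots__ restores dynamic attributes alongside the
    # fixed slots, mirroring the ATNSimulator hunk above.
    __slots__ = ('atn', 'cache', '__dict__')

b = Base()
b.atn = None          # stored in a fixed slot
b.debug_flag = True   # stored in the re-enabled __dict__; without '__dict__'
                      # in __slots__ this assignment would raise AttributeError

class NoSlots:        # a base class without __slots__ ...
    pass

class Slotted(NoSlots):
    __slots__ = ('x',)

s = Slotted()
s.y = 1               # ... silently defeats the optimization: the inherited
                      # per-instance __dict__ comes back, so s.y is accepted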