remove slow asserts
commit 27855f7ed4
parent d7d59c53a7

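Why this matters (a sketch under assumptions, not code from this commit): CPython evaluates every assert at run time unless the interpreter is started with -O, so asserts inside hot methods such as BufferedTokenStream.sync() and the PredictionContext constructors are paid on every token and every context merge. The stand-in functions below only illustrate the per-call cost:

import timeit

tokens = list(range(1000))

def sync_checked(i):
    # mimics the shape of BufferedTokenStream.sync(); a stand-in, not runtime code
    assert i >= 0
    return i - len(tokens) + 1

def sync_unchecked(i):
    # same body with the assert removed, as this commit does
    return i - len(tokens) + 1

if __name__ == '__main__':
    print("with assert:   ", timeit.timeit(lambda: sync_checked(500), number=1000000))
    print("without assert:", timeit.timeit(lambda: sync_unchecked(500), number=1000000))
    # running under `python -O` strips asserts at compile time instead of editing the source
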
@@ -128,7 +128,6 @@ class BufferedTokenStream(TokenStream):
     # @see #get(int i)
     #/
     def sync(self, i):
-        assert i >= 0
         n = i - len(self.tokens) + 1 # how many more elements we need?
         if n > 0 :
             fetched = self.fetch(n)

@@ -142,7 +142,6 @@ class SingletonPredictionContext(PredictionContext):
             return SingletonPredictionContext(parent, returnState)

     def __init__(self, parent, returnState):
-        assert returnState!=ATNState.INVALID_STATE_NUMBER
         hashCode = calculateHashCode(parent, returnState)
         super(SingletonPredictionContext, self).__init__(hashCode)
         self.parentCtx = parent

@@ -152,11 +151,9 @@ class SingletonPredictionContext(PredictionContext):
         return 1

     def getParent(self, index):
-        assert index == 0
         return self.parentCtx

     def getReturnState(self, index):
-        assert index == 0
         return self.returnState

     def __eq__(self, other):

@@ -166,8 +163,6 @@ class SingletonPredictionContext(PredictionContext):
             return False
         elif not isinstance(other, SingletonPredictionContext):
             return False
-        elif hash(self) != hash(other):
-            return False # can't be same if hash is different
         else:
             return self.returnState == other.returnState and self.parentCtx==other.parentCtx

@@ -212,8 +207,6 @@ class ArrayPredictionContext(PredictionContext):

     def __init__(self, parents, returnStates):
         super(ArrayPredictionContext, self).__init__(calculateListsHashCode(parents, returnStates))
-        assert parents is not None and len(parents)>0
-        assert returnStates is not None and len(returnStates)>0
         self.parents = parents
         self.returnStates = returnStates

@@ -281,17 +274,7 @@ def PredictionContextFromRuleContext(atn, outerContext=None):
     return SingletonPredictionContext.create(parent, transition.followState.stateNumber)


-def calculateListsHashCode(parents, returnStates ):
-
-    with StringIO() as s:
-        for parent in parents:
-            s.write(unicode(parent))
-        for returnState in returnStates:
-            s.write(unicode(returnState))
-        return hash(s.getvalue())
-
 def merge(a, b, rootIsWildcard, mergeCache):
-    assert a is not None and b is not None # must be empty context, never null

     # share same graph if both same
     if a==b:

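The deleted helper serialized every parent and return state through StringIO just to hash the resulting string once per ArrayPredictionContext. A minimal sketch of a string-free alternative, given only as an assumed illustration of the idea rather than the replacement this commit actually relies on:

def calculateListsHashCode(parents, returnStates):
    # hypothetical variant: hash a tuple of per-parent hashes and the raw
    # return states instead of building an intermediate string
    return hash((tuple(hash(p) for p in parents), tuple(returnStates)))
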
@@ -491,9 +474,9 @@ def mergeArrays(a, b, rootIsWildcard, mergeCache):
             return previous

     # merge sorted payloads a + b => M
-    i = 0; # walks a
-    j = 0; # walks b
-    k = 0; # walks target M array
+    i = 0 # walks a
+    j = 0 # walks b
+    k = 0 # walks target M array

     mergedReturnStates = [] * (len(a.returnState) + len( b.returnStates))
     mergedParents = [] * len(mergedReturnStates)

@@ -131,7 +131,6 @@ class BufferedTokenStream(TokenStream):
     # @see #get(int i)
     #/
     def sync(self, i:int):
-        assert i >= 0
         n = i - len(self.tokens) + 1 # how many more elements we need?
         if n > 0 :
             fetched = self.fetch(n)

@@ -139,7 +139,6 @@ class SingletonPredictionContext(PredictionContext):
             return SingletonPredictionContext(parent, returnState)

     def __init__(self, parent:PredictionContext, returnState:int):
-        assert returnState!=ATNState.INVALID_STATE_NUMBER
         hashCode = calculateHashCode(parent, returnState)
         super().__init__(hashCode)
         self.parentCtx = parent

@@ -149,11 +148,9 @@ class SingletonPredictionContext(PredictionContext):
         return 1

     def getParent(self, index:int):
-        assert index == 0
         return self.parentCtx

     def getReturnState(self, index:int):
-        assert index == 0
         return self.returnState

     def __eq__(self, other):

@@ -163,10 +160,8 @@ class SingletonPredictionContext(PredictionContext):
             return False
         elif not isinstance(other, SingletonPredictionContext):
             return False
-        elif hash(self) != hash(other):
-            return False # can't be same if hash is different
         else:
-            return self.returnState == other.returnState and self.parentCtx==other.parentCtx
+            return self.returnState == other.returnState and self.parentCtx == other.parentCtx

     def __hash__(self):
         return self.cachedHashCode

@@ -209,8 +204,6 @@ class ArrayPredictionContext(PredictionContext):

     def __init__(self, parents:list, returnStates:list):
         super().__init__(calculateListsHashCode(parents, returnStates))
-        assert parents is not None and len(parents)>0
-        assert returnStates is not None and len(returnStates)>0
         self.parents = parents
         self.returnStates = returnStates

@@ -279,7 +272,6 @@ def PredictionContextFromRuleContext(atn:ATN, outerContext:RuleContext=None):


 def merge(a:PredictionContext, b:PredictionContext, rootIsWildcard:bool, mergeCache:dict):
-    assert a is not None and b is not None # must be empty context, never null

     # share same graph if both same
     if a==b:

@@ -539,9 +539,6 @@ class LexerATNSimulator(ATNSimulator):
     # configuration containing an ATN rule stop state. Later, when
     # traversing the DFA, we will know which rule to accept.
     def addDFAState(self, configs:ATNConfigSet) -> DFAState:
-        # the lexer evaluates predicates on-the-fly; by this point configs
-        # should not contain any configurations with unevaluated predicates.
-        assert not configs.hasSemanticContext

         proposed = DFAState(configs=configs)
         firstConfigWithRuleStopState = None

@@ -4,6 +4,7 @@ import antlr4
 from parser.cparser import CParser
 from parser.clexer import CLexer
 from datetime import datetime
+import cProfile

 class ErrorListener(antlr4.error.ErrorListener.ErrorListener):

@@ -34,10 +35,10 @@ def main():
     sub()
     after = datetime.now()
     print(str(after-before))
-    before = after
-    sub()
-    after = datetime.now()
-    print(str(after-before))
+    # before = after
+    # sub()
+    # after = datetime.now()
+    # print(str(after-before))

 if __name__ == '__main__':
-    main()
+    cProfile.run("main()", sort='tottime')

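The new entry point runs main() under cProfile with sort='tottime', which prints a per-function report ordered by time spent inside each function, excluding callees. A self-contained sketch of the same pattern; work() is a placeholder, not code from this repository:

import cProfile

def work():
    # placeholder workload standing in for main()
    return sum(i * i for i in range(200000))

if __name__ == '__main__':
    # identical call shape to the one added in the diff above
    cProfile.run("work()", sort='tottime')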