From 06730fcc6c5990272d99d58352288daa64fd3d29 Mon Sep 17 00:00:00 2001
From: ericvergnaud
Date: Sun, 21 Jun 2015 01:11:11 +0800
Subject: [PATCH] delete legacy test generator and generated tests

---
 .../test/rt/gen/AbstractParserTestMethod.java | 12 -
 .../test/rt/gen/CompositeLexerTestMethod.java | 35 -
 .../rt/gen/CompositeParserTestMethod.java | 36 -
 .../test/rt/gen/ConcreteParserTestMethod.java | 25 -
 .../org/antlr/v4/test/rt/gen/Generator.java | 1430 -----
 .../org/antlr/v4/test/rt/gen/Grammar.java | 51 -
 .../antlr/v4/test/rt/gen/JUnitTestFile.java | 101 -
 .../antlr/v4/test/rt/gen/JUnitTestMethod.java | 34 -
 .../antlr/v4/test/rt/gen/LexerTestMethod.java | 17 -
 .../v4/test/rt/gen/ParserTestMethod.java | 13 -
 .../LexerDelegatorInvokesDelegateRule.st | 4 -
 .../LexerDelegatorInvokesDelegateRule_S.st | 3 -
 .../LexerDelegatorRuleOverridesDelegate.st | 4 -
 .../LexerDelegatorRuleOverridesDelegate_S.st | 3 -
 .../BringInLiteralsFromDelegate.st | 4 -
 .../BringInLiteralsFromDelegate_S.st | 2 -
 .../CombinedImportsCombined.st | 3 -
 .../CombinedImportsCombined_S.st | 5 -
 .../DelegatesSeeSameTokenType.st | 17 -
 .../DelegatesSeeSameTokenType_S.st | 3 -
 .../DelegatesSeeSameTokenType_T.st | 3 -
 .../DelegatorAccessesDelegateMembers.st | 4 -
 .../DelegatorAccessesDelegateMembers_S.st | 5 -
 .../DelegatorInvokesDelegateRule.st | 5 -
 .../DelegatorInvokesDelegateRuleWithArgs.st | 5 -
 .../DelegatorInvokesDelegateRuleWithArgs_S.st | 2 -
 ...atorInvokesDelegateRuleWithReturnStruct.st | 5 -
 ...orInvokesDelegateRuleWithReturnStruct_S.st | 2 -
 .../DelegatorInvokesDelegateRule_S.st | 2 -
 ...egatorInvokesFirstVersionOfDelegateRule.st | 5 -
 ...atorInvokesFirstVersionOfDelegateRule_S.st | 3 -
 ...atorInvokesFirstVersionOfDelegateRule_T.st | 2 -
 .../DelegatorRuleOverridesDelegate.st | 4 -
 .../DelegatorRuleOverridesDelegate_S.st | 3 -
 .../DelegatorRuleOverridesDelegates.st | 4 -
 .../DelegatorRuleOverridesDelegates_S.st | 4 -
 .../DelegatorRuleOverridesDelegates_T.st | 3 -
 ...legatorRuleOverridesLookaheadInDelegate.st | 7 -
 ...gatorRuleOverridesLookaheadInDelegate_S.st | 5 -
 .../ImportLexerWithOnlyFragmentRules.st | 4 -
 .../ImportLexerWithOnlyFragmentRules_S.st | 6 -
 .../ImportedGrammarWithEmptyOptions.st | 5 -
 .../ImportedGrammarWithEmptyOptions_S.st | 3 -
 .../ImportedRuleWithAction.st | 5 -
 .../ImportedRuleWithAction_S.st | 2 -
 .../CompositeParsers/KeywordVSIDOrder.st | 5 -
 .../CompositeParsers/KeywordVSIDOrder_S.st | 2 -
 .../AmbigYieldsCtxSensitiveDFA.st | 5 -
 .../FullContextParsing/AmbiguityNoLoop.st | 12 -
 .../FullContextParsing/CtxSensitiveDFA.st | 9 -
 .../CtxSensitiveDFATwoDiffInput.st | 9 -
 .../FullContextParsing/ExprAmbiguity.st | 13 -
 .../FullContextIF_THEN_ELSEParse.st | 10 -
 .../LoopsSimulateTailRecursion.st | 15 -
 .../SLLSeesEOFInLLGrammar.st | 9 -
 .../rt/gen/grammars/LeftRecursion/AmbigLR.st | 21 -
 .../grammars/LeftRecursion/Declarations.st | 14 -
 .../DirectCallToLeftRecursiveRule.st | 6 -
 .../gen/grammars/LeftRecursion/Expressions.st | 13 -
 .../grammars/LeftRecursion/JavaExpressions.st | 56 -
 .../LeftRecursion/LabelsOnOpSubrule.st | 8 -
 .../grammars/LeftRecursion/MultipleActions.st | 8 -
 .../MultipleActionsPredicatesOptions.st | 9 -
 .../MultipleAlternativesWithCommonLabel.st | 16 -
 .../PrecedenceFilterConsidersContext.st | 6 -
 .../PrefixOpWithActionAndLabel.st | 11 -
 .../LeftRecursion/ReturnValueAndActions.st | 11 -
 .../ReturnValueAndActionsAndLabels.st | 14 -
 .../ReturnValueAndActionsList1.st | 13 -
 .../ReturnValueAndActionsList2.st | 12 -
 .../rt/gen/grammars/LeftRecursion/SemPred.st | 7 -
.../LeftRecursion/SemPredFailOption.st | 7 - .../rt/gen/grammars/LeftRecursion/Simple.st | 7 - .../gen/grammars/LeftRecursion/TernaryExpr.st | 10 - .../TernaryExprExplicitAssociativity.st | 10 - .../LeftRecursion/WhitespaceInfluence.st | 49 - .../LexerErrors/DFAToATNThatFailsBackToDFA.st | 3 - .../DFAToATNThatMatchesThenFailsInATN.st | 4 - .../EnforcedGreedyNestedBrances.st | 3 - .../gen/grammars/LexerErrors/ErrorInMiddle.st | 2 - .../LexerErrors/InvalidCharAtStart.st | 2 - .../InvalidCharAtStartAfterDFACache.st | 2 - .../LexerErrors/InvalidCharInToken.st | 2 - .../InvalidCharInTokenAfterDFACache.st | 2 - .../gen/grammars/LexerErrors/LexerExecDFA.st | 5 - .../LexerErrors/StringsEmbeddedInActions.st | 4 - .../gen/grammars/LexerExec/ActionPlacement.st | 8 - .../test/rt/gen/grammars/LexerExec/CharSet.st | 3 - .../rt/gen/grammars/LexerExec/CharSetInSet.st | 4 - .../rt/gen/grammars/LexerExec/CharSetNot.st | 3 - .../rt/gen/grammars/LexerExec/CharSetPlus.st | 3 - .../rt/gen/grammars/LexerExec/CharSetRange.st | 4 - .../LexerExec/CharSetWithEscapedChar.st | 3 - .../LexerExec/CharSetWithMissingEndRange.st | 3 - .../LexerExec/CharSetWithMissingEscapeChar.st | 3 - .../grammars/LexerExec/CharSetWithQuote1.st | 3 - .../grammars/LexerExec/CharSetWithQuote2.st | 3 - .../LexerExec/CharSetWithReversedRange.st | 3 - .../rt/gen/grammars/LexerExec/EOFByItself.st | 3 - .../LexerExec/EOFSuffixInFirstRule.st | 4 - .../gen/grammars/LexerExec/GreedyClosure.st | 3 - .../gen/grammars/LexerExec/GreedyConfigs.st | 4 - .../gen/grammars/LexerExec/GreedyOptional.st | 3 - .../LexerExec/GreedyPositiveClosure.st | 3 - .../test/rt/gen/grammars/LexerExec/HexVsID.st | 8 - .../rt/gen/grammars/LexerExec/KeywordID.st | 4 - .../rt/gen/grammars/LexerExec/LargeLexer.st | 4002 -------------- .../grammars/LexerExec/NonGreedyClosure.st | 3 - .../grammars/LexerExec/NonGreedyConfigs.st | 4 - .../grammars/LexerExec/NonGreedyOptional.st | 3 - .../LexerExec/NonGreedyPositiveClosure.st | 3 - .../LexerExec/NonGreedyTermination1.st | 2 - .../LexerExec/NonGreedyTermination2.st | 2 - .../rt/gen/grammars/LexerExec/Parentheses.st | 7 - .../LexerExec/PositionAdjustingLexer.st | 34 - .../grammars/LexerExec/QuoteTranslation.st | 2 - .../RecursiveLexerRuleRefWithWildcardPlus.st | 3 - .../RecursiveLexerRuleRefWithWildcardStar.st | 3 - .../RefToRuleDoesNotSetTokenNorEmitAnother.st | 4 - .../test/rt/gen/grammars/LexerExec/Slashes.st | 6 - .../gen/grammars/LexerExec/ZeroLengthToken.st | 9 - .../test/rt/gen/grammars/Listeners/Basic.st | 23 - .../v4/test/rt/gen/grammars/Listeners/LR.st | 24 - .../rt/gen/grammars/Listeners/LRWithLabels.st | 24 - .../rt/gen/grammars/Listeners/RuleGetters.st | 24 - .../rt/gen/grammars/Listeners/TokenGetters.st | 23 - .../rt/gen/grammars/ParseTrees/2AltLoop.st | 11 - .../test/rt/gen/grammars/ParseTrees/2Alts.st | 11 - .../rt/gen/grammars/ParseTrees/ExtraToken.st | 14 - .../rt/gen/grammars/ParseTrees/NoViableAlt.st | 14 - .../rt/gen/grammars/ParseTrees/RuleRef.st | 13 - .../test/rt/gen/grammars/ParseTrees/Sync.st | 13 - .../test/rt/gen/grammars/ParseTrees/Token2.st | 11 - .../ParseTrees/TokenAndRuleContextString.st | 12 - .../grammars/ParserErrors/ConjuringUpToken.st | 2 - .../ParserErrors/ConjuringUpTokenFromSet.st | 2 - .../ParserErrors/ContextListGetters.st | 7 - .../DuplicatedLeftRecursiveCall.st | 5 - .../ParserErrors/InvalidATNStateRemoval.st | 5 - .../ParserErrors/InvalidEmptyInput.st | 3 - .../gen/grammars/ParserErrors/LL1ErrorInfo.st | 14 - .../test/rt/gen/grammars/ParserErrors/LL2.st | 5 - 
.../test/rt/gen/grammars/ParserErrors/LL3.st | 5 - .../rt/gen/grammars/ParserErrors/LLStar.st | 5 - .../MultiTokenDeletionBeforeLoop.st | 2 - .../MultiTokenDeletionBeforeLoop2.st | 2 - .../MultiTokenDeletionDuringLoop.st | 2 - .../MultiTokenDeletionDuringLoop2.st | 2 - .../ParserErrors/NoViableAltAvoidance.st | 7 - .../ParserErrors/SingleSetInsertion.st | 2 - .../SingleSetInsertionConsumption.st | 3 - .../ParserErrors/SingleTokenDeletion.st | 2 - .../SingleTokenDeletionBeforeLoop.st | 2 - .../SingleTokenDeletionBeforeLoop2.st | 2 - .../SingleTokenDeletionConsumption.st | 3 - .../SingleTokenDeletionDuringLoop.st | 2 - .../SingleTokenDeletionDuringLoop2.st | 2 - .../SingleTokenDeletionExpectingSet.st | 2 - .../ParserErrors/SingleTokenInsertion.st | 2 - .../grammars/ParserErrors/TokenMismatch.st | 2 - .../grammars/ParserErrors/TokenMismatch2.st | 9 - .../test/rt/gen/grammars/ParserExec/APlus.st | 6 - .../test/rt/gen/grammars/ParserExec/AStar.st | 6 - .../grammars/ParserExec/AlternateQuotes.st | 6 - .../AlternateQuotes_ModeTagsLexer.st | 8 - .../rt/gen/grammars/ParserExec/AorAPlus.st | 6 - .../rt/gen/grammars/ParserExec/AorAStar.st | 6 - .../test/rt/gen/grammars/ParserExec/AorB.st | 9 - .../rt/gen/grammars/ParserExec/AorBPlus.st | 8 - .../rt/gen/grammars/ParserExec/AorBStar.st | 8 - .../test/rt/gen/grammars/ParserExec/Basic.st | 7 - .../gen/grammars/ParserExec/EOFInClosure.st | 3 - .../ParserExec/IfIfElseGreedyBinding1.st | 8 - .../ParserExec/IfIfElseGreedyBinding2.st | 8 - .../ParserExec/IfIfElseNonGreedyBinding1.st | 8 - .../ParserExec/IfIfElseNonGreedyBinding2.st | 8 - .../grammars/ParserExec/LL1OptionalBlock.st | 7 - .../LabelAliasingAcrossLabeledAlternatives.st | 8 - .../test/rt/gen/grammars/ParserExec/Labels.st | 6 - .../ParserExec/ListLabelForClosureContext.st | 20 - .../grammars/ParserExec/ListLabelsOnSet.st | 7 - .../ParserExec/MultipleEOFHandling.st | 2 - .../rt/gen/grammars/ParserExec/Optional.st | 4 - .../gen/grammars/ParserExec/ParserProperty.st | 6 - .../grammars/ParserExec/PredicatedIfIfElse.st | 7 - .../grammars/ParserExec/PredictionIssue334.st | 14 - .../gen/grammars/ParserExec/ReferenceToATN.st | 5 - .../ParserExec/StartRuleWithoutEOF.st | 6 - .../grammars/SemPredEvalLexer/DisableRule.st | 5 - .../grammars/SemPredEvalLexer/EnumNotID.st | 4 - .../grammars/SemPredEvalLexer/IDnotEnum.st | 4 - .../gen/grammars/SemPredEvalLexer/IDvsEnum.st | 4 - .../gen/grammars/SemPredEvalLexer/Indent.st | 6 - .../LexerInputPositionSensitivePredicates.st | 6 - .../SemPredEvalLexer/PredicatedKeywords.st | 4 - .../SemPredEvalParser/2UnpredicatedAlts.st | 9 - .../2UnpredicatedAltsAndOneOrthogonalAlt.st | 10 - .../SemPredEvalParser/ActionHidesPreds.st | 9 - .../ActionsHidePredsInGlobalFOLLOW.st | 10 - .../AtomWithClosureInTranslatedLRRule.st | 6 - .../DepedentPredsInGlobalFOLLOW.st | 11 - ...pendentPredNotInOuterCtxShouldBeIgnored.st | 11 - .../SemPredEvalParser/DisabledAlternative.st | 5 - ...edNotPassedOuterCtxToAvoidCastException.st | 10 - .../NoTruePredsThrowsNoViableAlt.st | 8 - .../gen/grammars/SemPredEvalParser/Order.st | 10 - .../PredFromAltTestedInLoopBack.st | 9 - .../PredTestedEvenWhenUnAmbig.st | 8 - .../PredicateDependentOnArg.st | 10 - .../PredicateDependentOnArg2.st | 10 - .../SemPredEvalParser/PredsInGlobalFOLLOW.st | 10 - .../SemPredEvalParser/RewindBeforePredEval.st | 8 - .../gen/grammars/SemPredEvalParser/Simple.st | 9 - .../SemPredEvalParser/SimpleValidate.st | 8 - .../SemPredEvalParser/SimpleValidate2.st | 8 - .../gen/grammars/SemPredEvalParser/ToLeft.st | 8 - 
.../ToLeftWithVaryingPredicate.st | 10 - .../UnpredicatedPathsInAlt.st | 12 - .../SemPredEvalParser/ValidateInDFA.st | 11 - .../rt/gen/grammars/Sets/CharSetLiteral.st | 4 - .../rt/gen/grammars/Sets/ComplementSet.st | 3 - .../rt/gen/grammars/Sets/LexerOptionalSet.st | 3 - .../test/rt/gen/grammars/Sets/LexerPlusSet.st | 3 - .../test/rt/gen/grammars/Sets/LexerStarSet.st | 3 - .../v4/test/rt/gen/grammars/Sets/NotChar.st | 3 - .../test/rt/gen/grammars/Sets/NotCharSet.st | 3 - .../gen/grammars/Sets/NotCharSetWithLabel.st | 3 - .../grammars/Sets/NotCharSetWithRuleRef3.st | 5 - .../Sets/OptionalLexerSingleElement.st | 3 - .../test/rt/gen/grammars/Sets/OptionalSet.st | 2 - .../grammars/Sets/OptionalSingleElement.st | 3 - .../test/rt/gen/grammars/Sets/ParserNotSet.st | 2 - .../rt/gen/grammars/Sets/ParserNotToken.st | 2 - .../grammars/Sets/ParserNotTokenWithLabel.st | 2 - .../v4/test/rt/gen/grammars/Sets/ParserSet.st | 2 - .../grammars/Sets/PlusLexerSingleElement.st | 3 - .../v4/test/rt/gen/grammars/Sets/PlusSet.st | 2 - .../v4/test/rt/gen/grammars/Sets/RuleAsSet.st | 2 - .../gen/grammars/Sets/SeqDoesNotBecomeSet.st | 5 - .../grammars/Sets/StarLexerSingleElement.st | 3 - .../v4/test/rt/gen/grammars/Sets/StarSet.st | 2 - .../org/antlr/v4/test/rt/java/BaseTest.java | 1417 ----- .../org/antlr/v4/test/rt/java/Java.test.stg | 356 -- .../v4/test/rt/java/TestCompositeLexers.java | 57 - .../v4/test/rt/java/TestCompositeParsers.java | 342 -- .../test/rt/java/TestFullContextParsing.java | 228 - .../v4/test/rt/java/TestLeftRecursion.java | 1167 ---- .../v4/test/rt/java/TestLexerErrors.java | 178 - .../antlr/v4/test/rt/java/TestLexerExec.java | 4760 ----------------- .../antlr/v4/test/rt/java/TestListeners.java | 230 - .../antlr/v4/test/rt/java/TestParseTrees.java | 173 - .../v4/test/rt/java/TestParserErrors.java | 353 -- .../antlr/v4/test/rt/java/TestParserExec.java | 477 -- .../v4/test/rt/java/TestSemPredEvalLexer.java | 170 - .../test/rt/java/TestSemPredEvalParser.java | 456 -- .../org/antlr/v4/test/rt/java/TestSets.java | 263 - 256 files changed, 17946 deletions(-) delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/AbstractParserTestMethod.java delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/CompositeLexerTestMethod.java delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/CompositeParserTestMethod.java delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/ConcreteParserTestMethod.java delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/Generator.java delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/Grammar.java delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/JUnitTestFile.java delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/JUnitTestMethod.java delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/LexerTestMethod.java delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/ParserTestMethod.java delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorInvokesDelegateRule.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorInvokesDelegateRule_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorRuleOverridesDelegate.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorRuleOverridesDelegate_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/BringInLiteralsFromDelegate.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/BringInLiteralsFromDelegate_S.st 
delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/CombinedImportsCombined.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/CombinedImportsCombined_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType_T.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorAccessesDelegateMembers.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorAccessesDelegateMembers_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRule.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithArgs.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithArgs_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithReturnStruct.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithReturnStruct_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRule_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule_T.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegate.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegate_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates_T.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesLookaheadInDelegate.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesLookaheadInDelegate_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportLexerWithOnlyFragmentRules.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportLexerWithOnlyFragmentRules_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedGrammarWithEmptyOptions.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedGrammarWithEmptyOptions_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedRuleWithAction.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedRuleWithAction_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/KeywordVSIDOrder.st delete mode 100644 
tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/KeywordVSIDOrder_S.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/AmbigYieldsCtxSensitiveDFA.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/AmbiguityNoLoop.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/CtxSensitiveDFA.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/CtxSensitiveDFATwoDiffInput.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/ExprAmbiguity.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/FullContextIF_THEN_ELSEParse.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/LoopsSimulateTailRecursion.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/SLLSeesEOFInLLGrammar.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/AmbigLR.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Declarations.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/DirectCallToLeftRecursiveRule.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Expressions.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/JavaExpressions.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/LabelsOnOpSubrule.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleActions.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleActionsPredicatesOptions.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleAlternativesWithCommonLabel.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/PrecedenceFilterConsidersContext.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/PrefixOpWithActionAndLabel.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActions.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsAndLabels.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsList1.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsList2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/SemPred.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/SemPredFailOption.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Simple.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/TernaryExpr.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/TernaryExprExplicitAssociativity.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/WhitespaceInfluence.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/DFAToATNThatFailsBackToDFA.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/DFAToATNThatMatchesThenFailsInATN.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/EnforcedGreedyNestedBrances.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/ErrorInMiddle.st delete mode 100644 
tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharAtStart.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharAtStartAfterDFACache.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharInToken.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharInTokenAfterDFACache.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/LexerExecDFA.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/StringsEmbeddedInActions.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/ActionPlacement.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetInSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetNot.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetPlus.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetRange.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithEscapedChar.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithMissingEndRange.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithMissingEscapeChar.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithQuote1.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithQuote2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithReversedRange.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/EOFByItself.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/EOFSuffixInFirstRule.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyClosure.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyConfigs.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyOptional.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyPositiveClosure.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/HexVsID.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/KeywordID.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/LargeLexer.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyClosure.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyConfigs.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyOptional.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyPositiveClosure.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyTermination1.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyTermination2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/Parentheses.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/PositionAdjustingLexer.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/QuoteTranslation.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RecursiveLexerRuleRefWithWildcardPlus.st delete mode 100644 
tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RecursiveLexerRuleRefWithWildcardStar.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RefToRuleDoesNotSetTokenNorEmitAnother.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/Slashes.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/ZeroLengthToken.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/Basic.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/LR.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/LRWithLabels.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/RuleGetters.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/TokenGetters.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/2AltLoop.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/2Alts.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/ExtraToken.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/NoViableAlt.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/RuleRef.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/Sync.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/Token2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/TokenAndRuleContextString.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ConjuringUpToken.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ConjuringUpTokenFromSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ContextListGetters.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/DuplicatedLeftRecursiveCall.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/InvalidATNStateRemoval.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/InvalidEmptyInput.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL1ErrorInfo.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL3.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LLStar.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionBeforeLoop.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionBeforeLoop2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionDuringLoop.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionDuringLoop2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/NoViableAltAvoidance.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleSetInsertion.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleSetInsertionConsumption.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletion.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionBeforeLoop.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionBeforeLoop2.st delete mode 100644 
tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionConsumption.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionDuringLoop.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionDuringLoop2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionExpectingSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenInsertion.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/TokenMismatch.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/TokenMismatch2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/APlus.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AStar.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AlternateQuotes.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AlternateQuotes_ModeTagsLexer.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorAPlus.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorAStar.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorB.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorBPlus.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorBStar.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Basic.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/EOFInClosure.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseGreedyBinding1.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseGreedyBinding2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseNonGreedyBinding1.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseNonGreedyBinding2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/LL1OptionalBlock.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/LabelAliasingAcrossLabeledAlternatives.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Labels.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ListLabelForClosureContext.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ListLabelsOnSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/MultipleEOFHandling.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Optional.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ParserProperty.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/PredicatedIfIfElse.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/PredictionIssue334.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ReferenceToATN.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/StartRuleWithoutEOF.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/DisableRule.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/EnumNotID.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/IDnotEnum.st delete mode 100644 
tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/IDvsEnum.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/Indent.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/LexerInputPositionSensitivePredicates.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/PredicatedKeywords.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/2UnpredicatedAlts.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/2UnpredicatedAltsAndOneOrthogonalAlt.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ActionHidesPreds.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ActionsHidePredsInGlobalFOLLOW.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/AtomWithClosureInTranslatedLRRule.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DepedentPredsInGlobalFOLLOW.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DependentPredNotInOuterCtxShouldBeIgnored.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DisabledAlternative.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/IndependentPredNotPassedOuterCtxToAvoidCastException.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/NoTruePredsThrowsNoViableAlt.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/Order.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredFromAltTestedInLoopBack.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredTestedEvenWhenUnAmbig.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredicateDependentOnArg.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredicateDependentOnArg2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredsInGlobalFOLLOW.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/RewindBeforePredEval.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/Simple.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/SimpleValidate.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/SimpleValidate2.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ToLeft.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ToLeftWithVaryingPredicate.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/UnpredicatedPathsInAlt.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ValidateInDFA.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/CharSetLiteral.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ComplementSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerOptionalSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerPlusSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerStarSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotChar.st delete mode 100644 
tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSetWithLabel.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSetWithRuleRef3.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalLexerSingleElement.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalSingleElement.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotToken.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotTokenWithLabel.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/PlusLexerSingleElement.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/PlusSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/RuleAsSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/SeqDoesNotBecomeSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/StarLexerSingleElement.st delete mode 100644 tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/StarSet.st delete mode 100644 tool/test/org/antlr/v4/test/rt/java/BaseTest.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/Java.test.stg delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestCompositeLexers.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestCompositeParsers.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestFullContextParsing.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestLeftRecursion.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestLexerErrors.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestLexerExec.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestListeners.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestParseTrees.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestParserErrors.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestParserExec.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestSemPredEvalLexer.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestSemPredEvalParser.java delete mode 100644 tool/test/org/antlr/v4/test/rt/java/TestSets.java diff --git a/tool/test/org/antlr/v4/test/rt/gen/AbstractParserTestMethod.java b/tool/test/org/antlr/v4/test/rt/gen/AbstractParserTestMethod.java deleted file mode 100644 index 32d4bf4e6..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/AbstractParserTestMethod.java +++ /dev/null @@ -1,12 +0,0 @@ -package org.antlr.v4.test.rt.gen; - -public class AbstractParserTestMethod extends JUnitTestMethod { - - public String startRule; - - public AbstractParserTestMethod(String name, String grammarName, String startRule) { - super(name, grammarName, null, null, null, null); - this.startRule = startRule; - } - -} diff --git a/tool/test/org/antlr/v4/test/rt/gen/CompositeLexerTestMethod.java b/tool/test/org/antlr/v4/test/rt/gen/CompositeLexerTestMethod.java deleted file mode 100644 index a7c684151..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/CompositeLexerTestMethod.java +++ /dev/null @@ -1,35 +0,0 @@ -package org.antlr.v4.test.rt.gen; - -import org.stringtemplate.v4.STGroup; - -import java.io.File; - -public class 
CompositeLexerTestMethod extends LexerTestMethod { - - public Grammar[] slaveGrammars; - - public CompositeLexerTestMethod(String name, String grammarName, - String input, String expectedOutput, - String expectedErrors, String ... slaves) { - super(name, grammarName, input, expectedOutput, expectedErrors, null); - this.slaveGrammars = new Grammar[slaves.length]; - for(int i=0;i configs = setup(); - File source = configs.get("Source"); - for(Map.Entry item : configs.entrySet()) { - if( !"Source".equals(item.getKey()) ) { - Generator gen = new Generator(item.getKey(), source, item.getValue()); - gen.generateTests(); - } - } - } - - private static Map setup() throws Exception { - Map configs = new LinkedHashMap(); - configs.put("Source", testTemplatesRootDir()); // source of test templates - configs.put("Java", javaGenDir()); - configs.put("CSharp", csharpGenDir()); - configs.put("Python2", python2GenDir()); - configs.put("Python3", python3GenDir()); - configs.put("NodeJS", nodeJSGenDir()); - configs.put("Safari", safariGenDir()); - configs.put("Firefox", firefoxGenDir()); - configs.put("Chrome", chromeGenDir()); - configs.put("Explorer", iexplorerGenDir()); - return configs; - } - - private static File javaGenDir() throws Exception { - return new File(antlrRoot+"/tool/test/org/antlr/v4/test/rt/java"); - } - - private static File csharpGenDir() { - return new File(antlrRoot+"/../antlr4-csharp/tool/test/org/antlr/v4/test/rt/csharp"); - } - - private static File python2GenDir() { - return new File(antlrRoot+"/../antlr4-python2/tool/test/org/antlr/v4/test/rt/py2"); - } - - private static File python3GenDir() { - return new File(antlrRoot+"/../antlr4-python3/tool/test/org/antlr/v4/test/rt/py3"); - } - - private static File nodeJSGenDir() { - return new File(antlrRoot+"/../antlr4-javascript/tool/test/org/antlr/v4/test/rt/js/node"); - } - - private static File safariGenDir() { - return new File(antlrRoot+"/../antlr4-javascript/tool/test/org/antlr/v4/test/rt/js/safari"); - } - - private static File firefoxGenDir() { - return new File(antlrRoot+"/../antlr4-javascript/tool/test/org/antlr/v4/test/rt/js/firefox"); - } - - private static File chromeGenDir() { - return new File(antlrRoot+"/../antlr4-javascript/tool/test/org/antlr/v4/test/rt/js/chrome"); - } - - private static File iexplorerGenDir() { - return new File(antlrRoot+"/../antlr4-javascript/tool/test/org/antlr/v4/test/rt/js/explorer"); - } - - private static File testTemplatesRootDir() throws Exception { - return new File(antlrRoot+"/tool/test/org/antlr/v4/test/rt/gen/grammars"); - } - - public static String escape(String s) { - return s==null ? 
null : s.replace("\\","\\\\").replace("\r", "\\r").replace("\n", "\\n").replace("\"","\\\""); - } - - private void generateTests() throws Exception { - System.out.println("Generating runtime tests for "+target); - this.group = readTemplates(); - Collection tests = buildTests(); - for(JUnitTestFile test : tests) { - String code = generateTestCode(test); - writeTestFile(test, code); - } - } - - private STGroup readTemplates() throws Exception { - if(!output.exists()) - throw new FileNotFoundException(output.getAbsolutePath()); - String name = target + ".test.stg"; - File file = new File(output, name); - if(!file.exists()) - throw new FileNotFoundException(file.getAbsolutePath()); - return new STGroupFile(file.getAbsolutePath()); - } - - private String generateTestCode(JUnitTestFile test) throws Exception { - test.generateUnitTests(group); - ST template = group.getInstanceOf("TestFile"); - template.add("file", test); - return template.render(); - } - - private void writeTestFile(JUnitTestFile test, String code) throws Exception { - File file = new File(output, "Test" + test.getName() + ".java"); - OutputStream stream = new FileOutputStream(file); - try { - stream.write(code.getBytes()); - } finally { - stream.close(); - } - } - - private Collection buildTests() throws Exception { - List list = new ArrayList(); - list.add(buildCompositeLexers()); - list.add(buildCompositeParsers()); - list.add(buildFullContextParsing()); - list.add(buildLeftRecursion()); - list.add(buildLexerErrors()); - list.add(buildLexerExec()); - list.add(buildListeners()); - list.add(buildParserErrors()); - list.add(buildParserExec()); - list.add(buildParseTrees()); - list.add(buildSemPredEvalLexer()); - list.add(buildSemPredEvalParser()); - list.add(buildSets()); - return list; - } - - private JUnitTestFile buildSets() throws Exception { - JUnitTestFile file = new JUnitTestFile("Sets"); - // this must return A not I to the parser; calling a nonfragment rule - // from a nonfragment rule does not set the overall token. 
- file.addParserTest(input, "SeqDoesNotBecomeSet", "T", "a", - "34", - "34\n", - null); - file.addParserTest(input, "ParserSet", "T", "a", - "x", - "x\n", - null); - file.addParserTest(input, "ParserNotSet", "T", "a", - "zz", - "z\n", - null); - file.addParserTest(input, "ParserNotToken", "T", "a", - "zz", - "zz\n", - null); - file.addParserTest(input, "ParserNotTokenWithLabel", "T", "a", - "zz", - "z\n", - null); - file.addParserTest(input, "RuleAsSet", "T", "a", - "b", - "b\n", - null); - file.addParserTest(input, "NotChar", "T", "a", - "x", - "x\n", - null); - file.addParserTest(input, "OptionalSingleElement", "T", "a", - "bc", - "bc\n", - null); - file.addParserTest(input, "OptionalLexerSingleElement", "T", "a", - "bc", - "bc\n", - null); - file.addParserTests(input, "StarLexerSingleElement", "T", "a", - "bbbbc", "bbbbc\n", - "c", "c\n"); - file.addParserTest(input, "PlusLexerSingleElement", "T", "a", - "bbbbc", - "bbbbc\n", - null); - file.addParserTest(input, "OptionalSet", "T", "a", - "ac", - "ac\n", - null); - file.addParserTest(input, "StarSet", "T", "a", - "abaac", - "abaac\n", - null); - file.addParserTest(input, "PlusSet", "T", "a", - "abaac", - "abaac\n", - null); - file.addParserTest(input, "LexerOptionalSet", "T", "a", - "ac", - "ac\n", - null); - file.addParserTest(input, "LexerStarSet", "T", "a", - "abaac", - "abaac\n", - null); - file.addParserTest(input, "LexerPlusSet", "T", "a", - "abaac", - "abaac\n", - null); - file.addParserTest(input, "NotCharSet", "T", "a", - "x", - "x\n", - null); - file.addParserTest(input, "NotCharSetWithLabel", "T", "a", - "x", - "x\n", - null); - file.addParserTest(input, "NotCharSetWithRuleRef3", "T", "a", - "x", - "x\n", - null); - file.addParserTest(input, "CharSetLiteral", "T", "a", - "A a B b", - "A\n" + "a\n" + "B\n" + "b\n", - null); - file.addParserTest(input, "ComplementSet", "T", "parse", - "a", - "", - "line 1:0 token recognition error at: 'a'\n" + - "line 1:1 missing {} at ''\n"); - return file; - } - - private JUnitTestFile buildSemPredEvalParser() throws Exception { - JUnitTestFile file = new JUnitTestFile("SemPredEvalParser"); - JUnitTestMethod tm = file.addParserTest(input, "SimpleValidate", "T", "s", - "x", - "", - "line 1:0 no viable alternative at input 'x'\n"); - tm.debug = true; - tm = file.addParserTest(input, "SimpleValidate2", "T", "s", - "3 4 x", - "alt 2\n" + "alt 2\n", - "line 1:4 no viable alternative at input 'x'\n"); - tm.debug = true; - file.addParserTest(input, "AtomWithClosureInTranslatedLRRule", "T", "start", - "a+b+a", - "", - null); - tm = file.addParserTest(input, "ValidateInDFA", "T", "s", - "x ; y", - "", - "line 1:0 no viable alternative at input 'x'\n" + - "line 1:4 no viable alternative at input 'y'\n"); - tm.debug = true; - tm = file.addParserTest(input, "Simple", "T", "s", - "x y 3", - "alt 2\n" + "alt 2\n" + "alt 3\n", - null); - // Under new predicate ordering rules (see antlr/antlr4#29), the first - // alt with an acceptable config (unpredicated, or predicated and evaluates - // to true) is chosen. - tm.debug = true; - file.addParserTest(input, "Order", "T", "s", - "x y", - "alt 1\n" + "alt 1\n", - null); - // We have n-2 predicates for n alternatives. 
pick first alt - tm = file.addParserTest(input, "2UnpredicatedAlts", "T", "s", - "x; y", - "alt 1\n" + - "alt 1\n", - "line 1:0 reportAttemptingFullContext d=0 (a), input='x'\n" + - "line 1:0 reportAmbiguity d=0 (a): ambigAlts={1, 2}, input='x'\n" + - "line 1:3 reportAttemptingFullContext d=0 (a), input='y'\n" + - "line 1:3 reportAmbiguity d=0 (a): ambigAlts={1, 2}, input='y'\n"); - tm.debug = true; - tm = file.addParserTest(input, "2UnpredicatedAltsAndOneOrthogonalAlt", "T", "s", - "34; x; y", - "alt 1\n" + "alt 2\n" + "alt 2\n", - "line 1:4 reportAttemptingFullContext d=0 (a), input='x'\n" + - "line 1:4 reportAmbiguity d=0 (a): ambigAlts={2, 3}, input='x'\n" + - "line 1:7 reportAttemptingFullContext d=0 (a), input='y'\n" + - "line 1:7 reportAmbiguity d=0 (a): ambigAlts={2, 3}, input='y'\n"); - // The parser consumes ID and moves to the 2nd token INT. - // To properly evaluate the predicates after matching ID INT, - // we must correctly see come back to starting index so LT(1) works - tm.debug = true; - tm = file.addParserTest(input, "RewindBeforePredEval", "T", "s", - "y 3 x 4", - "alt 2\n" + "alt 1\n", - null); - // checks that we throw exception if all alts - // are covered with a predicate and none succeeds - tm.debug = true; - file.addParserTest(input, "NoTruePredsThrowsNoViableAlt", "T", "s", - "y 3 x 4", - "", - "line 1:0 no viable alternative at input 'y'\n"); - tm = file.addParserTest(input, "ToLeft", "T", "s", - "x x y", - "alt 2\n" + "alt 2\n" + "alt 2\n", - null); - tm.debug = true; - tm = file.addParserTest(input, "UnpredicatedPathsInAlt", "T", "s", - "x 4", - "alt 1\n", - null); - tm.debug = true; - file.addParserTest(input, "ActionHidesPreds", "T", "s", - "x x y", - "alt 1\n" + "alt 1\n" + "alt 1\n", - null); - /** In this case, we use predicates that depend on global information - * like we would do for a symbol table. We simply execute - * the predicates assuming that all necessary information is available. - * The i++ action is done outside of the prediction and so it is executed. - */ - tm = file.addParserTest(input, "ToLeftWithVaryingPredicate", "T", "s", - "x x y", - "i=1\n" + "alt 2\n" + "i=2\n" + "alt 1\n" + "i=3\n" + "alt 2\n", - null); - tm.debug = true; - /** - * In this case, we're passing a parameter into a rule that uses that - * information to predict the alternatives. This is the special case - * where we know exactly which context we are in. The context stack - * is empty and we have not dipped into the outer context to make a decision. - */ - tm = file.addParserTest(input, "PredicateDependentOnArg", "T", "s", - "a b", - "alt 2\n" + "alt 1\n", - null); - tm.debug = true; - /** In this case, we have to ensure that the predicates are not - tested during the closure after recognizing the 1st ID. The - closure will fall off the end of 'a' 1st time and reach into the - a[1] rule invocation. It should not execute predicates because it - does not know what the parameter is. The context stack will not - be empty and so they should be ignored. It will not affect - recognition, however. We are really making sure the ATN - simulation doesn't crash with context object issues when it - encounters preds during FOLLOW. - */ - tm = file.addParserTest(input, "PredicateDependentOnArg2", "T", "s", - "a b", - "", - null); - tm.debug = true; - // uses ID ';' or ID '.' lookahead to solve s. preds not tested. 
- tm = file.addParserTest(input, "DependentPredNotInOuterCtxShouldBeIgnored", "T", "s", - "a;", - "alt 2\n", - null); - tm.debug = true; - tm = file.addParserTest(input, "IndependentPredNotPassedOuterCtxToAvoidCastException", "T", "s", - "a;", - "alt 2\n", - null); - tm.debug = true; - /** During a global follow operation, we still collect semantic - * predicates as long as they are not dependent on local context - */ - tm = file.addParserTest(input, "PredsInGlobalFOLLOW", "T", "s", - "a!", - "eval=true\n" + /* now we are parsing */ "parse\n", - null); - tm.debug = true; - /** We cannot collect predicates that are dependent on local context if - * we are doing a global follow. They appear as if they were not there at all. - */ - tm = file.addParserTest(input, "DepedentPredsInGlobalFOLLOW","T", "s", - "a!", - "eval=true\n" + "parse\n", - null); - tm.debug = true; - /** Regular non-forced actions can create side effects used by semantic - * predicates and so we cannot evaluate any semantic predicate - * encountered after having seen a regular action. This includes - * during global follow operations. - */ - tm = file.addParserTest(input, "ActionsHidePredsInGlobalFOLLOW", "T", "s", - "a!", - "eval=true\n" + "parse\n", - null); - tm.debug = true; - tm = file.addParserTestsWithErrors(input, "PredTestedEvenWhenUnAmbig", "T", "primary", - "abc", "ID abc\n", null, - "enum", "", "line 1:0 no viable alternative at input 'enum'\n"); - tm.debug = true; - /** - * This is a regression test for antlr/antlr4#218 "ANTLR4 EOF Related Bug". - * https://github.com/antlr/antlr4/issues/218 - */ - tm = file.addParserTest(input, "DisabledAlternative", "T", "cppCompilationUnit", - "hello", - "", - null); - tm.debug = true; - /** Loopback doesn't eval predicate at start of alt */ - tm = file.addParserTestsWithErrors(input, "PredFromAltTestedInLoopBack", "T", "file_", - "s\n\n\nx\n", - "(file_ (para (paraContent s) \\n \\n) (para (paraContent \\n x \\n)) )\n", - "line 5:0 mismatched input '' expecting '\n'\n", - "s\n\n\nx\n\n", - "(file_ (para (paraContent s) \\n \\n) (para (paraContent \\n x) \\n \\n) )\n", - null); - tm.debug = true; - return file; - } - - private JUnitTestFile buildSemPredEvalLexer() throws Exception { - JUnitTestFile file = new JUnitTestFile("SemPredEvalLexer"); - LexerTestMethod tm = file.addLexerTest(input, "DisableRule", "L", - "enum abc", - "[@0,0:3='enum',<2>,1:0]\n" + - "[@1,5:7='abc',<3>,1:5]\n" + - "[@2,8:7='',<-1>,1:8]\n" + - "s0-' '->:s5=>4\n" + - "s0-'a'->:s6=>3\n" + - "s0-'e'->:s1=>3\n" + - ":s1=>3-'n'->:s2=>3\n" + - ":s2=>3-'u'->:s3=>3\n" + - ":s6=>3-'b'->:s6=>3\n" + - ":s6=>3-'c'->:s6=>3\n", - null); - tm.showDFA = true; - tm = file.addLexerTest(input, "IDvsEnum", "L", - "enum abc enum", - "[@0,0:3='enum',<2>,1:0]\n" + - "[@1,5:7='abc',<2>,1:5]\n" + - "[@2,9:12='enum',<2>,1:9]\n" + - "[@3,13:12='',<-1>,1:13]\n" + - "s0-' '->:s5=>3\n" + - "s0-'a'->:s4=>2\n" + - "s0-'e'->:s1=>2\n" + - ":s1=>2-'n'->:s2=>2\n" + - ":s2=>2-'u'->:s3=>2\n" + - ":s4=>2-'b'->:s4=>2\n" + - ":s4=>2-'c'->:s4=>2\n", // no 'm'-> transition...conflicts with pred - null); - tm.showDFA = true; - tm = file.addLexerTest(input, "IDnotEnum", "L", - "enum abc enum", - "[@0,0:3='enum',<2>,1:0]\n" + - "[@1,5:7='abc',<2>,1:5]\n" + - "[@2,9:12='enum',<2>,1:9]\n" + - "[@3,13:12='',<-1>,1:13]\n" + - "s0-' '->:s2=>3\n", // no edges in DFA for enum/id. all paths lead to pred. 
- null); - tm.showDFA = true; - tm = file.addLexerTest(input, "EnumNotID", "L", - "enum abc enum", - "[@0,0:3='enum',<1>,1:0]\n" + - "[@1,5:7='abc',<2>,1:5]\n" + - "[@2,9:12='enum',<1>,1:9]\n" + - "[@3,13:12='',<-1>,1:13]\n" + - "s0-' '->:s3=>3\n", // no edges in DFA for enum/id. all paths lead to pred. - null); - tm.showDFA = true; - tm = file.addLexerTest(input, "Indent", "L", - "abc\n def \n", - "INDENT\n" + // action output - "[@0,0:2='abc',<1>,1:0]\n" + // ID - "[@1,3:3='\\n',<3>,1:3]\n" + // NL - "[@2,4:5=' ',<2>,2:0]\n" + // INDENT - "[@3,6:8='def',<1>,2:2]\n" + // ID - "[@4,9:10=' ',<4>,2:5]\n" + // WS - "[@5,11:11='\\n',<3>,2:7]\n" + - "[@6,12:11='',<-1>,3:0]\n" + - "s0-'\n" + - "'->:s2=>3\n" + - "s0-'a'->:s1=>1\n" + - "s0-'d'->:s1=>1\n" + - ":s1=>1-'b'->:s1=>1\n" + - ":s1=>1-'c'->:s1=>1\n" + - ":s1=>1-'e'->:s1=>1\n" + - ":s1=>1-'f'->:s1=>1\n", - null); - tm.showDFA = true; - tm = file.addLexerTest(input, "LexerInputPositionSensitivePredicates", "L", - "a cde\nabcde\n", - "a\n" + - "cde\n" + - "ab\n" + - "cde\n" + - "[@0,0:0='a',<1>,1:0]\n" + - "[@1,2:4='cde',<2>,1:2]\n" + - "[@2,6:7='ab',<1>,2:0]\n" + - "[@3,8:10='cde',<2>,2:2]\n" + - "[@4,12:11='',<-1>,3:0]\n", - null); - tm.showDFA = true; - file.addLexerTest(input, "PredicatedKeywords", "L", - "enum enu a", - "enum!\n" + - "ID enu\n" + - "ID a\n" + - "[@0,0:3='enum',<1>,1:0]\n" + - "[@1,5:7='enu',<2>,1:5]\n" + - "[@2,9:9='a',<2>,1:9]\n" + - "[@3,10:9='',<-1>,1:10]\n", - null); - return file; - } - - private JUnitTestFile buildParseTrees() throws Exception { - JUnitTestFile file = new JUnitTestFile("ParseTrees"); - file.addParserTest(input, "TokenAndRuleContextString", "T", "s", - "x", - "[a, s]\n(a x)\n", - null); - file.addParserTest(input, "Token2", "T", "s", - "xy", - "(a x y)\n", - null); - file.addParserTest(input, "2Alts", "T", "s", - "y", - "(a y)\n", - null); - file.addParserTest(input, "2AltLoop", "T", "s", - "xyyxyxz", - "(a x y y x y x z)\n", - null); - file.addParserTest(input, "RuleRef", "T", "s", - "yx", - "(a (b y) x)\n", - null); - file.addParserTest(input, "ExtraToken", "T", "s", - "xzy", - "(a x z y)\n", // ERRORs not shown. 
z is colored red in tree view - "line 1:1 extraneous input 'z' expecting 'y'\n"); - file.addParserTest(input, "NoViableAlt", "T", "s", - "z", - "(a z)\n", - "line 1:0 mismatched input 'z' expecting {'x', 'y'}\n"); - file.addParserTest(input, "Sync", "T", "s", - "xzyy!", - "(a x z y y !)\n", - "line 1:1 extraneous input 'z' expecting {'y', '!'}\n"); - return file; - } - - private JUnitTestFile buildParserErrors() throws Exception { - JUnitTestFile file = new JUnitTestFile("ParserErrors"); - file.addParserTest(input, "TokenMismatch", "T", "a", - "aa", - "", - "line 1:1 mismatched input 'a' expecting 'b'\n"); - file.addParserTest(input, "TokenMismatch2", "T", "stat", - "( ~FORCE_ERROR~ ", - "", - "line 1:2 mismatched input '~FORCE_ERROR~' expecting ')'\n"); - file.addParserTest(input, "SingleTokenDeletion", "T", "a", - "aab", - "", - "line 1:1 extraneous input 'a' expecting 'b'\n"); - file.addParserTest(input, "SingleTokenDeletionExpectingSet", "T", "a", - "aab", - "", - "line 1:1 extraneous input 'a' expecting {'b', 'c'}\n"); - file.addParserTest(input, "SingleTokenDeletionConsumption", "T", "a", - "aabd", - "[@2,2:2='b',<1>,1:2]\n", - "line 1:1 extraneous input 'a' expecting {'b', 'c'}\n"); - file.addParserTest(input, "SingleTokenInsertion", "T", "a", - "ac", - "", - "line 1:1 missing 'b' at 'c'\n"); - file.addParserTest(input, "ConjuringUpToken", "T", "a", - "ac", - "conjured=[@-1,-1:-1='',<2>,1:1]\n", - "line 1:1 missing 'b' at 'c'\n"); - file.addParserTest(input, "SingleSetInsertion", "T", "a", - "ad", - "", - "line 1:1 missing {'b', 'c'} at 'd'\n"); - file.addParserTest(input, "SingleSetInsertionConsumption", "T", "a", - "ad", - "[@0,0:0='a',<3>,1:0]\n", - "line 1:1 missing {'b', 'c'} at 'd'\n"); - file.addParserTest(input, "ConjuringUpTokenFromSet", "T", "a", - "ad", - "conjured=[@-1,-1:-1='',<2>,1:1]\n", - "line 1:1 missing {'b', 'c'} at 'd'\n"); - file.addParserTest(input, "LL2", "T", "a", - "ae", - "", - "line 1:1 no viable alternative at input 'ae'\n"); - file.addParserTest(input, "LL3", "T", "a", - "abe", - "", - "line 1:2 no viable alternative at input 'abe'\n"); - file.addParserTest(input, "LLStar", "T", "a", - "aaae", - "", - "line 1:3 no viable alternative at input 'aaae'\n"); - file.addParserTest(input, "SingleTokenDeletionBeforeLoop", "T", "a", - "aabc", - "", - "line 1:1 extraneous input 'a' expecting {, 'b'}\n" + - "line 1:3 token recognition error at: 'c'\n"); - file.addParserTest(input, "MultiTokenDeletionBeforeLoop", "T", "a", - "aacabc", - "", - "line 1:1 extraneous input 'a' expecting {'b', 'c'}\n"); - file.addParserTest(input, "SingleTokenDeletionDuringLoop", "T", "a", - "ababbc", - "", - "line 1:2 extraneous input 'a' expecting {'b', 'c'}\n"); - file.addParserTest(input, "MultiTokenDeletionDuringLoop", "T", "a", - "abaaababc", - "", - "line 1:2 extraneous input 'a' expecting {'b', 'c'}\n" + - "line 1:6 extraneous input 'a' expecting {'b', 'c'}\n"); - file.addParserTest(input, "SingleTokenDeletionBeforeLoop2", "T", "a", - "aabc", - "", - "line 1:1 extraneous input 'a' expecting {, 'b', 'z'}\n" + - "line 1:3 token recognition error at: 'c'\n"); - file.addParserTest(input, "MultiTokenDeletionBeforeLoop2", "T", "a", - "aacabc", - "", - "line 1:1 extraneous input 'a' expecting {'b', 'z', 'c'}\n"); - file.addParserTest(input, "SingleTokenDeletionDuringLoop2", "T", "a", - "ababbc", - "", - "line 1:2 extraneous input 'a' expecting {'b', 'z', 'c'}\n"); - file.addParserTest(input, "MultiTokenDeletionDuringLoop2", "T", "a", - "abaaababc", - "", - "line 1:2 extraneous input 'a' 
expecting {'b', 'z', 'c'}\n" + - "line 1:6 extraneous input 'a' expecting {'b', 'z', 'c'}\n"); - file.addParserTest(input, "LL1ErrorInfo", "T", "start", - "dog and software", - "{'hardware', 'software'}\n", - null); - file.addParserTest(input, "InvalidEmptyInput", "T", "start", - "", - "", - "line 1:0 missing ID at ''\n"); - file.addParserTest(input, "ContextListGetters", "T", "s", - "abab", - "abab\n", - null); - file.addParserTestsWithErrors(input, "DuplicatedLeftRecursiveCall", "T", "start", - "xx", "", null, - "xxx", "", null, - "xxxx", "", null); - file.addParserTest(input, "InvalidATNStateRemoval", "T", "start", - "x:x", - "", - null); - // "a." matches 'a' to rule e but then realizes '.' won't match. - // previously would cause noviablealt. now prediction pretends to - // have "a' predict 2nd alt of e. Will get syntax error later so - // let it get farther. - file.addParserTest(input, "NoViableAltAvoidance", "T", "s", - "a.", - "", - "line 1:1 mismatched input '.' expecting '!'\n"); - return file; - } - - private JUnitTestFile buildListeners() throws Exception { - JUnitTestFile file = new JUnitTestFile("Listeners"); - file.addParserTest(input, "Basic", "T", "s", - "1 2", - "(a 1 2)\n" + "1\n" + "2\n", - null); - file.addParserTests(input, "TokenGetters", "T", "s", - "1 2", - "(a 1 2)\n" + - "1 2 [1, 2]\n", - "abc", - "(a abc)\n" + - "[@0,0:2='abc',<4>,1:0]\n"); - file.addParserTests(input, "RuleGetters", "T", "s", - "1 2", - "(a (b 1) (b 2))\n" + - "1 2 1\n", - "abc", - "(a (b abc))\n" + - "abc\n"); - file.addParserTest(input, "LR", "T", "s", - "1+2*3", - "(e (e 1) + (e (e 2) * (e 3)))\n" + - "1\n" + - "2\n" + - "3\n" + - "2 3 2\n" + - "1 2 1\n", - null); - file.addParserTest(input, "LRWithLabels", "T", "s", - "1(2,3)", - "(e (e 1) ( (eList (e 2) , (e 3)) ))\n" + - "1\n" + "2\n" + "3\n" + "1 [13 6]\n", - null); - return file; - } - - private JUnitTestFile buildLexerErrors() throws Exception { - JUnitTestFile file = new JUnitTestFile("LexerErrors"); - file.addLexerTest(input, "InvalidCharAtStart", "L", - "x", - "[@0,1:0='',<-1>,1:1]\n", - "line 1:0 token recognition error at: 'x'\n"); - file.addLexerTest(input, "StringsEmbeddedInActions", "L", - "[\"foo\"]", - "[@0,0:6='[\"foo\"]',<1>,1:0]\n" + - "[@1,7:6='',<-1>,1:7]\n", - null, 1); - file.addLexerTest(input, "StringsEmbeddedInActions", "L", - "[\"foo]", - "[@0,6:5='',<-1>,1:6]\n", - "line 1:0 token recognition error at: '[\"foo]'\n", - 2); - file.addLexerTest(input, "EnforcedGreedyNestedBrances", "L", - "{ { } }", - "[@0,0:6='{ { } }',<1>,1:0]\n" + - "[@1,7:6='',<-1>,1:7]\n", - null, 1); - file.addLexerTest(input, "EnforcedGreedyNestedBrances", "L", - "{ { }", - "[@0,5:4='',<-1>,1:5]\n", - "line 1:0 token recognition error at: '{ { }'\n", - 2); - file.addLexerTest(input, "InvalidCharAtStartAfterDFACache", "L", - "abx", - "[@0,0:1='ab',<1>,1:0]\n" + - "[@1,3:2='',<-1>,1:3]\n", - "line 1:2 token recognition error at: 'x'\n"); - file.addLexerTest(input, "InvalidCharInToken", "L", - "ax", - "[@0,2:1='',<-1>,1:2]\n", - "line 1:0 token recognition error at: 'ax'\n"); - file.addLexerTest(input, "InvalidCharInTokenAfterDFACache", "L", - "abax", - "[@0,0:1='ab',<1>,1:0]\n" + - "[@1,4:3='',<-1>,1:4]\n", - "line 1:2 token recognition error at: 'ax'\n"); - // The first ab caches the DFA then abx goes through the DFA but - // into the ATN for the x, which fails. 
Must go back into DFA - // and return to previous dfa accept state - file.addLexerTest(input, "DFAToATNThatFailsBackToDFA", "L", - "ababx", - "[@0,0:1='ab',<1>,1:0]\n" + - "[@1,2:3='ab',<1>,1:2]\n" + - "[@2,5:4='',<-1>,1:5]\n", - "line 1:4 token recognition error at: 'x'\n"); - // The first ab caches the DFA then abx goes through the DFA but - // into the ATN for the c. It marks that hasn't except state - // and then keeps going in the ATN. It fails on the x, but - // uses the previous accepted in the ATN not DFA - file.addLexerTest(input, "DFAToATNThatMatchesThenFailsInATN", "L", - "ababcx", - "[@0,0:1='ab',<1>,1:0]\n" + - "[@1,2:4='abc',<2>,1:2]\n" + - "[@2,6:5='',<-1>,1:6]\n", - "line 1:5 token recognition error at: 'x'\n"); - file.addLexerTest(input, "ErrorInMiddle", "L", - "abx", - "[@0,3:2='',<-1>,1:3]\n", - "line 1:0 token recognition error at: 'abx'\n"); - LexerTestMethod tm = file.addLexerTest(input, "LexerExecDFA", "L", - "x : x", - "[@0,0:0='x',<3>,1:0]\n" + - "[@1,2:2=':',<1>,1:2]\n" + - "[@2,4:4='x',<3>,1:4]\n" + - "[@3,5:4='',<-1>,1:5]\n", - "line 1:1 token recognition error at: ' '\n" + - "line 1:3 token recognition error at: ' '\n"); - tm.lexerOnly = false; - return file; - } - - private JUnitTestFile buildLeftRecursion() throws Exception { - JUnitTestFile file = new JUnitTestFile("LeftRecursion"); - file.addParserTests(input, "Simple", "T", "s", - "x", "(s (a x))\n", - "x y", "(s (a (a x) y))\n", - "x y z", "(s (a (a (a x) y) z))\n"); - file.addParserTests(input, "DirectCallToLeftRecursiveRule", "T", "a", - "x", "(a x)\n", - "x y", "(a (a x) y)\n", - "x y z", "(a (a (a x) y) z)\n"); - file.addParserTest(input, "SemPred", "T", "s", "x y z", - "(s (a (a (a x) y) z))\n", null); - file.addParserTests(input, "TernaryExpr", "T", "s", - "a", "(s (e a) )\n", - "a+b", "(s (e (e a) + (e b)) )\n", - "a*b", "(s (e (e a) * (e b)) )\n", - "a?b:c", "(s (e (e a) ? (e b) : (e c)) )\n", - "a=b=c", "(s (e (e a) = (e (e b) = (e c))) )\n", - "a?b+c:d", "(s (e (e a) ? (e (e b) + (e c)) : (e d)) )\n", - "a?b=c:d", "(s (e (e a) ? (e (e b) = (e c)) : (e d)) )\n", - "a? b?c:d : e", "(s (e (e a) ? (e (e b) ? (e c) : (e d)) : (e e)) )\n", - "a?b: c?d:e", "(s (e (e a) ? (e b) : (e (e c) ? (e d) : (e e))) )\n"); - file.addParserTests(input, "Expressions", "T", "s", - "a", "(s (e a) )\n", - "1", "(s (e 1) )\n", - "a-1", "(s (e (e a) - (e 1)) )\n", - "a.b", "(s (e (e a) . b) )\n", - "a.this", "(s (e (e a) . this) )\n", - "-a", "(s (e - (e a)) )\n", - "-a+b", "(s (e (e - (e a)) + (e b)) )\n"); - file.addParserTests(input, "JavaExpressions", "T", "s", - "a|b&c", "(s (e (e a) | (e (e b) & (e c))) )\n", - "(a|b)&c", "(s (e (e ( (e (e a) | (e b)) )) & (e c)) )\n", - "a > b", "(s (e (e a) > (e b)) )\n", - "a >> b", "(s (e (e a) >> (e b)) )\n", - "a=b=c", "(s (e (e a) = (e (e b) = (e c))) )\n", - "a^b^c", "(s (e (e a) ^ (e (e b) ^ (e c))) )\n", - "(T)x", "(s (e ( (type_ T) ) (e x)) )\n", - "new A().b", "(s (e (e new (type_ A) ( )) . b) )\n", - "(T)t.f()", "(s (e (e ( (type_ T) ) (e (e t) . f)) ( )) )\n", - "a.f(x)==T.c", "(s (e (e (e (e a) . f) ( (expressionList (e x)) )) == (e (e T) . c)) )\n", - "a.f().g(x,1)", "(s (e (e (e (e (e a) . f) ( )) . 
g) ( (expressionList (e x) , (e 1)) )) )\n", - "new T[((n-1) * x) + 1]", "(s (e new (type_ T) [ (e (e ( (e (e ( (e (e n) - (e 1)) )) * (e x)) )) + (e 1)) ]) )\n"); - file.addParserTests(input, "Declarations", "T", "s", - "a", "(s (declarator a) )\n", - "*a", "(s (declarator * (declarator a)) )\n", - "**a", "(s (declarator * (declarator * (declarator a))) )\n", - "a[3]", "(s (declarator (declarator a) [ (e 3) ]) )\n", - "b[]", "(s (declarator (declarator b) [ ]) )\n", - "(a)", "(s (declarator ( (declarator a) )) )\n", - "a[]()", "(s (declarator (declarator (declarator a) [ ]) ( )) )\n", - "a[][]", "(s (declarator (declarator (declarator a) [ ]) [ ]) )\n", - "*a[]", "(s (declarator * (declarator (declarator a) [ ])) )\n", - "(*a)[]", "(s (declarator (declarator ( (declarator * (declarator a)) )) [ ]) )\n"); - file.addParserTests(input, "ReturnValueAndActions", "T", "s", - "4", "4\n", - "1+2", "3\n", - "1+2*3", "7\n", - "(1+2)*3", "9\n"); - file.addParserTests(input, "LabelsOnOpSubrule", "T", "s", - "4", "(s (e 4))\n", - "1*2/3", "(s (e (e (e 1) * (e 2)) / (e 3)))\n", - "(1/2)*3", "(s (e (e ( (e (e 1) / (e 2)) )) * (e 3)))\n"); - file.addParserTests(input, "ReturnValueAndActionsAndLabels", "T", "s", - "4", "4\n", - "1+2", "3\n", - "1+2*3", "7\n", - "i++*3", "12\n"); - /** - * This is a regression test for antlr/antlr4#433 "Not all context accessor - * methods are generated when an alternative rule label is used for multiple - * alternatives". - * https://github.com/antlr/antlr4/issues/433 - */ - file.addParserTests(input, "MultipleAlternativesWithCommonLabel", "T", "s", - "4", "4\n", - "1+2", "3\n", - "1+2*3", "7\n", - "i++*3", "12\n"); - file.addParserTests(input, "PrefixOpWithActionAndLabel", "T", "s", - "a", "a\n", - "a+b", "(a+b)\n", - "a=b+c", "((a=b)+c)\n"); - file.addParserTests(input, "AmbigLR", "Expr", "prog", - "1\n", "", - "a = 5\n", "", - "b = 6\n", "", - "a+b*2\n", "", - "(1+2)*3\n", ""); - /** - * This is a regression test for #239 "recoursive parser using implicit - * tokens ignore white space lexer rule". - * https://github.com/antlr/antlr4/issues/239 - */ - file.addParserTests(input, "WhitespaceInfluence", "Expr", "prog", - "Test(1,3)", "", - "Test(1, 3)", ""); - /** - * This is a regression test for antlr/antlr4#509 "Incorrect rule chosen in - * unambiguous grammar". - * https://github.com/antlr/antlr4/issues/509 - */ - file.addParserTest(input, "PrecedenceFilterConsidersContext", "T", "prog", - "aa", - "(prog (statement (letterA a)) (statement (letterA a)) )\n", null); - /** - * This is a regression test for antlr/antlr4#625 "Duplicate action breaks - * operator precedence" - * https://github.com/antlr/antlr4/issues/625 - */ - file.addParserTests(input, "MultipleActions", "T", "s", - "4", "(s (e 4))\n", - "1*2/3", "(s (e (e (e 1) * (e 2)) / (e 3)))\n", - "(1/2)*3", "(s (e (e ( (e (e 1) / (e 2)) )) * (e 3)))\n"); - /** - * This is a regression test for antlr/antlr4#625 "Duplicate action breaks - * operator precedence" - * https://github.com/antlr/antlr4/issues/625 - */ - file.addParserTests(input, "MultipleActionsPredicatesOptions", "T", "s", - "4", "(s (e 4))\n", - "1*2/3", "(s (e (e (e 1) * (e 2)) / (e 3)))\n", - "(1/2)*3", "(s (e (e ( (e (e 1) / (e 2)) )) * (e 3)))\n"); - file.addParserTest(input, "SemPredFailOption", "T", "s", - "x y z", - "(s (a (a x) y z))\n", - "line 1:4 rule a custom message\n"); - /** - * This is a regression test for antlr/antlr4#542 "First alternative cannot - * be right-associative". 
- * https://github.com/antlr/antlr4/issues/542 - */ - file.addParserTests(input, "TernaryExprExplicitAssociativity", "T", "s", - "a", "(s (e a) )\n", - "a+b", "(s (e (e a) + (e b)) )\n", - "a*b", "(s (e (e a) * (e b)) )\n", - "a?b:c", "(s (e (e a) ? (e b) : (e c)) )\n", - "a=b=c", "(s (e (e a) = (e (e b) = (e c))) )\n", - "a?b+c:d", "(s (e (e a) ? (e (e b) + (e c)) : (e d)) )\n", - "a?b=c:d", "(s (e (e a) ? (e (e b) = (e c)) : (e d)) )\n", - "a? b?c:d : e", "(s (e (e a) ? (e (e b) ? (e c) : (e d)) : (e e)) )\n", - "a?b: c?d:e", "(s (e (e a) ? (e b) : (e (e c) ? (e d) : (e e))) )\n"); - /** - * This is a regression test for antlr/antlr4#677 "labels not working in - * grammar file". - * https://github.com/antlr/antlr4/issues/677 - * - *
<p>This test treats {@code ,} and {@code >>} as part of a single compound - * operator (similar to a ternary operator).</p>
- */ - file.addParserTests(input, "ReturnValueAndActionsList1", "T", "s", - "a*b", "(s (expr (expr a) * (expr b)) )\n", - "a,c>>x", "(s (expr (expr a) , (expr c) >> (expr x)) )\n", - "x", "(s (expr x) )\n", - "a*b,c,x*y>>r", "(s (expr (expr (expr a) * (expr b)) , (expr c) , (expr (expr x) * (expr y)) >> (expr r)) )\n"); - - /** - * This is a regression test for antlr/antlr4#677 "labels not working in - * grammar file". - * https://github.com/antlr/antlr4/issues/677 - * - *
<p>This test treats the {@code ,} and {@code >>} operators separately.</p>
- */ - file.addParserTests(input, "ReturnValueAndActionsList2", "T", "s", - "a*b", "(s (expr (expr a) * (expr b)) )\n", - "a,c>>x", "(s (expr (expr (expr a) , (expr c)) >> (expr x)) )\n", - "x", "(s (expr x) )\n", - "a*b,c,x*y>>r", "(s (expr (expr (expr (expr (expr a) * (expr b)) , (expr c)) , (expr (expr x) * (expr y))) >> (expr r)) )\n"); - return file; - } - - private JUnitTestFile buildFullContextParsing() throws Exception { - JUnitTestFile file = new JUnitTestFile("FullContextParsing"); - JUnitTestMethod tm = file.addParserTest(input, "AmbigYieldsCtxSensitiveDFA", "T", "s", "abc", - "Decision 0:\n" + - "s0-ID->:s1^=>1\n", - "line 1:0 reportAttemptingFullContext d=0 (s), input='abc'\n"); - tm.debug = true; - tm = file.addParserTestsWithErrors(input, "CtxSensitiveDFA", "T", "s", - "$ 34 abc", - "Decision 1:\n" + - "s0-INT->s1\n" + - "s1-ID->:s2^=>1\n", - "line 1:5 reportAttemptingFullContext d=1 (e), input='34abc'\n" + - "line 1:2 reportContextSensitivity d=1 (e), input='34'\n", - "@ 34 abc", - "Decision 1:\n" + - "s0-INT->s1\n" + - "s1-ID->:s2^=>1\n", - "line 1:5 reportAttemptingFullContext d=1 (e), input='34abc'\n" + - "line 1:5 reportContextSensitivity d=1 (e), input='34abc'\n"); - tm.debug = true; - tm = file.addParserTest(input, "CtxSensitiveDFATwoDiffInput", "T", "s", - "$ 34 abc @ 34 abc", - "Decision 2:\n" + - "s0-INT->s1\n" + - "s1-ID->:s2^=>1\n", - "line 1:5 reportAttemptingFullContext d=2 (e), input='34abc'\n" + - "line 1:2 reportContextSensitivity d=2 (e), input='34'\n" + - "line 1:14 reportAttemptingFullContext d=2 (e), input='34abc'\n" + - "line 1:14 reportContextSensitivity d=2 (e), input='34abc'\n"); - tm.debug = true; - tm = file.addParserTest(input, "SLLSeesEOFInLLGrammar", "T", "s", - "34 abc", - "Decision 0:\n" + - "s0-INT->s1\n" + - "s1-ID->:s2^=>1\n", - "line 1:3 reportAttemptingFullContext d=0 (e), input='34abc'\n" + - "line 1:0 reportContextSensitivity d=0 (e), input='34'\n"); - tm.debug = true; - tm = file.addParserTestsWithErrors(input, "FullContextIF_THEN_ELSEParse", "T", "s", - "{ if x then return }", - "Decision 1:\n" + - "s0-'}'->:s1=>2\n", - null, - "{ if x then return else foo }", - "Decision 1:\n" + - "s0-'else'->:s1^=>1\n", - "line 1:19 reportAttemptingFullContext d=1 (stat), input='else'\n" + - "line 1:19 reportContextSensitivity d=1 (stat), input='else'\n", - "{ if x then if y then return else foo }", - "Decision 1:\n" + - "s0-'}'->:s2=>2\n" + - "s0-'else'->:s1^=>1\n", - "line 1:29 reportAttemptingFullContext d=1 (stat), input='else'\n" + - "line 1:38 reportAmbiguity d=1 (stat): ambigAlts={1, 2}, input='elsefoo}'\n", - // should not be ambiguous because the second 'else bar' clearly - // indicates that the first else should match to the innermost if. 
- // LL_EXACT_AMBIG_DETECTION makes us keep going to resolve - "{ if x then if y then return else foo else bar }", - "Decision 1:\n" + - "s0-'else'->:s1^=>1\n", - "line 1:29 reportAttemptingFullContext d=1 (stat), input='else'\n" + - "line 1:38 reportContextSensitivity d=1 (stat), input='elsefooelse'\n" + - "line 1:38 reportAttemptingFullContext d=1 (stat), input='else'\n" + - "line 1:38 reportContextSensitivity d=1 (stat), input='else'\n", - "{ if x then return else foo\n" + - "if x then if y then return else foo }", - "Decision 1:\n" + - "s0-'}'->:s2=>2\n" + - "s0-'else'->:s1^=>1\n", - "line 1:19 reportAttemptingFullContext d=1 (stat), input='else'\n" + - "line 1:19 reportContextSensitivity d=1 (stat), input='else'\n" + - "line 2:27 reportAttemptingFullContext d=1 (stat), input='else'\n" + - "line 2:36 reportAmbiguity d=1 (stat): ambigAlts={1, 2}, input='elsefoo}'\n", - "{ if x then return else foo\n" + - "if x then if y then return else foo }", - "Decision 1:\n" + - "s0-'}'->:s2=>2\n" + - "s0-'else'->:s1^=>1\n", - "line 1:19 reportAttemptingFullContext d=1 (stat), input='else'\n" + - "line 1:19 reportContextSensitivity d=1 (stat), input='else'\n" + - "line 2:27 reportAttemptingFullContext d=1 (stat), input='else'\n" + - "line 2:36 reportAmbiguity d=1 (stat): ambigAlts={1, 2}, input='elsefoo}'\n"); - tm.debug = true; - tm = file.addParserTest(input, "LoopsSimulateTailRecursion", "T", "prog", - "a(i)<-x", - "pass: a(i)<-x\n", - "line 1:3 reportAttemptingFullContext d=3 (expr_primary), input='a(i)'\n" + - "line 1:7 reportAmbiguity d=3 (expr_primary): ambigAlts={2, 3}, input='a(i)<-x'\n"); - tm.debug = true; - tm = file.addParserTest(input, "AmbiguityNoLoop", "T", "prog", - "a@", - "alt 1\n", - "line 1:2 reportAttemptingFullContext d=0 (prog), input='a@'\n" + - "line 1:2 reportAmbiguity d=0 (prog): ambigAlts={1, 2}, input='a@'\n" + - "line 1:2 reportAttemptingFullContext d=1 (expr), input='a@'\n" + - "line 1:2 reportContextSensitivity d=1 (expr), input='a@'\n"); - tm.debug = true; - tm = file.addParserTestsWithErrors(input, "ExprAmbiguity", "T", "s", - "a+b", - "(expr a + (expr b))\n", - "line 1:1 reportAttemptingFullContext d=1 (expr), input='+'\n" + - "line 1:2 reportContextSensitivity d=1 (expr), input='+b'\n", - "a+b*c", - "(expr a + (expr b * (expr c)))\n", - "line 1:1 reportAttemptingFullContext d=1 (expr), input='+'\n" + - "line 1:2 reportContextSensitivity d=1 (expr), input='+b'\n" + - "line 1:3 reportAttemptingFullContext d=1 (expr), input='*'\n" + - "line 1:5 reportAmbiguity d=1 (expr): ambigAlts={1, 2}, input='*c'\n"); - tm.debug = true; - return file; - } - - private JUnitTestFile buildCompositeLexers() throws Exception { - JUnitTestFile file = new JUnitTestFile("CompositeLexers"); - file.addCompositeLexerTest(input, "LexerDelegatorInvokesDelegateRule", "M", "abc", - "S.A\n" + - "[@0,0:0='a',<3>,1:0]\n" + - "[@1,1:1='b',<1>,1:1]\n" + - "[@2,2:2='c',<4>,1:2]\n" + - "[@3,3:2='',<-1>,1:3]\n", null, "S"); - file.addCompositeLexerTest(input, "LexerDelegatorRuleOverridesDelegate", "M", "ab", - "M.A\n" + - "[@0,0:1='ab',<1>,1:0]\n" + - "[@1,2:1='',<-1>,1:2]\n", null, "S"); - return file; - } - - private JUnitTestFile buildLexerExec() throws Exception { - JUnitTestFile file = new JUnitTestFile("LexerExec"); - file.addLexerTest(input, "QuoteTranslation", "L", "\"", - "[@0,0:0='\"',<1>,1:0]\n" + - "[@1,1:0='',<-1>,1:1]\n", null); - file.addLexerTest(input, "RefToRuleDoesNotSetTokenNorEmitAnother", "L", "34 -21 3", - "[@0,0:1='34',<2>,1:0]\n" + - "[@1,3:5='-21',<1>,1:3]\n" + - 
"[@2,7:7='3',<2>,1:7]\n" + - "[@3,8:7='',<-1>,1:8]\n", null); - file.addLexerTest(input, "Slashes", "L", "\\ / \\/ /\\", - "[@0,0:0='\\',<1>,1:0]\n" + - "[@1,2:2='/',<2>,1:2]\n" + - "[@2,4:5='\\/',<3>,1:4]\n" + - "[@3,7:8='/\\',<4>,1:7]\n" + - "[@4,9:8='',<-1>,1:9]\n", null); - file.addLexerTest(input, "Parentheses", "L", "-.-.-!", - "[@0,0:4='-.-.-',<1>,1:0]\n" + - "[@1,5:5='!',<3>,1:5]\n" + - "[@2,6:5='',<-1>,1:6]\n", null); - file.addLexerTest(input, "NonGreedyTermination1", "L", "\"hi\"\"mom\"", - "[@0,0:3='\"hi\"',<1>,1:0]\n" + - "[@1,4:8='\"mom\"',<1>,1:4]\n" + - "[@2,9:8='',<-1>,1:9]\n", null); - file.addLexerTest(input, "NonGreedyTermination2", "L", "\"\"\"mom\"", - "[@0,0:6='\"\"\"mom\"',<1>,1:0]\n" + - "[@1,7:6='',<-1>,1:7]\n", null); - file.addLexerTest(input, "GreedyOptional", "L", "//blah\n//blah\n", - "[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" + - "[@1,14:13='',<-1>,3:0]\n", null); - file.addLexerTest(input, "NonGreedyOptional", "L", "//blah\n//blah\n", - "[@0,0:6='//blah\\n',<1>,1:0]\n" + - "[@1,7:13='//blah\\n',<1>,2:0]\n" + - "[@2,14:13='',<-1>,3:0]\n", null); - file.addLexerTest(input, "GreedyClosure", "L", "//blah\n//blah\n", - "[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" + - "[@1,14:13='',<-1>,3:0]\n", null); - file.addLexerTest(input, "NonGreedyClosure", "L", "//blah\n//blah\n", - "[@0,0:6='//blah\\n',<1>,1:0]\n" + - "[@1,7:13='//blah\\n',<1>,2:0]\n" + - "[@2,14:13='',<-1>,3:0]\n", null); - file.addLexerTest(input, "GreedyPositiveClosure", "L", "//blah\n//blah\n", - "[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" + - "[@1,14:13='',<-1>,3:0]\n", null); - file.addLexerTest(input, "NonGreedyPositiveClosure", "L", "//blah\n//blah\n", - "[@0,0:6='//blah\\n',<1>,1:0]\n" + - "[@1,7:13='//blah\\n',<1>,2:0]\n" + - "[@2,14:13='',<-1>,3:0]\n", null); - file.addLexerTest(input, "RecursiveLexerRuleRefWithWildcardStar", "L", - "/* ick */\n" + - "/* /* */\n" + - "/* /*nested*/ */\n", - "[@0,0:8='/* ick */',<1>,1:0]\n" + - "[@1,9:9='\\n',<2>,1:9]\n" + - "[@2,10:34='/* /* */\\n/* /*nested*/ */',<1>,2:0]\n" + - "[@3,35:35='\\n',<2>,3:16]\n" + - "[@4,36:35='',<-1>,4:0]\n", null, 1); - file.addLexerTest(input, "RecursiveLexerRuleRefWithWildcardStar", "L", - "/* ick */x\n" + - "/* /* */x\n" + - "/* /*nested*/ */x\n", - "[@0,0:8='/* ick */',<1>,1:0]\n" + - "[@1,10:10='\\n',<2>,1:10]\n" + - "[@2,11:36='/* /* */x\\n/* /*nested*/ */',<1>,2:0]\n" + - "[@3,38:38='\\n',<2>,3:17]\n" + - "[@4,39:38='',<-1>,4:0]\n", - "line 1:9 token recognition error at: 'x'\n" + - "line 3:16 token recognition error at: 'x'\n", 2); - file.addLexerTest(input, "RecursiveLexerRuleRefWithWildcardPlus", "L", - "/* ick */\n" + - "/* /* */\n" + - "/* /*nested*/ */\n", - "[@0,0:8='/* ick */',<1>,1:0]\n" + - "[@1,9:9='\\n',<2>,1:9]\n" + - "[@2,10:34='/* /* */\\n/* /*nested*/ */',<1>,2:0]\n" + - "[@3,35:35='\\n',<2>,3:16]\n" + - "[@4,36:35='',<-1>,4:0]\n", null, 1); - file.addLexerTest(input, "RecursiveLexerRuleRefWithWildcardPlus", "L", - "/* ick */x\n" + - "/* /* */x\n" + - "/* /*nested*/ */x\n", - "[@0,0:8='/* ick */',<1>,1:0]\n" + - "[@1,10:10='\\n',<2>,1:10]\n" + - "[@2,11:36='/* /* */x\\n/* /*nested*/ */',<1>,2:0]\n" + - "[@3,38:38='\\n',<2>,3:17]\n" + - "[@4,39:38='',<-1>,4:0]\n", - "line 1:9 token recognition error at: 'x'\n" + - "line 3:16 token recognition error at: 'x'\n", 2); - file.addLexerTest(input, "ActionPlacement", "L", "ab", - "stuff0: \n" + - "stuff1: a\n" + - "stuff2: ab\n" + - "ab\n" + - "[@0,0:1='ab',<1>,1:0]\n" + - "[@1,2:1='',<-1>,1:2]\n", null); - file.addLexerTest(input, "GreedyConfigs", "L", "ab", - 
"ab\n" + - "[@0,0:1='ab',<1>,1:0]\n" + - "[@1,2:1='',<-1>,1:2]\n", null); - file.addLexerTest(input, "NonGreedyConfigs", "L", "ab", - "a\n" + - "b\n" + - "[@0,0:0='a',<1>,1:0]\n" + - "[@1,1:1='b',<3>,1:1]\n" + - "[@2,2:1='',<-1>,1:2]\n", null); - file.addLexerTest(input, "KeywordID", "L", "end eend ending a", - "[@0,0:2='end',<1>,1:0]\n" + - "[@1,3:3=' ',<3>,1:3]\n" + - "[@2,4:7='eend',<2>,1:4]\n" + - "[@3,8:8=' ',<3>,1:8]\n" + - "[@4,9:14='ending',<2>,1:9]\n" + - "[@5,15:15=' ',<3>,1:15]\n" + - "[@6,16:16='a',<2>,1:16]\n" + - "[@7,17:16='',<-1>,1:17]\n", null); - file.addLexerTest(input, "HexVsID", "L", "x 0 1 a.b a.l", - "[@0,0:0='x',<5>,1:0]\n" + - "[@1,1:1=' ',<6>,1:1]\n" + - "[@2,2:2='0',<2>,1:2]\n" + - "[@3,3:3=' ',<6>,1:3]\n" + - "[@4,4:4='1',<2>,1:4]\n" + - "[@5,5:5=' ',<6>,1:5]\n" + - "[@6,6:6='a',<5>,1:6]\n" + - "[@7,7:7='.',<4>,1:7]\n" + - "[@8,8:8='b',<5>,1:8]\n" + - "[@9,9:9=' ',<6>,1:9]\n" + - "[@10,10:10='a',<5>,1:10]\n" + - "[@11,11:11='.',<4>,1:11]\n" + - "[@12,12:12='l',<5>,1:12]\n" + - "[@13,13:12='',<-1>,1:13]\n",null); - file.addLexerTest(input, "EOFByItself", "L", "", - "[@0,0:-1='',<1>,1:0]\n" + - "[@1,0:-1='',<-1>,1:0]\n", null); - file.addLexerTest(input, "EOFSuffixInFirstRule", "L", "", - "[@0,0:-1='',<-1>,1:0]\n", null, 1); - file.addLexerTest(input, "EOFSuffixInFirstRule", "L", "a", - "[@0,0:0='a',<1>,1:0]\n" + - "[@1,1:0='',<-1>,1:1]\n", null, 2); - file.addLexerTest(input, "CharSet", "L", "34\n 34", - "I\n" + - "I\n" + - "[@0,0:1='34',<1>,1:0]\n" + - "[@1,4:5='34',<1>,2:1]\n" + - "[@2,6:5='',<-1>,2:3]\n", null); - file.addLexerTest(input, "CharSetPlus", "L", "34\n 34", - "I\n" + - "I\n" + - "[@0,0:1='34',<1>,1:0]\n" + - "[@1,4:5='34',<1>,2:1]\n" + - "[@2,6:5='',<-1>,2:3]\n", null); - file.addLexerTest(input, "CharSetNot", "L", "xaf", - "I\n" + - "[@0,0:2='xaf',<1>,1:0]\n" + - "[@1,3:2='',<-1>,1:3]\n", null); - file.addLexerTest(input, "CharSetInSet", "L", "a x", - "I\n" + - "I\n" + - "[@0,0:0='a',<1>,1:0]\n" + - "[@1,2:2='x',<1>,1:2]\n" + - "[@2,3:2='',<-1>,1:3]\n", null); - file.addLexerTest(input, "CharSetRange", "L", "34\n 34 a2 abc \n ", - "I\n" + - "I\n" + - "ID\n" + - "ID\n" + - "[@0,0:1='34',<1>,1:0]\n" + - "[@1,4:5='34',<1>,2:1]\n" + - "[@2,7:8='a2',<2>,2:4]\n" + - "[@3,10:12='abc',<2>,2:7]\n" + - "[@4,18:17='',<-1>,3:3]\n", null); - file.addLexerTest(input, "CharSetWithMissingEndRange", "L", "00\n", - "I\n" + - "[@0,0:1='00',<1>,1:0]\n" + - "[@1,3:2='',<-1>,2:0]\n", null); - file.addLexerTest(input, "CharSetWithMissingEscapeChar", "L", "34 ", - "I\n" + - "[@0,0:1='34',<1>,1:0]\n" + - "[@1,3:2='',<-1>,1:3]\n", null); - file.addLexerTest(input, "CharSetWithEscapedChar", "L", "- ] ", - "DASHBRACK\n" + - "DASHBRACK\n" + - "[@0,0:0='-',<1>,1:0]\n" + - "[@1,2:2=']',<1>,1:2]\n" + - "[@2,4:3='',<-1>,1:4]\n", null); - file.addLexerTest(input, "CharSetWithReversedRange", "L", "9", - "A\n" + - "[@0,0:0='9',<1>,1:0]\n" + - "[@1,1:0='',<-1>,1:1]\n", null); - file.addLexerTest(input, "CharSetWithQuote1", "L", "b\"a", - "A\n" + - "[@0,0:2='b\"a',<1>,1:0]\n" + - "[@1,3:2='',<-1>,1:3]\n", null); - file.addLexerTest(input, "CharSetWithQuote2", "L", "b\"\\a", - "A\n" + - "[@0,0:3='b\"\\a',<1>,1:0]\n" + - "[@1,4:3='',<-1>,1:4]\n", null); - final int TOKENS = 4; - final int LABEL = 5; - final int IDENTIFIER = 6; - file.addLexerTest(input, "PositionAdjustingLexer", "PositionAdjustingLexer", - "tokens\n" + - "tokens {\n" + - "notLabel\n" + - "label1 =\n" + - "label2 +=\n" + - "notLabel\n", - "[@0,0:5='tokens',<" + IDENTIFIER + ">,1:0]\n" + - "[@1,7:12='tokens',<" + TOKENS + 
">,2:0]\n" + - "[@2,14:14='{',<3>,2:7]\n" + - "[@3,16:23='notLabel',<" + IDENTIFIER + ">,3:0]\n" + - "[@4,25:30='label1',<" + LABEL + ">,4:0]\n" + - "[@5,32:32='=',<1>,4:7]\n" + - "[@6,34:39='label2',<" + LABEL + ">,5:0]\n" + - "[@7,41:42='+=',<2>,5:7]\n" + - "[@8,44:51='notLabel',<" + IDENTIFIER + ">,6:0]\n" + - "[@9,53:52='',<-1>,7:0]\n", null); - file.addLexerTest(input, "LargeLexer", "L", "KW400", - "[@0,0:4='KW400',<402>,1:0]\n" + - "[@1,5:4='',<-1>,1:5]\n", null); - /** - * This is a regression test for antlr/antlr4#687 "Empty zero-length tokens - * cannot have lexer commands" and antlr/antlr4#688 "Lexer cannot match - * zero-length tokens" */ - file.addLexerTest(input, "ZeroLengthToken", "L", "'xxx'", - "[@0,0:4=''xxx'',<1>,1:0]\n" + - "[@1,5:4='',<-1>,1:5]\n", null); - return file; - } - - private JUnitTestFile buildCompositeParsers() throws Exception { - JUnitTestFile file = new JUnitTestFile("CompositeParsers"); - file.importErrorQueue = true; - file.importGrammar = true; - file.addCompositeParserTest(input, "DelegatorInvokesDelegateRule", "M", "s", "b", "S.a\n", null, "S"); - file.addCompositeParserTest(input, "BringInLiteralsFromDelegate", "M", "s", "=a", "S.a\n", null, "S"); - file.addCompositeParserTest(input, "DelegatorInvokesDelegateRuleWithArgs", "M", "s", "b", "S.a1000\n", null, "S"); - file.addCompositeParserTest(input, "DelegatorInvokesDelegateRuleWithReturnStruct", "M", "s", "b", "S.ab\n", null, "S"); - file.addCompositeParserTest(input, "DelegatorAccessesDelegateMembers", "M", "s", "b", "foo\n", null, "S"); - file.addCompositeParserTest(input, "DelegatorInvokesFirstVersionOfDelegateRule", "M", "s", "b", "S.a\n", null, "S", "T"); - CompositeParserTestMethod ct = file.addCompositeParserTest(input, "DelegatesSeeSameTokenType", "M", "s", "aa", "S.x\nT.y\n", null, "S", "T"); - ct.afterGrammar = "writeFile(tmpdir, \"M.g4\", grammar);\n" + - "ErrorQueue equeue = new ErrorQueue();\n" + - "Grammar g = new Grammar(tmpdir+\"/M.g4\", grammar, equeue);\n" + - "String expectedTokenIDToTypeMap = \"{EOF=-1, B=1, A=2, C=3, WS=4}\";\n" + - "String expectedStringLiteralToTypeMap = \"{'a'=2, 'b'=1, 'c'=3}\";\n" + - "String expectedTypeToTokenList = \"[B, A, C, WS]\";\n" + - "assertEquals(expectedTokenIDToTypeMap, g.tokenNameToTypeMap.toString());\n" + - "assertEquals(expectedStringLiteralToTypeMap, sort(g.stringLiteralToTypeMap).toString());\n" + - "assertEquals(expectedTypeToTokenList, realElements(g.typeToTokenList).toString());\n" + - "assertEquals(\"unexpected errors: \"+equeue, 0, equeue.errors.size());\n"; - ct = file.addCompositeParserTest(input, "CombinedImportsCombined", "M", "s", "x 34 9", "S.x\n", null, "S"); - ct.afterGrammar = "writeFile(tmpdir, \"M.g4\", grammar);\n" + - "ErrorQueue equeue = new ErrorQueue();\n" + - "new Grammar(tmpdir+\"/M.g4\", grammar, equeue);\n" + - "assertEquals(\"unexpected errors: \" + equeue, 0, equeue.errors.size());\n"; - file.addCompositeParserTest(input, "DelegatorRuleOverridesDelegate", "M", "a", "c", "S.a\n", null, "S"); - file.addCompositeParserTest(input, "DelegatorRuleOverridesLookaheadInDelegate", "M", "prog", "float x = 3;", "Decl: floatx=3;\n", null, "S"); - file.addCompositeParserTest(input, "DelegatorRuleOverridesDelegates", "M", "a", "c", "M.b\nS.a\n", null, "S", "T"); - file.addCompositeParserTest(input, "KeywordVSIDOrder", "M", "a", "abc", - "M.A\n" + - "M.a: [@0,0:2='abc',<1>,1:0]\n", null, "S"); - file.addCompositeParserTest(input, "ImportedRuleWithAction", "M", "s", "b", "", null, "S"); - file.addCompositeParserTest(input, 
"ImportedGrammarWithEmptyOptions", "M", "s", "b", "", null, "S"); - file.addCompositeParserTest(input, "ImportLexerWithOnlyFragmentRules", "M", "program", "test test", "", null, "S"); - return file; - } - - private JUnitTestFile buildParserExec() throws Exception { - JUnitTestFile file = new JUnitTestFile("ParserExec"); - file.addParserTest(input, "Labels", "T", "a", "abc 34;", "", null); - file.addParserTest(input, "ListLabelsOnSet", "T", "a", "abc 34;", "", null); - file.addParserTest(input, "AorB", "T", "a", "34", "alt 2\n", null); - file.addParserTest(input, "Basic", "T", "a", "abc 34", "abc34\n", null); - file.addParserTest(input, "APlus", "T", "a", "a b c", "abc\n", null); - file.addParserTest(input, "AorAPlus", "T", "a", "a b c", "abc\n", null); - file.addParserTest(input, "IfIfElseGreedyBinding1", "T", "start", - "if y if y x else x", "if y x else x\nif y if y x else x\n", null); - file.addParserTest(input, "IfIfElseGreedyBinding2", "T", "start", - "if y if y x else x", "if y x else x\nif y if y x else x\n", null); - file.addParserTest(input, "IfIfElseNonGreedyBinding1", "T", "start", - "if y if y x else x", "if y x\nif y if y x else x\n", null); - file.addParserTest(input, "IfIfElseNonGreedyBinding2", "T", "start", - "if y if y x else x", "if y x\nif y if y x else x\n", null); - file.addParserTests(input, "AStar", "T", "a", - "", "\n", - "a b c", "abc\n"); - file.addParserTests(input, "LL1OptionalBlock", "T", "a", - "", "\n", - "a", "a\n"); - file.addParserTests(input, "AorAStar", "T", "a", - "", "\n", - "a b c", "abc\n"); - file.addParserTest(input, "AorBPlus", "T", "a", "a 34 c", "a34c\n", null); - file.addParserTests(input, "AorBStar", "T", "a", - "", "\n", - "a 34 c", "a34c\n"); - file.addParserTests(input, "Optional", "T", "stat", - "x", "", - "if x", "", - "if x else x", "", - "if if x else x", ""); - file.addParserTest(input, "PredicatedIfIfElse", "T", "s", "if x if x a else b", "", null); - /* file.addTest(input, "StartRuleWithoutEOF", "T", "s", "abc 34", - "Decision 0:\n" + "s0-ID->s1\n" + "s1-INT->s2\n" + "s2-EOF->:s3=>1\n", null); */ - file.addParserTest(input, "LabelAliasingAcrossLabeledAlternatives", "T", "start", "xy", "x\ny\n", null); - file.addParserTest(input, "PredictionIssue334", "T", "file_", "a", "(file_ (item a) )\n", null); - file.addParserTest(input, "ListLabelForClosureContext", "T", "expression", "a", "", null); - /** - * This test ensures that {@link ParserATNSimulator} produces a correct - * result when the grammar contains multiple explicit references to - * {@code EOF} inside of parser rules. - */ - file.addParserTest(input, "MultipleEOFHandling", "T", "prog", "x", "", null); - /** - * This test ensures that {@link ParserATNSimulator} does not produce a - * {@link StackOverflowError} when it encounters an {@code EOF} transition - * inside a closure. 
- */ - file.addParserTest(input, "EOFInClosure", "T", "prog", "x", "", null); - /** - * This is a regression test for antlr/antlr4#561 "Issue with parser - * generation in 4.2.2" - * https://github.com/antlr/antlr4/issues/561 - */ - file.addParserTests(input, "ReferenceToATN", "T", "a", - "", "\n", - "a 34 c", "a34c\n"); - file.addParserTest(input, "ParserProperty", "T", "a", "abc", "valid\n", null); - /*CompositeParserTestMethod tm = file.addCompositeParserTest(input, "AlternateQuotes", "ModeTagsParser", "file_", "", "", null, "ModeTagsLexer"); - tm.slaveIsLexer = true;*/ - return file; - } - - - -} diff --git a/tool/test/org/antlr/v4/test/rt/gen/Grammar.java b/tool/test/org/antlr/v4/test/rt/gen/Grammar.java deleted file mode 100644 index ab444daad..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/Grammar.java +++ /dev/null @@ -1,51 +0,0 @@ -package org.antlr.v4.test.rt.gen; - -import org.stringtemplate.v4.ST; -import org.stringtemplate.v4.STGroup; - -import java.io.File; -import java.io.FileInputStream; -import java.io.InputStream; - -public class Grammar { - - public String fileName; - public String grammarName; - public String[] lines; - public ST template; - - public Grammar(String fileName, String grammarName) { - this.fileName = fileName; - this.grammarName = grammarName; - } - - public void load(File grammarDir) throws Exception { - template = loadGrammar(grammarDir, fileName); - } - - protected ST loadGrammar(File grammarDir, String grammarFileName) throws Exception { - File file = new File(grammarDir, grammarFileName + ".st"); - InputStream input = new FileInputStream(file); - try { - byte[] data = new byte[(int)file.length()]; - int next = 0; - while(input.available()>0) { - int read = input.read(data, next, data.length - next); - next += read; - } - String s = new String(data); - return new ST(s); - } finally { - input.close(); - } - } - - public void generate(STGroup group) { - template.add("grammarName", grammarName); - template.groupThatCreatedThisInstance = group; // so templates get interpreted - lines = template.render().split("\n"); - for(int i=0;i unitTests = new ArrayList(); - public String name; - public List tests = new ArrayList(); - public boolean importErrorQueue = false; - public boolean importGrammar = false; - - public JUnitTestFile(String name) { - this.name = name; - } - - public String getName() { - return name; - } - - public ParserTestMethod addParserTest(File grammarDir, String name, String grammarName, String methodName, - String input, String expectedOutput, String expectedErrors) throws Exception { - ParserTestMethod tm = new ParserTestMethod(name, grammarName, methodName, input, expectedOutput, expectedErrors); - tm.loadGrammars(grammarDir, this.name); - unitTests.add(tm); - return tm; - } - - public AbstractParserTestMethod addParserTests(File grammarDir, String name, String grammarName, String methodName, - String ... 
inputsAndOuputs) throws Exception { - AbstractParserTestMethod tm = new AbstractParserTestMethod(name, grammarName, methodName); - tm.loadGrammars(grammarDir, this.name); - unitTests.add(tm); - for(int i=0; i skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorInvokesDelegateRule_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorInvokesDelegateRule_S.st deleted file mode 100644 index 9d05c4c55..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorInvokesDelegateRule_S.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar S; -A : 'a' {}; -C : 'c' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorRuleOverridesDelegate.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorRuleOverridesDelegate.st deleted file mode 100644 index ed26569b1..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorRuleOverridesDelegate.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar M; -import S; -A : 'a' B {}; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorRuleOverridesDelegate_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorRuleOverridesDelegate_S.st deleted file mode 100644 index a1ca69d83..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeLexers/LexerDelegatorRuleOverridesDelegate_S.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar S; -A : 'a' {}; -B : 'b' {}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/BringInLiteralsFromDelegate.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/BringInLiteralsFromDelegate.st deleted file mode 100644 index 363b5bac3..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/BringInLiteralsFromDelegate.st +++ /dev/null @@ -1,4 +0,0 @@ -grammar M; -import S; -s : a ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/BringInLiteralsFromDelegate_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/BringInLiteralsFromDelegate_S.st deleted file mode 100644 index ed4b76ea8..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/BringInLiteralsFromDelegate_S.st +++ /dev/null @@ -1,2 +0,0 @@ -parser grammar S; -a : '=' 'a' {}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/CombinedImportsCombined.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/CombinedImportsCombined.st deleted file mode 100644 index 1609afb7b..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/CombinedImportsCombined.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar M; -import S; -s : x INT; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/CombinedImportsCombined_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/CombinedImportsCombined_S.st deleted file mode 100644 index b2b4b7452..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/CombinedImportsCombined_S.st +++ /dev/null @@ -1,5 +0,0 @@ -parser grammar S; -tokens { A, B, C } -x : 'x' INT {}; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType.st deleted file mode 100644 index 
dcd2e01b4..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType.st +++ /dev/null @@ -1,17 +0,0 @@ -// The lexer will create rules to match letters a, b, c. -// The associated token types A, B, C must have the same value -// and all import'd parsers. Since ANTLR regenerates all imports -// for use with the delegator M, it can generate the same token type -// mapping in each parser: -// public static final int C=6; -// public static final int EOF=-1; -// public static final int B=5; -// public static final int WS=7; -// public static final int A=4; -grammar M; -import S,T; -s : x y ; // matches AA, which should be 'aa' -B : 'b' ; // another order: B, A, C -A : 'a' ; -C : 'c' ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType_S.st deleted file mode 100644 index b67e54223..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType_S.st +++ /dev/null @@ -1,3 +0,0 @@ -parser grammar S; -tokens { A, B, C } -x : A {}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType_T.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType_T.st deleted file mode 100644 index 06817ea66..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatesSeeSameTokenType_T.st +++ /dev/null @@ -1,3 +0,0 @@ -parser grammar S; -tokens { C, B, A } // reverse order -y : A {}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorAccessesDelegateMembers.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorAccessesDelegateMembers.st deleted file mode 100644 index f91311b18..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorAccessesDelegateMembers.st +++ /dev/null @@ -1,4 +0,0 @@ -grammar M; // uses no rules from the import -import S; -s : 'b'{}; // gS is import pointer -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorAccessesDelegateMembers_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorAccessesDelegateMembers_S.st deleted file mode 100644 index 8b8bdca29..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorAccessesDelegateMembers_S.st +++ /dev/null @@ -1,5 +0,0 @@ -parser grammar S; -@members { - -} -a : B; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRule.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRule.st deleted file mode 100644 index 0b8891303..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRule.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar M; -import S; -s : a ; -B : 'b' ; // defines B from inherited token space -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithArgs.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithArgs.st deleted file mode 100644 index cff94f4a8..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithArgs.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar M; -import S; -s : label=a[3] {} ; -B : 'b' ; // 
defines B from inherited token space -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithArgs_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithArgs_S.st deleted file mode 100644 index 7b965e623..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithArgs_S.st +++ /dev/null @@ -1,2 +0,0 @@ -parser grammar S; -a[int x] returns [int y] : B {;$y=1000;}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithReturnStruct.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithReturnStruct.st deleted file mode 100644 index a42f67f68..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithReturnStruct.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar M; -import S; -s : a {} ; -B : 'b' ; // defines B from inherited token space -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithReturnStruct_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithReturnStruct_S.st deleted file mode 100644 index 58deba919..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRuleWithReturnStruct_S.st +++ /dev/null @@ -1,2 +0,0 @@ -parser grammar S; -a : B {}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRule_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRule_S.st deleted file mode 100644 index 21c07ab4f..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesDelegateRule_S.st +++ /dev/null @@ -1,2 +0,0 @@ -parser grammar S; -a : B {}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule.st deleted file mode 100644 index 3bbf0d2e5..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar M; -import S,T; -s : a ; -B : 'b' ; // defines B from inherited token space -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule_S.st deleted file mode 100644 index e729bc983..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule_S.st +++ /dev/null @@ -1,3 +0,0 @@ -parser grammar S; -a : B {}; -b : B; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule_T.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule_T.st deleted file mode 100644 index 259014348..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorInvokesFirstVersionOfDelegateRule_T.st +++ /dev/null @@ -1,2 +0,0 @@ -parser grammar T; -a : B {}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegate.st 
b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegate.st deleted file mode 100644 index 89a08234c..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegate.st +++ /dev/null @@ -1,4 +0,0 @@ -grammar M; -import S; -b : 'b'|'c'; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegate_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegate_S.st deleted file mode 100644 index 4354737ad..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegate_S.st +++ /dev/null @@ -1,3 +0,0 @@ -parser grammar S; -a : b {}; -b : B ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates.st deleted file mode 100644 index f79357b0c..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates.st +++ /dev/null @@ -1,4 +0,0 @@ -grammar M; -import S, T; -b : 'b'|'c' {}|B|A; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates_S.st deleted file mode 100644 index 22e1ee03f..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates_S.st +++ /dev/null @@ -1,4 +0,0 @@ -parser grammar S; -a : b {}; -b : 'b' ; - \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates_T.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates_T.st deleted file mode 100644 index 69f759d51..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesDelegates_T.st +++ /dev/null @@ -1,3 +0,0 @@ -parser grammar S; -tokens { A } -b : 'b' {}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesLookaheadInDelegate.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesLookaheadInDelegate.st deleted file mode 100644 index 6bc0e69e1..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesLookaheadInDelegate.st +++ /dev/null @@ -1,7 +0,0 @@ -grammar M; -import S; -prog : decl ; -type_ : 'int' | 'float' ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesLookaheadInDelegate_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesLookaheadInDelegate_S.st deleted file mode 100644 index c00963f02..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/DelegatorRuleOverridesLookaheadInDelegate_S.st +++ /dev/null @@ -1,5 +0,0 @@ -parser grammar S; -type_ : 'int' ; -decl : type_ ID ';' - | type_ ID init ';' {}; -init : '=' INT; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportLexerWithOnlyFragmentRules.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportLexerWithOnlyFragmentRules.st deleted file mode 100644 index ed0547f02..000000000 --- 
a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportLexerWithOnlyFragmentRules.st +++ /dev/null @@ -1,4 +0,0 @@ -grammar M; -import S; -program : 'test' 'test'; -WS : (UNICODE_CLASS_Zs)+ -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportLexerWithOnlyFragmentRules_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportLexerWithOnlyFragmentRules_S.st deleted file mode 100644 index dab7479fe..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportLexerWithOnlyFragmentRules_S.st +++ /dev/null @@ -1,6 +0,0 @@ -lexer grammar S; -fragment -UNICODE_CLASS_Zs : '\u0020' | '\u00A0' | '\u1680' | '\u180E' - | '\u2000'..'\u200A' - | '\u202F' | '\u205F' | '\u3000' - ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedGrammarWithEmptyOptions.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedGrammarWithEmptyOptions.st deleted file mode 100644 index 46c443c69..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedGrammarWithEmptyOptions.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar M; -import S; -s : a; -B : 'b'; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedGrammarWithEmptyOptions_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedGrammarWithEmptyOptions_S.st deleted file mode 100644 index e34c7abe2..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedGrammarWithEmptyOptions_S.st +++ /dev/null @@ -1,3 +0,0 @@ -parser grammar S; -options {} -a : B; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedRuleWithAction.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedRuleWithAction.st deleted file mode 100644 index 46c443c69..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedRuleWithAction.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar M; -import S; -s : a; -B : 'b'; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedRuleWithAction_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedRuleWithAction_S.st deleted file mode 100644 index 2a20ae0dd..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/ImportedRuleWithAction_S.st +++ /dev/null @@ -1,2 +0,0 @@ -parser grammar S; -a @after {} : B; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/KeywordVSIDOrder.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/KeywordVSIDOrder.st deleted file mode 100644 index f0620d567..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/KeywordVSIDOrder.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar M; -import S; -a : A {}; -A : 'abc' {}; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/KeywordVSIDOrder_S.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/KeywordVSIDOrder_S.st deleted file mode 100644 index 8808195c1..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/CompositeParsers/KeywordVSIDOrder_S.st +++ /dev/null @@ -1,2 +0,0 @@ -lexer grammar S; -ID : 'a'..'z'+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/AmbigYieldsCtxSensitiveDFA.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/AmbigYieldsCtxSensitiveDFA.st deleted file mode 100644 index 
b442c09c6..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/AmbigYieldsCtxSensitiveDFA.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar ; -s @after {} - : ID | ID {} ; -ID : 'a'..'z'+; -WS : (' '|'\t'|'\n')+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/AmbiguityNoLoop.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/AmbiguityNoLoop.st deleted file mode 100644 index 867aa162c..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/AmbiguityNoLoop.st +++ /dev/null @@ -1,12 +0,0 @@ -grammar ; -prog -@init {} - : expr expr {} - | expr - ; -expr: '@' - | ID '@' - | ID - ; -ID : [a-z]+ ; -WS : [ \r\n\t]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/CtxSensitiveDFA.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/CtxSensitiveDFA.st deleted file mode 100644 index 4c69eb9fa..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/CtxSensitiveDFA.st +++ /dev/null @@ -1,9 +0,0 @@ -grammar ; -s @after {} - : '$' a | '@' b ; -a : e ID ; -b : e INT ID ; -e : INT | ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+ ; -WS : (' '|'\t'|'\n')+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/CtxSensitiveDFATwoDiffInput.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/CtxSensitiveDFATwoDiffInput.st deleted file mode 100644 index 3d021b445..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/CtxSensitiveDFATwoDiffInput.st +++ /dev/null @@ -1,9 +0,0 @@ -grammar ; -s @after {} - : ('$' a | '@' b)+ ; -a : e ID ; -b : e INT ID ; -e : INT | ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+ ; -WS : (' '|'\t'|'\n')+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/ExprAmbiguity.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/ExprAmbiguity.st deleted file mode 100644 index da23906ab..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/ExprAmbiguity.st +++ /dev/null @@ -1,13 +0,0 @@ -grammar ; -s -@init {} -: expr[0] {}; - expr[int _p] - : ID - ( - {5 >= $_p}? '*' expr[6] - | {4 >= $_p}? '+' expr[5] - )* - ; -ID : [a-zA-Z]+ ; -WS : [ \r\n\t]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/FullContextIF_THEN_ELSEParse.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/FullContextIF_THEN_ELSEParse.st deleted file mode 100644 index 1dade2a7e..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/FullContextIF_THEN_ELSEParse.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -s -@init {} -@after {} - : '{' stat* '}' ; -stat: 'if' ID 'then' stat ('else' ID)? 
- | 'return' - ; -ID : 'a'..'z'+ ; -WS : (' '|'\t'|'\n')+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/LoopsSimulateTailRecursion.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/LoopsSimulateTailRecursion.st deleted file mode 100644 index 09883ac5c..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/LoopsSimulateTailRecursion.st +++ /dev/null @@ -1,15 +0,0 @@ -grammar ; -prog -@init {} - : expr_or_assign*; -expr_or_assign - : expr '++' {} - | expr {} - ; -expr: expr_primary ('\<-' ID)?; -expr_primary - : '(' ID ')' - | ID '(' ID ')' - | ID - ; -ID : [a-z]+ ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/SLLSeesEOFInLLGrammar.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/SLLSeesEOFInLLGrammar.st deleted file mode 100644 index c74f4def8..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/FullContextParsing/SLLSeesEOFInLLGrammar.st +++ /dev/null @@ -1,9 +0,0 @@ -grammar ; -s @after {} - : a; -a : e ID ; -b : e INT ID ; -e : INT | ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+ ; -WS : (' '|'\t'|'\n')+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/AmbigLR.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/AmbigLR.st deleted file mode 100644 index 47e9e649c..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/AmbigLR.st +++ /dev/null @@ -1,21 +0,0 @@ -grammar ; -prog: stat ; -stat: expr NEWLINE # printExpr - | ID '=' expr NEWLINE # assign - | NEWLINE # blank - ; -expr: expr ('*'|'/') expr # MulDiv - | expr ('+'|'-') expr # AddSub - | INT # int - | ID # id - | '(' expr ')' # parens - ; - -MUL : '*' ; // assigns token name to '*' used above in grammar -DIV : '/' ; -ADD : '+' ; -SUB : '-' ; -ID : [a-zA-Z]+ ; // match identifiers -INT : [0-9]+ ; // match integers -NEWLINE:'\r'? '\n' ; // return newlines to parser (is end-statement signal) -WS : [ \t]+ -> skip ; // toss out whitespace diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Declarations.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Declarations.st deleted file mode 100644 index 41db93b3b..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Declarations.st +++ /dev/null @@ -1,14 +0,0 @@ -grammar ; -s @after {} : declarator EOF ; // must indicate EOF can follow -declarator - : declarator '[' e ']' - | declarator '[' ']' - | declarator '(' ')' - | '*' declarator // binds less tight than suffixes - | '(' declarator ')' - | ID - ; -e : INT ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/DirectCallToLeftRecursiveRule.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/DirectCallToLeftRecursiveRule.st deleted file mode 100644 index b41efcc64..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/DirectCallToLeftRecursiveRule.st +++ /dev/null @@ -1,6 +0,0 @@ -grammar ; -a @after {} : a ID - | ID - ; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Expressions.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Expressions.st deleted file mode 100644 index 6767d6dc4..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Expressions.st +++ /dev/null @@ -1,13 +0,0 @@ -grammar ; -s @after {} : e EOF ; // must indicate EOF can follow -e : e '.' ID - | e '.' 
'this' - | '-' e - | e '*' e - | e ('+'|'-') e - | INT - | ID - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/JavaExpressions.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/JavaExpressions.st deleted file mode 100644 index 2cb9f774a..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/JavaExpressions.st +++ /dev/null @@ -1,56 +0,0 @@ -grammar ; -s @after {} : e EOF ; // must indicate EOF can follow -expressionList - : e (',' e)* - ; -e : '(' e ')' - | 'this' - | 'super' - | INT - | ID - | type_ '.' 'class' - | e '.' ID - | e '.' 'this' - | e '.' 'super' '(' expressionList? ')' - | e '.' 'new' ID '(' expressionList? ')' - | 'new' type_ ( '(' expressionList? ')' | ('[' e ']')+) - | e '[' e ']' - | '(' type_ ')' e - | e ('++' | '--') - | e '(' expressionList? ')' - | ('+'|'-'|'++'|'--') e - | ('~'|'!') e - | e ('*'|'/'|'%') e - | e ('+'|'-') e - | e ('\<\<' | '>>>' | '>>') e - | e ('\<=' | '>=' | '>' | '\<') e - | e 'instanceof' e - | e ('==' | '!=') e - | e '&' e - |\ e '^' e - | e '|' e - | e '&&' e - | e '||' e - | e '?' e ':' e - |\ - e ('=' - |'+=' - |'-=' - |'*=' - |'/=' - |'&=' - |'|=' - |'^=' - |'>>=' - |'>>>=' - |'\<\<=' - |'%=') e - ; -type_: ID - | ID '[' ']' - | 'int' - | 'int' '[' ']' - ; -ID : ('a'..'z'|'A'..'Z'|'_'|'$')+; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/LabelsOnOpSubrule.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/LabelsOnOpSubrule.st deleted file mode 100644 index 567f539d6..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/LabelsOnOpSubrule.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -s @after {} : e; -e : a=e op=('*'|'/') b=e {} - | INT {} - | '(' x=e ')' {} - ; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleActions.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleActions.st deleted file mode 100644 index a96434467..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleActions.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -s @after {} : e ; -e : a=e op=('*'|'/') b=e {}{} - | INT {}{} - | '(' x=e ')' {}{} - ; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleActionsPredicatesOptions.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleActionsPredicatesOptions.st deleted file mode 100644 index aa74f8906..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleActionsPredicatesOptions.st +++ /dev/null @@ -1,9 +0,0 @@ -grammar ; -s @after {} : e ; -e : a=e op=('*'|'/') b=e {}{}? 
- | a=e op=('+'|'-') b=e {}\{}?\ - | INT {}{} - | '(' x=e ')' {}{} - ; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleAlternativesWithCommonLabel.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleAlternativesWithCommonLabel.st deleted file mode 100644 index 8c540c2d5..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/MultipleAlternativesWithCommonLabel.st +++ /dev/null @@ -1,16 +0,0 @@ -grammar ; -s : e {}; -e returns [int v] - : e '*' e {$v = .e(0).v * .e(1).v;} # binary - | e '+' e {$v = .e(0).v + .e(1).v;} # binary - | INT {$v = $INT.int;} # anInt - | '(' e ')' {$v = $e.v;} # parens - | left=e INC {$v = $left.v + 1;} # unary - | left=e DEC {$v = $left.v - 1;} # unary - | ID {} # anID - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+ ; -INC : '++' ; -DEC : '--' ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/PrecedenceFilterConsidersContext.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/PrecedenceFilterConsidersContext.st deleted file mode 100644 index 4bbada997..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/PrecedenceFilterConsidersContext.st +++ /dev/null @@ -1,6 +0,0 @@ -grammar ; -prog -@after {} -: statement* EOF {}; -statement: letterA | statement letterA 'b' ; -letterA: 'a'; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/PrefixOpWithActionAndLabel.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/PrefixOpWithActionAndLabel.st deleted file mode 100644 index d78cbe37d..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/PrefixOpWithActionAndLabel.st +++ /dev/null @@ -1,11 +0,0 @@ -grammar ; -s : e {} ; -e returns [String result] - : ID '=' e1=e {$result = "(" + $ID.text + "=" + $e1.result + ")";} - | ID {$result = $ID.text;} - | e1=e '+' e2=e {$result = "(" + $e1.result + "+" + $e2.result + ")";} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; - diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActions.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActions.st deleted file mode 100644 index 88ee82c58..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActions.st +++ /dev/null @@ -1,11 +0,0 @@ -grammar ; -s : e {}; -e returns [int v, ignored] - : a=e '*' b=e {$v = $a.v * $b.v;} - | a=e '+' b=e {$v = $a.v + $b.v;} - | INT {$v = $INT.int;} - | '(' x=e ')' {$v = $x.v;} - ; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; - diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsAndLabels.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsAndLabels.st deleted file mode 100644 index 739b664a0..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsAndLabels.st +++ /dev/null @@ -1,14 +0,0 @@ -grammar ; -s : q=e {}; -e returns [int v] - : a=e op='*' b=e {$v = $a.v * $b.v;} # mult - | a=e '+' b=e {$v = $a.v + $b.v;} # add - | INT {$v = $INT.int;} # anInt - | '(' x=e ')' {$v = $x.v;} # parens - | x=e '++' {$v = $x.v+1;} # inc - | e '--' # dec - | ID {$v = 3;} # anID - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsList1.st 
b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsList1.st deleted file mode 100644 index 9ac58c4a7..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsList1.st +++ /dev/null @@ -1,13 +0,0 @@ -grammar ; -s @after {} : expr EOF; -expr: - a=expr '*' a=expr #Factor - | b+=expr (',' b+=expr)* '>>' c=expr #Send - | ID #JustId //semantic check on modifiers -; - -ID : ('a'..'z'|'A'..'Z'|'_') - ('a'..'z'|'A'..'Z'|'0'..'9'|'_')* -; - -WS : [ \t\n]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsList2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsList2.st deleted file mode 100644 index eebe5a5ca..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/ReturnValueAndActionsList2.st +++ /dev/null @@ -1,12 +0,0 @@ -grammar ; -s @after {} : expr EOF; -expr: - a=expr '*' a=expr #Factor - | b+=expr ',' b+=expr #Comma - | b+=expr '>>' c=expr #Send - | ID #JustId //semantic check on modifiers - ; -ID : ('a'..'z'|'A'..'Z'|'_') - ('a'..'z'|'A'..'Z'|'0'..'9'|'_')* -; -WS : [ \t\n]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/SemPred.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/SemPred.st deleted file mode 100644 index b95a58025..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/SemPred.st +++ /dev/null @@ -1,7 +0,0 @@ -grammar ; -s @after {} : a ; -a : a {}? ID - | ID - ; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/SemPredFailOption.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/SemPredFailOption.st deleted file mode 100644 index b5eb5f7ec..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/SemPredFailOption.st +++ /dev/null @@ -1,7 +0,0 @@ -grammar ; -s @after {} : a ; -a : a ID {}?\ - | ID - ; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Simple.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Simple.st deleted file mode 100644 index 5f5ea05d1..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/Simple.st +++ /dev/null @@ -1,7 +0,0 @@ -grammar ; -s @after {} : a ; -a : a ID - | ID - ; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/TernaryExpr.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/TernaryExpr.st deleted file mode 100644 index 5c6721a58..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/TernaryExpr.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -s @after {} : e EOF ; // must indicate EOF can follow or 'a\' won't match -e : e '*' e - | e '+' e - |\ e '?' e ':' e - |\ e '=' e - | ID - ; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/TernaryExprExplicitAssociativity.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/TernaryExprExplicitAssociativity.st deleted file mode 100644 index d893ed739..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/TernaryExprExplicitAssociativity.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -s @after {} : e EOF; // must indicate EOF can follow or 'a\' won't match -e :\ e '*' e - |\ e '+' e - |\ e '?' 
e ':' e - |\ e '=' e - | ID - ; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/WhitespaceInfluence.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/WhitespaceInfluence.st deleted file mode 100644 index ba95b894d..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LeftRecursion/WhitespaceInfluence.st +++ /dev/null @@ -1,49 +0,0 @@ -grammar ; -prog : expression EOF; -expression - : ID '(' expression (',' expression)* ')' # doFunction - | '(' expression ')' # doParenthesis - | '!' expression # doNot - | '-' expression # doNegate - | '+' expression # doPositiv - | expression '^' expression # doPower - | expression '*' expression # doMultipy - | expression '/' expression # doDivide - | expression '%' expression # doModulo - | expression '-' expression # doMinus - | expression '+' expression # doPlus - | expression '=' expression # doEqual - | expression '!=' expression # doNotEqual - | expression '>' expression # doGreather - | expression '>=' expression # doGreatherEqual - | expression '\<' expression # doLesser - | expression '\<=' expression # doLesserEqual - | expression K_IN '(' expression (',' expression)* ')' # doIn - | expression ( '&' | K_AND) expression # doAnd - | expression ( '|' | K_OR) expression # doOr - | '[' expression (',' expression)* ']' # newArray - | K_TRUE # newTrueBoolean - | K_FALSE # newFalseBoolean - | NUMBER # newNumber - | DATE # newDateTime - | ID # newIdentifier - | SQ_STRING # newString - | K_NULL # newNull - ; - -// Fragments -fragment DIGIT : '0' .. '9'; -fragment UPPER : 'A' .. 'Z'; -fragment LOWER : 'a' .. 'z'; -fragment LETTER : LOWER | UPPER; -fragment WORD : LETTER | '_' | '$' | '#' | '.'; -fragment ALPHANUM : WORD | DIGIT; - -// Tokens -ID : LETTER ALPHANUM*; -NUMBER : DIGIT+ ('.' DIGIT+)? (('e'|'E')('+'|'-')? DIGIT+)?; -DATE : '\'' DIGIT DIGIT DIGIT DIGIT '-' DIGIT DIGIT '-' DIGIT DIGIT (' ' DIGIT DIGIT ':' DIGIT DIGIT ':' DIGIT DIGIT ('.' DIGIT+)?)? '\''; -SQ_STRING : '\'' ('\'\'' | ~'\'')* '\''; -DQ_STRING : '\"' ('\\\"' | ~'\"')* '\"'; -WS : [ \t\n\r]+ -> skip ; -COMMENTS : ('/*' .*? 
'*/' | '//' ~'\n'* '\n' ) -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/DFAToATNThatFailsBackToDFA.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/DFAToATNThatFailsBackToDFA.st deleted file mode 100644 index 7eae0c410..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/DFAToATNThatFailsBackToDFA.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -A : 'ab' ; -B : 'abc' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/DFAToATNThatMatchesThenFailsInATN.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/DFAToATNThatMatchesThenFailsInATN.st deleted file mode 100644 index 528a248fe..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/DFAToATNThatMatchesThenFailsInATN.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -A : 'ab' ; -B : 'abc' ; -C : 'abcd' ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/EnforcedGreedyNestedBrances.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/EnforcedGreedyNestedBrances.st deleted file mode 100644 index 87131b66a..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/EnforcedGreedyNestedBrances.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -ACTION : '{' (ACTION | ~[{}])* '}'; -WS : [ \r\n\t]+ -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/ErrorInMiddle.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/ErrorInMiddle.st deleted file mode 100644 index a0dd4d2c6..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/ErrorInMiddle.st +++ /dev/null @@ -1,2 +0,0 @@ -lexer grammar ; -A : 'abc' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharAtStart.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharAtStart.st deleted file mode 100644 index c294d7efc..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharAtStart.st +++ /dev/null @@ -1,2 +0,0 @@ -lexer grammar ; -A : 'a' 'b' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharAtStartAfterDFACache.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharAtStartAfterDFACache.st deleted file mode 100644 index c294d7efc..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharAtStartAfterDFACache.st +++ /dev/null @@ -1,2 +0,0 @@ -lexer grammar ; -A : 'a' 'b' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharInToken.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharInToken.st deleted file mode 100644 index c294d7efc..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharInToken.st +++ /dev/null @@ -1,2 +0,0 @@ -lexer grammar ; -A : 'a' 'b' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharInTokenAfterDFACache.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharInTokenAfterDFACache.st deleted file mode 100644 index c294d7efc..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/InvalidCharInTokenAfterDFACache.st +++ /dev/null @@ -1,2 +0,0 @@ -lexer grammar ; -A : 'a' 'b' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/LexerExecDFA.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/LexerExecDFA.st deleted file mode 100644 index 983c36539..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/LexerExecDFA.st +++ 
/dev/null @@ -1,5 +0,0 @@ -grammar ; -start : ID ':' expr; -expr : primary expr? {} | expr '->' ID; -primary : ID; -ID : [a-z]+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/StringsEmbeddedInActions.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/StringsEmbeddedInActions.st deleted file mode 100644 index 4acffdb58..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerErrors/StringsEmbeddedInActions.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -ACTION2 : '[' (STRING | ~'"')*? ']'; -STRING : '"' ('\\"' | .)*? '"'; -WS : [ \t\r\n]+ -> skip; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/ActionPlacement.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/ActionPlacement.st deleted file mode 100644 index 18bffeafb..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/ActionPlacement.st +++ /dev/null @@ -1,8 +0,0 @@ -lexer grammar ; -I : ({} 'a' -| {} - 'a' {} - 'b' {}) - {} ; -WS : (' '|'\n') -> skip ; -J : .; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSet.st deleted file mode 100644 index 6dc908042..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSet.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -I : '0'..'9'+ {} ; -WS : [ \n\u000D] -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetInSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetInSet.st deleted file mode 100644 index 7f86ef615..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetInSet.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -I : (~[ab \\n]|'a') {} ; -WS : [ \n\u000D]+ -> skip ; - \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetNot.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetNot.st deleted file mode 100644 index 8c8a4d43a..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetNot.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -I : ~[ab \n] ~[ \ncd]* {} ; -WS : [ \n\u000D]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetPlus.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetPlus.st deleted file mode 100644 index cc1ad08c3..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetPlus.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -I : '0'..'9'+ {} ; -WS : [ \n\u000D]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetRange.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetRange.st deleted file mode 100644 index 9d49c4d11..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetRange.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -I : [0-9]+ {} ; -ID : [a-zA-Z] [a-zA-Z0-9]* {} ; -WS : [ \n\u0009\r]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithEscapedChar.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithEscapedChar.st deleted file mode 100644 index e5972371e..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithEscapedChar.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -DASHBRACK : [\\-\]]+ {} ; -WS : [ \u]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithMissingEndRange.st 
b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithMissingEndRange.st deleted file mode 100644 index f362a75a2..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithMissingEndRange.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -I : [0-]+ {} ; -WS : [ \n\u000D]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithMissingEscapeChar.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithMissingEscapeChar.st deleted file mode 100644 index 6340980ac..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithMissingEscapeChar.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -I : [0-9]+ {} ; -WS : [ \u]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithQuote1.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithQuote1.st deleted file mode 100644 index abac47e4a..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithQuote1.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -A : ["a-z]+ {} ; -WS : [ \n\t]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithQuote2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithQuote2.st deleted file mode 100644 index 01df4d68f..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithQuote2.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -A : ["\\ab]+ {} ; -WS : [ \n\t]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithReversedRange.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithReversedRange.st deleted file mode 100644 index f01b9afd1..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/CharSetWithReversedRange.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -A : [z-a9]+ {} ; -WS : [ \u]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/EOFByItself.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/EOFByItself.st deleted file mode 100644 index ca92cc56d..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/EOFByItself.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -DONE : EOF ; -A : 'a'; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/EOFSuffixInFirstRule.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/EOFSuffixInFirstRule.st deleted file mode 100644 index 374abcfa3..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/EOFSuffixInFirstRule.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -A : 'a' EOF ; -B : 'a'; -C : 'c'; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyClosure.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyClosure.st deleted file mode 100644 index fca53f1f7..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyClosure.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -CMT : '//' .*? 
'\n' CMT*; -WS : (' '|'\t')+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyConfigs.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyConfigs.st deleted file mode 100644 index 0c009d2ec..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyConfigs.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -I : ('a' | 'ab') {} ; -WS : (' '|'\n') -> skip ; -J : .; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyOptional.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyOptional.st deleted file mode 100644 index 62435a715..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyOptional.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -CMT : '//' .*? '\n' CMT?; -WS : (' '|'\t')+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyPositiveClosure.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyPositiveClosure.st deleted file mode 100644 index 1de332e41..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/GreedyPositiveClosure.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -CMT : ('//' .*? '\n')+; -WS : (' '|'\t')+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/HexVsID.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/HexVsID.st deleted file mode 100644 index 1e0f9ec0e..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/HexVsID.st +++ /dev/null @@ -1,8 +0,0 @@ -lexer grammar ; -HexLiteral : '0' ('x'|'X') HexDigit+ ; -DecimalLiteral : ('0' | '1'..'9' '0'..'9'*) ; -FloatingPointLiteral : ('0x' | '0X') HexDigit* ('.' HexDigit*)? ; -DOT : '.' ; -ID : 'a'..'z'+ ; -fragment HexDigit : ('0'..'9'|'a'..'f'|'A'..'F') ; -WS : (' '|'\n')+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/KeywordID.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/KeywordID.st deleted file mode 100644 index f98be3d02..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/KeywordID.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -KEND : 'end' ; // has priority -ID : 'a'..'z'+ ; -WS : (' '|'\n')+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/LargeLexer.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/LargeLexer.st deleted file mode 100644 index 6ff2a9a58..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/LargeLexer.st +++ /dev/null @@ -1,4002 +0,0 @@ -lexer grammar L; -WS : [ \t\r\n]+ -> skip; -KW0 : 'KW' '0'; -KW1 : 'KW' '1'; -KW2 : 'KW' '2'; -KW3 : 'KW' '3'; -KW4 : 'KW' '4'; -KW5 : 'KW' '5'; -KW6 : 'KW' '6'; -KW7 : 'KW' '7'; -KW8 : 'KW' '8'; -KW9 : 'KW' '9'; -KW10 : 'KW' '10'; -KW11 : 'KW' '11'; -KW12 : 'KW' '12'; -KW13 : 'KW' '13'; -KW14 : 'KW' '14'; -KW15 : 'KW' '15'; -KW16 : 'KW' '16'; -KW17 : 'KW' '17'; -KW18 : 'KW' '18'; -KW19 : 'KW' '19'; -KW20 : 'KW' '20'; -KW21 : 'KW' '21'; -KW22 : 'KW' '22'; -KW23 : 'KW' '23'; -KW24 : 'KW' '24'; -KW25 : 'KW' '25'; -KW26 : 'KW' '26'; -KW27 : 'KW' '27'; -KW28 : 'KW' '28'; -KW29 : 'KW' '29'; -KW30 : 'KW' '30'; -KW31 : 'KW' '31'; -KW32 : 'KW' '32'; -KW33 : 'KW' '33'; -KW34 : 'KW' '34'; -KW35 : 'KW' '35'; -KW36 : 'KW' '36'; -KW37 : 'KW' '37'; -KW38 : 'KW' '38'; -KW39 : 'KW' '39'; -KW40 : 'KW' '40'; -KW41 : 'KW' '41'; -KW42 : 'KW' '42'; -KW43 : 'KW' '43'; -KW44 : 'KW' '44'; -KW45 : 'KW' '45'; -KW46 : 'KW' '46'; -KW47 : 'KW' '47'; -KW48 : 'KW' '48'; -KW49 : 'KW' '49'; -KW50 : 'KW' '50'; -KW51 : 'KW' '51'; -KW52 : 'KW' '52'; -KW53 : 'KW' '53'; -KW54 : 'KW' '54'; -KW55 : 
'KW' '55'; -KW56 : 'KW' '56'; -KW57 : 'KW' '57'; -KW58 : 'KW' '58'; -KW59 : 'KW' '59'; -KW60 : 'KW' '60'; -KW61 : 'KW' '61'; -KW62 : 'KW' '62'; -KW63 : 'KW' '63'; -KW64 : 'KW' '64'; -KW65 : 'KW' '65'; -KW66 : 'KW' '66'; -KW67 : 'KW' '67'; -KW68 : 'KW' '68'; -KW69 : 'KW' '69'; -KW70 : 'KW' '70'; -KW71 : 'KW' '71'; -KW72 : 'KW' '72'; -KW73 : 'KW' '73'; -KW74 : 'KW' '74'; -KW75 : 'KW' '75'; -KW76 : 'KW' '76'; -KW77 : 'KW' '77'; -KW78 : 'KW' '78'; -KW79 : 'KW' '79'; -KW80 : 'KW' '80'; -KW81 : 'KW' '81'; -KW82 : 'KW' '82'; -KW83 : 'KW' '83'; -KW84 : 'KW' '84'; -KW85 : 'KW' '85'; -KW86 : 'KW' '86'; -KW87 : 'KW' '87'; -KW88 : 'KW' '88'; -KW89 : 'KW' '89'; -KW90 : 'KW' '90'; -KW91 : 'KW' '91'; -KW92 : 'KW' '92'; -KW93 : 'KW' '93'; -KW94 : 'KW' '94'; -KW95 : 'KW' '95'; -KW96 : 'KW' '96'; -KW97 : 'KW' '97'; -KW98 : 'KW' '98'; -KW99 : 'KW' '99'; -KW100 : 'KW' '100'; -KW101 : 'KW' '101'; -KW102 : 'KW' '102'; -KW103 : 'KW' '103'; -KW104 : 'KW' '104'; -KW105 : 'KW' '105'; -KW106 : 'KW' '106'; -KW107 : 'KW' '107'; -KW108 : 'KW' '108'; -KW109 : 'KW' '109'; -KW110 : 'KW' '110'; -KW111 : 'KW' '111'; -KW112 : 'KW' '112'; -KW113 : 'KW' '113'; -KW114 : 'KW' '114'; -KW115 : 'KW' '115'; -KW116 : 'KW' '116'; -KW117 : 'KW' '117'; -KW118 : 'KW' '118'; -KW119 : 'KW' '119'; -KW120 : 'KW' '120'; -KW121 : 'KW' '121'; -KW122 : 'KW' '122'; -KW123 : 'KW' '123'; -KW124 : 'KW' '124'; -KW125 : 'KW' '125'; -KW126 : 'KW' '126'; -KW127 : 'KW' '127'; -KW128 : 'KW' '128'; -KW129 : 'KW' '129'; -KW130 : 'KW' '130'; -KW131 : 'KW' '131'; -KW132 : 'KW' '132'; -KW133 : 'KW' '133'; -KW134 : 'KW' '134'; -KW135 : 'KW' '135'; -KW136 : 'KW' '136'; -KW137 : 'KW' '137'; -KW138 : 'KW' '138'; -KW139 : 'KW' '139'; -KW140 : 'KW' '140'; -KW141 : 'KW' '141'; -KW142 : 'KW' '142'; -KW143 : 'KW' '143'; -KW144 : 'KW' '144'; -KW145 : 'KW' '145'; -KW146 : 'KW' '146'; -KW147 : 'KW' '147'; -KW148 : 'KW' '148'; -KW149 : 'KW' '149'; -KW150 : 'KW' '150'; -KW151 : 'KW' '151'; -KW152 : 'KW' '152'; -KW153 : 'KW' '153'; -KW154 : 'KW' '154'; -KW155 : 'KW' '155'; -KW156 : 'KW' '156'; -KW157 : 'KW' '157'; -KW158 : 'KW' '158'; -KW159 : 'KW' '159'; -KW160 : 'KW' '160'; -KW161 : 'KW' '161'; -KW162 : 'KW' '162'; -KW163 : 'KW' '163'; -KW164 : 'KW' '164'; -KW165 : 'KW' '165'; -KW166 : 'KW' '166'; -KW167 : 'KW' '167'; -KW168 : 'KW' '168'; -KW169 : 'KW' '169'; -KW170 : 'KW' '170'; -KW171 : 'KW' '171'; -KW172 : 'KW' '172'; -KW173 : 'KW' '173'; -KW174 : 'KW' '174'; -KW175 : 'KW' '175'; -KW176 : 'KW' '176'; -KW177 : 'KW' '177'; -KW178 : 'KW' '178'; -KW179 : 'KW' '179'; -KW180 : 'KW' '180'; -KW181 : 'KW' '181'; -KW182 : 'KW' '182'; -KW183 : 'KW' '183'; -KW184 : 'KW' '184'; -KW185 : 'KW' '185'; -KW186 : 'KW' '186'; -KW187 : 'KW' '187'; -KW188 : 'KW' '188'; -KW189 : 'KW' '189'; -KW190 : 'KW' '190'; -KW191 : 'KW' '191'; -KW192 : 'KW' '192'; -KW193 : 'KW' '193'; -KW194 : 'KW' '194'; -KW195 : 'KW' '195'; -KW196 : 'KW' '196'; -KW197 : 'KW' '197'; -KW198 : 'KW' '198'; -KW199 : 'KW' '199'; -KW200 : 'KW' '200'; -KW201 : 'KW' '201'; -KW202 : 'KW' '202'; -KW203 : 'KW' '203'; -KW204 : 'KW' '204'; -KW205 : 'KW' '205'; -KW206 : 'KW' '206'; -KW207 : 'KW' '207'; -KW208 : 'KW' '208'; -KW209 : 'KW' '209'; -KW210 : 'KW' '210'; -KW211 : 'KW' '211'; -KW212 : 'KW' '212'; -KW213 : 'KW' '213'; -KW214 : 'KW' '214'; -KW215 : 'KW' '215'; -KW216 : 'KW' '216'; -KW217 : 'KW' '217'; -KW218 : 'KW' '218'; -KW219 : 'KW' '219'; -KW220 : 'KW' '220'; -KW221 : 'KW' '221'; -KW222 : 'KW' '222'; -KW223 : 'KW' '223'; -KW224 : 'KW' '224'; -KW225 : 'KW' '225'; -KW226 : 'KW' '226'; -KW227 : 'KW' '227'; -KW228 : 'KW' 
'228'; -KW229 : 'KW' '229'; -KW230 : 'KW' '230'; -KW231 : 'KW' '231'; -KW232 : 'KW' '232'; -KW233 : 'KW' '233'; -KW234 : 'KW' '234'; -KW235 : 'KW' '235'; -KW236 : 'KW' '236'; -KW237 : 'KW' '237'; -KW238 : 'KW' '238'; -KW239 : 'KW' '239'; -KW240 : 'KW' '240'; -KW241 : 'KW' '241'; -KW242 : 'KW' '242'; -KW243 : 'KW' '243'; -KW244 : 'KW' '244'; -KW245 : 'KW' '245'; -KW246 : 'KW' '246'; -KW247 : 'KW' '247'; -KW248 : 'KW' '248'; -KW249 : 'KW' '249'; -KW250 : 'KW' '250'; -KW251 : 'KW' '251'; -KW252 : 'KW' '252'; -KW253 : 'KW' '253'; -KW254 : 'KW' '254'; -KW255 : 'KW' '255'; -KW256 : 'KW' '256'; -KW257 : 'KW' '257'; -KW258 : 'KW' '258'; -KW259 : 'KW' '259'; -KW260 : 'KW' '260'; -KW261 : 'KW' '261'; -KW262 : 'KW' '262'; -KW263 : 'KW' '263'; -KW264 : 'KW' '264'; -KW265 : 'KW' '265'; -KW266 : 'KW' '266'; -KW267 : 'KW' '267'; -KW268 : 'KW' '268'; -KW269 : 'KW' '269'; -KW270 : 'KW' '270'; -KW271 : 'KW' '271'; -KW272 : 'KW' '272'; -KW273 : 'KW' '273'; -KW274 : 'KW' '274'; -KW275 : 'KW' '275'; -KW276 : 'KW' '276'; -KW277 : 'KW' '277'; -KW278 : 'KW' '278'; -KW279 : 'KW' '279'; -KW280 : 'KW' '280'; -KW281 : 'KW' '281'; -KW282 : 'KW' '282'; -KW283 : 'KW' '283'; -KW284 : 'KW' '284'; -KW285 : 'KW' '285'; -KW286 : 'KW' '286'; -KW287 : 'KW' '287'; -KW288 : 'KW' '288'; -KW289 : 'KW' '289'; -KW290 : 'KW' '290'; -KW291 : 'KW' '291'; -KW292 : 'KW' '292'; -KW293 : 'KW' '293'; -KW294 : 'KW' '294'; -KW295 : 'KW' '295'; -KW296 : 'KW' '296'; -KW297 : 'KW' '297'; -KW298 : 'KW' '298'; -KW299 : 'KW' '299'; -KW300 : 'KW' '300'; -KW301 : 'KW' '301'; -KW302 : 'KW' '302'; -KW303 : 'KW' '303'; -KW304 : 'KW' '304'; -KW305 : 'KW' '305'; -KW306 : 'KW' '306'; -KW307 : 'KW' '307'; -KW308 : 'KW' '308'; -KW309 : 'KW' '309'; -KW310 : 'KW' '310'; -KW311 : 'KW' '311'; -KW312 : 'KW' '312'; -KW313 : 'KW' '313'; -KW314 : 'KW' '314'; -KW315 : 'KW' '315'; -KW316 : 'KW' '316'; -KW317 : 'KW' '317'; -KW318 : 'KW' '318'; -KW319 : 'KW' '319'; -KW320 : 'KW' '320'; -KW321 : 'KW' '321'; -KW322 : 'KW' '322'; -KW323 : 'KW' '323'; -KW324 : 'KW' '324'; -KW325 : 'KW' '325'; -KW326 : 'KW' '326'; -KW327 : 'KW' '327'; -KW328 : 'KW' '328'; -KW329 : 'KW' '329'; -KW330 : 'KW' '330'; -KW331 : 'KW' '331'; -KW332 : 'KW' '332'; -KW333 : 'KW' '333'; -KW334 : 'KW' '334'; -KW335 : 'KW' '335'; -KW336 : 'KW' '336'; -KW337 : 'KW' '337'; -KW338 : 'KW' '338'; -KW339 : 'KW' '339'; -KW340 : 'KW' '340'; -KW341 : 'KW' '341'; -KW342 : 'KW' '342'; -KW343 : 'KW' '343'; -KW344 : 'KW' '344'; -KW345 : 'KW' '345'; -KW346 : 'KW' '346'; -KW347 : 'KW' '347'; -KW348 : 'KW' '348'; -KW349 : 'KW' '349'; -KW350 : 'KW' '350'; -KW351 : 'KW' '351'; -KW352 : 'KW' '352'; -KW353 : 'KW' '353'; -KW354 : 'KW' '354'; -KW355 : 'KW' '355'; -KW356 : 'KW' '356'; -KW357 : 'KW' '357'; -KW358 : 'KW' '358'; -KW359 : 'KW' '359'; -KW360 : 'KW' '360'; -KW361 : 'KW' '361'; -KW362 : 'KW' '362'; -KW363 : 'KW' '363'; -KW364 : 'KW' '364'; -KW365 : 'KW' '365'; -KW366 : 'KW' '366'; -KW367 : 'KW' '367'; -KW368 : 'KW' '368'; -KW369 : 'KW' '369'; -KW370 : 'KW' '370'; -KW371 : 'KW' '371'; -KW372 : 'KW' '372'; -KW373 : 'KW' '373'; -KW374 : 'KW' '374'; -KW375 : 'KW' '375'; -KW376 : 'KW' '376'; -KW377 : 'KW' '377'; -KW378 : 'KW' '378'; -KW379 : 'KW' '379'; -KW380 : 'KW' '380'; -KW381 : 'KW' '381'; -KW382 : 'KW' '382'; -KW383 : 'KW' '383'; -KW384 : 'KW' '384'; -KW385 : 'KW' '385'; -KW386 : 'KW' '386'; -KW387 : 'KW' '387'; -KW388 : 'KW' '388'; -KW389 : 'KW' '389'; -KW390 : 'KW' '390'; -KW391 : 'KW' '391'; -KW392 : 'KW' '392'; -KW393 : 'KW' '393'; -KW394 : 'KW' '394'; -KW395 : 'KW' '395'; -KW396 : 'KW' '396'; -KW397 : 'KW' 
'397'; -KW398 : 'KW' '398'; -KW399 : 'KW' '399'; -KW400 : 'KW' '400'; -KW401 : 'KW' '401'; -KW402 : 'KW' '402'; -KW403 : 'KW' '403'; -KW404 : 'KW' '404'; -KW405 : 'KW' '405'; -KW406 : 'KW' '406'; -KW407 : 'KW' '407'; -KW408 : 'KW' '408'; -KW409 : 'KW' '409'; -KW410 : 'KW' '410'; -KW411 : 'KW' '411'; -KW412 : 'KW' '412'; -KW413 : 'KW' '413'; -KW414 : 'KW' '414'; -KW415 : 'KW' '415'; -KW416 : 'KW' '416'; -KW417 : 'KW' '417'; -KW418 : 'KW' '418'; -KW419 : 'KW' '419'; -KW420 : 'KW' '420'; -KW421 : 'KW' '421'; -KW422 : 'KW' '422'; -KW423 : 'KW' '423'; -KW424 : 'KW' '424'; -KW425 : 'KW' '425'; -KW426 : 'KW' '426'; -KW427 : 'KW' '427'; -KW428 : 'KW' '428'; -KW429 : 'KW' '429'; -KW430 : 'KW' '430'; -KW431 : 'KW' '431'; -KW432 : 'KW' '432'; -KW433 : 'KW' '433'; -KW434 : 'KW' '434'; -KW435 : 'KW' '435'; -KW436 : 'KW' '436'; -KW437 : 'KW' '437'; -KW438 : 'KW' '438'; -KW439 : 'KW' '439'; -KW440 : 'KW' '440'; -KW441 : 'KW' '441'; -KW442 : 'KW' '442'; -KW443 : 'KW' '443'; -KW444 : 'KW' '444'; -KW445 : 'KW' '445'; -KW446 : 'KW' '446'; -KW447 : 'KW' '447'; -KW448 : 'KW' '448'; -KW449 : 'KW' '449'; -KW450 : 'KW' '450'; -KW451 : 'KW' '451'; -KW452 : 'KW' '452'; -KW453 : 'KW' '453'; -KW454 : 'KW' '454'; -KW455 : 'KW' '455'; -KW456 : 'KW' '456'; -KW457 : 'KW' '457'; -KW458 : 'KW' '458'; -KW459 : 'KW' '459'; -KW460 : 'KW' '460'; -KW461 : 'KW' '461'; -KW462 : 'KW' '462'; -KW463 : 'KW' '463'; -KW464 : 'KW' '464'; -KW465 : 'KW' '465'; -KW466 : 'KW' '466'; -KW467 : 'KW' '467'; -KW468 : 'KW' '468'; -KW469 : 'KW' '469'; -KW470 : 'KW' '470'; -KW471 : 'KW' '471'; -KW472 : 'KW' '472'; -KW473 : 'KW' '473'; -KW474 : 'KW' '474'; -KW475 : 'KW' '475'; -KW476 : 'KW' '476'; -KW477 : 'KW' '477'; -KW478 : 'KW' '478'; -KW479 : 'KW' '479'; -KW480 : 'KW' '480'; -KW481 : 'KW' '481'; -KW482 : 'KW' '482'; -KW483 : 'KW' '483'; -KW484 : 'KW' '484'; -KW485 : 'KW' '485'; -KW486 : 'KW' '486'; -KW487 : 'KW' '487'; -KW488 : 'KW' '488'; -KW489 : 'KW' '489'; -KW490 : 'KW' '490'; -KW491 : 'KW' '491'; -KW492 : 'KW' '492'; -KW493 : 'KW' '493'; -KW494 : 'KW' '494'; -KW495 : 'KW' '495'; -KW496 : 'KW' '496'; -KW497 : 'KW' '497'; -KW498 : 'KW' '498'; -KW499 : 'KW' '499'; -KW500 : 'KW' '500'; -KW501 : 'KW' '501'; -KW502 : 'KW' '502'; -KW503 : 'KW' '503'; -KW504 : 'KW' '504'; -KW505 : 'KW' '505'; -KW506 : 'KW' '506'; -KW507 : 'KW' '507'; -KW508 : 'KW' '508'; -KW509 : 'KW' '509'; -KW510 : 'KW' '510'; -KW511 : 'KW' '511'; -KW512 : 'KW' '512'; -KW513 : 'KW' '513'; -KW514 : 'KW' '514'; -KW515 : 'KW' '515'; -KW516 : 'KW' '516'; -KW517 : 'KW' '517'; -KW518 : 'KW' '518'; -KW519 : 'KW' '519'; -KW520 : 'KW' '520'; -KW521 : 'KW' '521'; -KW522 : 'KW' '522'; -KW523 : 'KW' '523'; -KW524 : 'KW' '524'; -KW525 : 'KW' '525'; -KW526 : 'KW' '526'; -KW527 : 'KW' '527'; -KW528 : 'KW' '528'; -KW529 : 'KW' '529'; -KW530 : 'KW' '530'; -KW531 : 'KW' '531'; -KW532 : 'KW' '532'; -KW533 : 'KW' '533'; -KW534 : 'KW' '534'; -KW535 : 'KW' '535'; -KW536 : 'KW' '536'; -KW537 : 'KW' '537'; -KW538 : 'KW' '538'; -KW539 : 'KW' '539'; -KW540 : 'KW' '540'; -KW541 : 'KW' '541'; -KW542 : 'KW' '542'; -KW543 : 'KW' '543'; -KW544 : 'KW' '544'; -KW545 : 'KW' '545'; -KW546 : 'KW' '546'; -KW547 : 'KW' '547'; -KW548 : 'KW' '548'; -KW549 : 'KW' '549'; -KW550 : 'KW' '550'; -KW551 : 'KW' '551'; -KW552 : 'KW' '552'; -KW553 : 'KW' '553'; -KW554 : 'KW' '554'; -KW555 : 'KW' '555'; -KW556 : 'KW' '556'; -KW557 : 'KW' '557'; -KW558 : 'KW' '558'; -KW559 : 'KW' '559'; -KW560 : 'KW' '560'; -KW561 : 'KW' '561'; -KW562 : 'KW' '562'; -KW563 : 'KW' '563'; -KW564 : 'KW' '564'; -KW565 : 'KW' '565'; -KW566 : 'KW' 
'566'; -KW567 : 'KW' '567'; -KW568 : 'KW' '568'; -KW569 : 'KW' '569'; -KW570 : 'KW' '570'; -KW571 : 'KW' '571'; -KW572 : 'KW' '572'; -KW573 : 'KW' '573'; -KW574 : 'KW' '574'; -KW575 : 'KW' '575'; -KW576 : 'KW' '576'; -KW577 : 'KW' '577'; -KW578 : 'KW' '578'; -KW579 : 'KW' '579'; -KW580 : 'KW' '580'; -KW581 : 'KW' '581'; -KW582 : 'KW' '582'; -KW583 : 'KW' '583'; -KW584 : 'KW' '584'; -KW585 : 'KW' '585'; -KW586 : 'KW' '586'; -KW587 : 'KW' '587'; -KW588 : 'KW' '588'; -KW589 : 'KW' '589'; -KW590 : 'KW' '590'; -KW591 : 'KW' '591'; -KW592 : 'KW' '592'; -KW593 : 'KW' '593'; -KW594 : 'KW' '594'; -KW595 : 'KW' '595'; -KW596 : 'KW' '596'; -KW597 : 'KW' '597'; -KW598 : 'KW' '598'; -KW599 : 'KW' '599'; -KW600 : 'KW' '600'; -KW601 : 'KW' '601'; -KW602 : 'KW' '602'; -KW603 : 'KW' '603'; -KW604 : 'KW' '604'; -KW605 : 'KW' '605'; -KW606 : 'KW' '606'; -KW607 : 'KW' '607'; -KW608 : 'KW' '608'; -KW609 : 'KW' '609'; -KW610 : 'KW' '610'; -KW611 : 'KW' '611'; -KW612 : 'KW' '612'; -KW613 : 'KW' '613'; -KW614 : 'KW' '614'; -KW615 : 'KW' '615'; -KW616 : 'KW' '616'; -KW617 : 'KW' '617'; -KW618 : 'KW' '618'; -KW619 : 'KW' '619'; -KW620 : 'KW' '620'; -KW621 : 'KW' '621'; -KW622 : 'KW' '622'; -KW623 : 'KW' '623'; -KW624 : 'KW' '624'; -KW625 : 'KW' '625'; -KW626 : 'KW' '626'; -KW627 : 'KW' '627'; -KW628 : 'KW' '628'; -KW629 : 'KW' '629'; -KW630 : 'KW' '630'; -KW631 : 'KW' '631'; -KW632 : 'KW' '632'; -KW633 : 'KW' '633'; -KW634 : 'KW' '634'; -KW635 : 'KW' '635'; -KW636 : 'KW' '636'; -KW637 : 'KW' '637'; -KW638 : 'KW' '638'; -KW639 : 'KW' '639'; -KW640 : 'KW' '640'; -KW641 : 'KW' '641'; -KW642 : 'KW' '642'; -KW643 : 'KW' '643'; -KW644 : 'KW' '644'; -KW645 : 'KW' '645'; -KW646 : 'KW' '646'; -KW647 : 'KW' '647'; -KW648 : 'KW' '648'; -KW649 : 'KW' '649'; -KW650 : 'KW' '650'; -KW651 : 'KW' '651'; -KW652 : 'KW' '652'; -KW653 : 'KW' '653'; -KW654 : 'KW' '654'; -KW655 : 'KW' '655'; -KW656 : 'KW' '656'; -KW657 : 'KW' '657'; -KW658 : 'KW' '658'; -KW659 : 'KW' '659'; -KW660 : 'KW' '660'; -KW661 : 'KW' '661'; -KW662 : 'KW' '662'; -KW663 : 'KW' '663'; -KW664 : 'KW' '664'; -KW665 : 'KW' '665'; -KW666 : 'KW' '666'; -KW667 : 'KW' '667'; -KW668 : 'KW' '668'; -KW669 : 'KW' '669'; -KW670 : 'KW' '670'; -KW671 : 'KW' '671'; -KW672 : 'KW' '672'; -KW673 : 'KW' '673'; -KW674 : 'KW' '674'; -KW675 : 'KW' '675'; -KW676 : 'KW' '676'; -KW677 : 'KW' '677'; -KW678 : 'KW' '678'; -KW679 : 'KW' '679'; -KW680 : 'KW' '680'; -KW681 : 'KW' '681'; -KW682 : 'KW' '682'; -KW683 : 'KW' '683'; -KW684 : 'KW' '684'; -KW685 : 'KW' '685'; -KW686 : 'KW' '686'; -KW687 : 'KW' '687'; -KW688 : 'KW' '688'; -KW689 : 'KW' '689'; -KW690 : 'KW' '690'; -KW691 : 'KW' '691'; -KW692 : 'KW' '692'; -KW693 : 'KW' '693'; -KW694 : 'KW' '694'; -KW695 : 'KW' '695'; -KW696 : 'KW' '696'; -KW697 : 'KW' '697'; -KW698 : 'KW' '698'; -KW699 : 'KW' '699'; -KW700 : 'KW' '700'; -KW701 : 'KW' '701'; -KW702 : 'KW' '702'; -KW703 : 'KW' '703'; -KW704 : 'KW' '704'; -KW705 : 'KW' '705'; -KW706 : 'KW' '706'; -KW707 : 'KW' '707'; -KW708 : 'KW' '708'; -KW709 : 'KW' '709'; -KW710 : 'KW' '710'; -KW711 : 'KW' '711'; -KW712 : 'KW' '712'; -KW713 : 'KW' '713'; -KW714 : 'KW' '714'; -KW715 : 'KW' '715'; -KW716 : 'KW' '716'; -KW717 : 'KW' '717'; -KW718 : 'KW' '718'; -KW719 : 'KW' '719'; -KW720 : 'KW' '720'; -KW721 : 'KW' '721'; -KW722 : 'KW' '722'; -KW723 : 'KW' '723'; -KW724 : 'KW' '724'; -KW725 : 'KW' '725'; -KW726 : 'KW' '726'; -KW727 : 'KW' '727'; -KW728 : 'KW' '728'; -KW729 : 'KW' '729'; -KW730 : 'KW' '730'; -KW731 : 'KW' '731'; -KW732 : 'KW' '732'; -KW733 : 'KW' '733'; -KW734 : 'KW' '734'; -KW735 : 'KW' 
'735'; -KW736 : 'KW' '736'; -KW737 : 'KW' '737'; -KW738 : 'KW' '738'; -KW739 : 'KW' '739'; -KW740 : 'KW' '740'; -KW741 : 'KW' '741'; -KW742 : 'KW' '742'; -KW743 : 'KW' '743'; -KW744 : 'KW' '744'; -KW745 : 'KW' '745'; -KW746 : 'KW' '746'; -KW747 : 'KW' '747'; -KW748 : 'KW' '748'; -KW749 : 'KW' '749'; -KW750 : 'KW' '750'; -KW751 : 'KW' '751'; -KW752 : 'KW' '752'; -KW753 : 'KW' '753'; -KW754 : 'KW' '754'; -KW755 : 'KW' '755'; -KW756 : 'KW' '756'; -KW757 : 'KW' '757'; -KW758 : 'KW' '758'; -KW759 : 'KW' '759'; -KW760 : 'KW' '760'; -KW761 : 'KW' '761'; -KW762 : 'KW' '762'; -KW763 : 'KW' '763'; -KW764 : 'KW' '764'; -KW765 : 'KW' '765'; -KW766 : 'KW' '766'; -KW767 : 'KW' '767'; -KW768 : 'KW' '768'; -KW769 : 'KW' '769'; -KW770 : 'KW' '770'; -KW771 : 'KW' '771'; -KW772 : 'KW' '772'; -KW773 : 'KW' '773'; -KW774 : 'KW' '774'; -KW775 : 'KW' '775'; -KW776 : 'KW' '776'; -KW777 : 'KW' '777'; -KW778 : 'KW' '778'; -KW779 : 'KW' '779'; -KW780 : 'KW' '780'; -KW781 : 'KW' '781'; -KW782 : 'KW' '782'; -KW783 : 'KW' '783'; -KW784 : 'KW' '784'; -KW785 : 'KW' '785'; -KW786 : 'KW' '786'; -KW787 : 'KW' '787'; -KW788 : 'KW' '788'; -KW789 : 'KW' '789'; -KW790 : 'KW' '790'; -KW791 : 'KW' '791'; -KW792 : 'KW' '792'; -KW793 : 'KW' '793'; -KW794 : 'KW' '794'; -KW795 : 'KW' '795'; -KW796 : 'KW' '796'; -KW797 : 'KW' '797'; -KW798 : 'KW' '798'; -KW799 : 'KW' '799'; -KW800 : 'KW' '800'; -KW801 : 'KW' '801'; -KW802 : 'KW' '802'; -KW803 : 'KW' '803'; -KW804 : 'KW' '804'; -KW805 : 'KW' '805'; -KW806 : 'KW' '806'; -KW807 : 'KW' '807'; -KW808 : 'KW' '808'; -KW809 : 'KW' '809'; -KW810 : 'KW' '810'; -KW811 : 'KW' '811'; -KW812 : 'KW' '812'; -KW813 : 'KW' '813'; -KW814 : 'KW' '814'; -KW815 : 'KW' '815'; -KW816 : 'KW' '816'; -KW817 : 'KW' '817'; -KW818 : 'KW' '818'; -KW819 : 'KW' '819'; -KW820 : 'KW' '820'; -KW821 : 'KW' '821'; -KW822 : 'KW' '822'; -KW823 : 'KW' '823'; -KW824 : 'KW' '824'; -KW825 : 'KW' '825'; -KW826 : 'KW' '826'; -KW827 : 'KW' '827'; -KW828 : 'KW' '828'; -KW829 : 'KW' '829'; -KW830 : 'KW' '830'; -KW831 : 'KW' '831'; -KW832 : 'KW' '832'; -KW833 : 'KW' '833'; -KW834 : 'KW' '834'; -KW835 : 'KW' '835'; -KW836 : 'KW' '836'; -KW837 : 'KW' '837'; -KW838 : 'KW' '838'; -KW839 : 'KW' '839'; -KW840 : 'KW' '840'; -KW841 : 'KW' '841'; -KW842 : 'KW' '842'; -KW843 : 'KW' '843'; -KW844 : 'KW' '844'; -KW845 : 'KW' '845'; -KW846 : 'KW' '846'; -KW847 : 'KW' '847'; -KW848 : 'KW' '848'; -KW849 : 'KW' '849'; -KW850 : 'KW' '850'; -KW851 : 'KW' '851'; -KW852 : 'KW' '852'; -KW853 : 'KW' '853'; -KW854 : 'KW' '854'; -KW855 : 'KW' '855'; -KW856 : 'KW' '856'; -KW857 : 'KW' '857'; -KW858 : 'KW' '858'; -KW859 : 'KW' '859'; -KW860 : 'KW' '860'; -KW861 : 'KW' '861'; -KW862 : 'KW' '862'; -KW863 : 'KW' '863'; -KW864 : 'KW' '864'; -KW865 : 'KW' '865'; -KW866 : 'KW' '866'; -KW867 : 'KW' '867'; -KW868 : 'KW' '868'; -KW869 : 'KW' '869'; -KW870 : 'KW' '870'; -KW871 : 'KW' '871'; -KW872 : 'KW' '872'; -KW873 : 'KW' '873'; -KW874 : 'KW' '874'; -KW875 : 'KW' '875'; -KW876 : 'KW' '876'; -KW877 : 'KW' '877'; -KW878 : 'KW' '878'; -KW879 : 'KW' '879'; -KW880 : 'KW' '880'; -KW881 : 'KW' '881'; -KW882 : 'KW' '882'; -KW883 : 'KW' '883'; -KW884 : 'KW' '884'; -KW885 : 'KW' '885'; -KW886 : 'KW' '886'; -KW887 : 'KW' '887'; -KW888 : 'KW' '888'; -KW889 : 'KW' '889'; -KW890 : 'KW' '890'; -KW891 : 'KW' '891'; -KW892 : 'KW' '892'; -KW893 : 'KW' '893'; -KW894 : 'KW' '894'; -KW895 : 'KW' '895'; -KW896 : 'KW' '896'; -KW897 : 'KW' '897'; -KW898 : 'KW' '898'; -KW899 : 'KW' '899'; -KW900 : 'KW' '900'; -KW901 : 'KW' '901'; -KW902 : 'KW' '902'; -KW903 : 'KW' '903'; -KW904 : 'KW' 
'904'; -KW905 : 'KW' '905'; -KW906 : 'KW' '906'; -KW907 : 'KW' '907'; -KW908 : 'KW' '908'; -KW909 : 'KW' '909'; -KW910 : 'KW' '910'; -KW911 : 'KW' '911'; -KW912 : 'KW' '912'; -KW913 : 'KW' '913'; -KW914 : 'KW' '914'; -KW915 : 'KW' '915'; -KW916 : 'KW' '916'; -KW917 : 'KW' '917'; -KW918 : 'KW' '918'; -KW919 : 'KW' '919'; -KW920 : 'KW' '920'; -KW921 : 'KW' '921'; -KW922 : 'KW' '922'; -KW923 : 'KW' '923'; -KW924 : 'KW' '924'; -KW925 : 'KW' '925'; -KW926 : 'KW' '926'; -KW927 : 'KW' '927'; -KW928 : 'KW' '928'; -KW929 : 'KW' '929'; -KW930 : 'KW' '930'; -KW931 : 'KW' '931'; -KW932 : 'KW' '932'; -KW933 : 'KW' '933'; -KW934 : 'KW' '934'; -KW935 : 'KW' '935'; -KW936 : 'KW' '936'; -KW937 : 'KW' '937'; -KW938 : 'KW' '938'; -KW939 : 'KW' '939'; -KW940 : 'KW' '940'; -KW941 : 'KW' '941'; -KW942 : 'KW' '942'; -KW943 : 'KW' '943'; -KW944 : 'KW' '944'; -KW945 : 'KW' '945'; -KW946 : 'KW' '946'; -KW947 : 'KW' '947'; -KW948 : 'KW' '948'; -KW949 : 'KW' '949'; -KW950 : 'KW' '950'; -KW951 : 'KW' '951'; -KW952 : 'KW' '952'; -KW953 : 'KW' '953'; -KW954 : 'KW' '954'; -KW955 : 'KW' '955'; -KW956 : 'KW' '956'; -KW957 : 'KW' '957'; -KW958 : 'KW' '958'; -KW959 : 'KW' '959'; -KW960 : 'KW' '960'; -KW961 : 'KW' '961'; -KW962 : 'KW' '962'; -KW963 : 'KW' '963'; -KW964 : 'KW' '964'; -KW965 : 'KW' '965'; -KW966 : 'KW' '966'; -KW967 : 'KW' '967'; -KW968 : 'KW' '968'; -KW969 : 'KW' '969'; -KW970 : 'KW' '970'; -KW971 : 'KW' '971'; -KW972 : 'KW' '972'; -KW973 : 'KW' '973'; -KW974 : 'KW' '974'; -KW975 : 'KW' '975'; -KW976 : 'KW' '976'; -KW977 : 'KW' '977'; -KW978 : 'KW' '978'; -KW979 : 'KW' '979'; -KW980 : 'KW' '980'; -KW981 : 'KW' '981'; -KW982 : 'KW' '982'; -KW983 : 'KW' '983'; -KW984 : 'KW' '984'; -KW985 : 'KW' '985'; -KW986 : 'KW' '986'; -KW987 : 'KW' '987'; -KW988 : 'KW' '988'; -KW989 : 'KW' '989'; -KW990 : 'KW' '990'; -KW991 : 'KW' '991'; -KW992 : 'KW' '992'; -KW993 : 'KW' '993'; -KW994 : 'KW' '994'; -KW995 : 'KW' '995'; -KW996 : 'KW' '996'; -KW997 : 'KW' '997'; -KW998 : 'KW' '998'; -KW999 : 'KW' '999'; -KW1000 : 'KW' '1000'; -KW1001 : 'KW' '1001'; -KW1002 : 'KW' '1002'; -KW1003 : 'KW' '1003'; -KW1004 : 'KW' '1004'; -KW1005 : 'KW' '1005'; -KW1006 : 'KW' '1006'; -KW1007 : 'KW' '1007'; -KW1008 : 'KW' '1008'; -KW1009 : 'KW' '1009'; -KW1010 : 'KW' '1010'; -KW1011 : 'KW' '1011'; -KW1012 : 'KW' '1012'; -KW1013 : 'KW' '1013'; -KW1014 : 'KW' '1014'; -KW1015 : 'KW' '1015'; -KW1016 : 'KW' '1016'; -KW1017 : 'KW' '1017'; -KW1018 : 'KW' '1018'; -KW1019 : 'KW' '1019'; -KW1020 : 'KW' '1020'; -KW1021 : 'KW' '1021'; -KW1022 : 'KW' '1022'; -KW1023 : 'KW' '1023'; -KW1024 : 'KW' '1024'; -KW1025 : 'KW' '1025'; -KW1026 : 'KW' '1026'; -KW1027 : 'KW' '1027'; -KW1028 : 'KW' '1028'; -KW1029 : 'KW' '1029'; -KW1030 : 'KW' '1030'; -KW1031 : 'KW' '1031'; -KW1032 : 'KW' '1032'; -KW1033 : 'KW' '1033'; -KW1034 : 'KW' '1034'; -KW1035 : 'KW' '1035'; -KW1036 : 'KW' '1036'; -KW1037 : 'KW' '1037'; -KW1038 : 'KW' '1038'; -KW1039 : 'KW' '1039'; -KW1040 : 'KW' '1040'; -KW1041 : 'KW' '1041'; -KW1042 : 'KW' '1042'; -KW1043 : 'KW' '1043'; -KW1044 : 'KW' '1044'; -KW1045 : 'KW' '1045'; -KW1046 : 'KW' '1046'; -KW1047 : 'KW' '1047'; -KW1048 : 'KW' '1048'; -KW1049 : 'KW' '1049'; -KW1050 : 'KW' '1050'; -KW1051 : 'KW' '1051'; -KW1052 : 'KW' '1052'; -KW1053 : 'KW' '1053'; -KW1054 : 'KW' '1054'; -KW1055 : 'KW' '1055'; -KW1056 : 'KW' '1056'; -KW1057 : 'KW' '1057'; -KW1058 : 'KW' '1058'; -KW1059 : 'KW' '1059'; -KW1060 : 'KW' '1060'; -KW1061 : 'KW' '1061'; -KW1062 : 'KW' '1062'; -KW1063 : 'KW' '1063'; -KW1064 : 'KW' '1064'; -KW1065 : 'KW' '1065'; -KW1066 : 'KW' '1066'; -KW1067 : 
'KW' '1067'; -KW1068 : 'KW' '1068'; -KW1069 : 'KW' '1069'; -KW1070 : 'KW' '1070'; -KW1071 : 'KW' '1071'; -KW1072 : 'KW' '1072'; -KW1073 : 'KW' '1073'; -KW1074 : 'KW' '1074'; -KW1075 : 'KW' '1075'; -KW1076 : 'KW' '1076'; -KW1077 : 'KW' '1077'; -KW1078 : 'KW' '1078'; -KW1079 : 'KW' '1079'; -KW1080 : 'KW' '1080'; -KW1081 : 'KW' '1081'; -KW1082 : 'KW' '1082'; -KW1083 : 'KW' '1083'; -KW1084 : 'KW' '1084'; -KW1085 : 'KW' '1085'; -KW1086 : 'KW' '1086'; -KW1087 : 'KW' '1087'; -KW1088 : 'KW' '1088'; -KW1089 : 'KW' '1089'; -KW1090 : 'KW' '1090'; -KW1091 : 'KW' '1091'; -KW1092 : 'KW' '1092'; -KW1093 : 'KW' '1093'; -KW1094 : 'KW' '1094'; -KW1095 : 'KW' '1095'; -KW1096 : 'KW' '1096'; -KW1097 : 'KW' '1097'; -KW1098 : 'KW' '1098'; -KW1099 : 'KW' '1099'; -KW1100 : 'KW' '1100'; -KW1101 : 'KW' '1101'; -KW1102 : 'KW' '1102'; -KW1103 : 'KW' '1103'; -KW1104 : 'KW' '1104'; -KW1105 : 'KW' '1105'; -KW1106 : 'KW' '1106'; -KW1107 : 'KW' '1107'; -KW1108 : 'KW' '1108'; -KW1109 : 'KW' '1109'; -KW1110 : 'KW' '1110'; -KW1111 : 'KW' '1111'; -KW1112 : 'KW' '1112'; -KW1113 : 'KW' '1113'; -KW1114 : 'KW' '1114'; -KW1115 : 'KW' '1115'; -KW1116 : 'KW' '1116'; -KW1117 : 'KW' '1117'; -KW1118 : 'KW' '1118'; -KW1119 : 'KW' '1119'; -KW1120 : 'KW' '1120'; -KW1121 : 'KW' '1121'; -KW1122 : 'KW' '1122'; -KW1123 : 'KW' '1123'; -KW1124 : 'KW' '1124'; -KW1125 : 'KW' '1125'; -KW1126 : 'KW' '1126'; -KW1127 : 'KW' '1127'; -KW1128 : 'KW' '1128'; -KW1129 : 'KW' '1129'; -KW1130 : 'KW' '1130'; -KW1131 : 'KW' '1131'; -KW1132 : 'KW' '1132'; -KW1133 : 'KW' '1133'; -KW1134 : 'KW' '1134'; -KW1135 : 'KW' '1135'; -KW1136 : 'KW' '1136'; -KW1137 : 'KW' '1137'; -KW1138 : 'KW' '1138'; -KW1139 : 'KW' '1139'; -KW1140 : 'KW' '1140'; -KW1141 : 'KW' '1141'; -KW1142 : 'KW' '1142'; -KW1143 : 'KW' '1143'; -KW1144 : 'KW' '1144'; -KW1145 : 'KW' '1145'; -KW1146 : 'KW' '1146'; -KW1147 : 'KW' '1147'; -KW1148 : 'KW' '1148'; -KW1149 : 'KW' '1149'; -KW1150 : 'KW' '1150'; -KW1151 : 'KW' '1151'; -KW1152 : 'KW' '1152'; -KW1153 : 'KW' '1153'; -KW1154 : 'KW' '1154'; -KW1155 : 'KW' '1155'; -KW1156 : 'KW' '1156'; -KW1157 : 'KW' '1157'; -KW1158 : 'KW' '1158'; -KW1159 : 'KW' '1159'; -KW1160 : 'KW' '1160'; -KW1161 : 'KW' '1161'; -KW1162 : 'KW' '1162'; -KW1163 : 'KW' '1163'; -KW1164 : 'KW' '1164'; -KW1165 : 'KW' '1165'; -KW1166 : 'KW' '1166'; -KW1167 : 'KW' '1167'; -KW1168 : 'KW' '1168'; -KW1169 : 'KW' '1169'; -KW1170 : 'KW' '1170'; -KW1171 : 'KW' '1171'; -KW1172 : 'KW' '1172'; -KW1173 : 'KW' '1173'; -KW1174 : 'KW' '1174'; -KW1175 : 'KW' '1175'; -KW1176 : 'KW' '1176'; -KW1177 : 'KW' '1177'; -KW1178 : 'KW' '1178'; -KW1179 : 'KW' '1179'; -KW1180 : 'KW' '1180'; -KW1181 : 'KW' '1181'; -KW1182 : 'KW' '1182'; -KW1183 : 'KW' '1183'; -KW1184 : 'KW' '1184'; -KW1185 : 'KW' '1185'; -KW1186 : 'KW' '1186'; -KW1187 : 'KW' '1187'; -KW1188 : 'KW' '1188'; -KW1189 : 'KW' '1189'; -KW1190 : 'KW' '1190'; -KW1191 : 'KW' '1191'; -KW1192 : 'KW' '1192'; -KW1193 : 'KW' '1193'; -KW1194 : 'KW' '1194'; -KW1195 : 'KW' '1195'; -KW1196 : 'KW' '1196'; -KW1197 : 'KW' '1197'; -KW1198 : 'KW' '1198'; -KW1199 : 'KW' '1199'; -KW1200 : 'KW' '1200'; -KW1201 : 'KW' '1201'; -KW1202 : 'KW' '1202'; -KW1203 : 'KW' '1203'; -KW1204 : 'KW' '1204'; -KW1205 : 'KW' '1205'; -KW1206 : 'KW' '1206'; -KW1207 : 'KW' '1207'; -KW1208 : 'KW' '1208'; -KW1209 : 'KW' '1209'; -KW1210 : 'KW' '1210'; -KW1211 : 'KW' '1211'; -KW1212 : 'KW' '1212'; -KW1213 : 'KW' '1213'; -KW1214 : 'KW' '1214'; -KW1215 : 'KW' '1215'; -KW1216 : 'KW' '1216'; -KW1217 : 'KW' '1217'; -KW1218 : 'KW' '1218'; -KW1219 : 'KW' '1219'; -KW1220 : 'KW' '1220'; -KW1221 : 'KW' '1221'; 
-KW1222 : 'KW' '1222'; -KW1223 : 'KW' '1223'; -KW1224 : 'KW' '1224'; -KW1225 : 'KW' '1225'; -KW1226 : 'KW' '1226'; -KW1227 : 'KW' '1227'; -KW1228 : 'KW' '1228'; -KW1229 : 'KW' '1229'; -KW1230 : 'KW' '1230'; -KW1231 : 'KW' '1231'; -KW1232 : 'KW' '1232'; -KW1233 : 'KW' '1233'; -KW1234 : 'KW' '1234'; -KW1235 : 'KW' '1235'; -KW1236 : 'KW' '1236'; -KW1237 : 'KW' '1237'; -KW1238 : 'KW' '1238'; -KW1239 : 'KW' '1239'; -KW1240 : 'KW' '1240'; -KW1241 : 'KW' '1241'; -KW1242 : 'KW' '1242'; -KW1243 : 'KW' '1243'; -KW1244 : 'KW' '1244'; -KW1245 : 'KW' '1245'; -KW1246 : 'KW' '1246'; -KW1247 : 'KW' '1247'; -KW1248 : 'KW' '1248'; -KW1249 : 'KW' '1249'; -KW1250 : 'KW' '1250'; -KW1251 : 'KW' '1251'; -KW1252 : 'KW' '1252'; -KW1253 : 'KW' '1253'; -KW1254 : 'KW' '1254'; -KW1255 : 'KW' '1255'; -KW1256 : 'KW' '1256'; -KW1257 : 'KW' '1257'; -KW1258 : 'KW' '1258'; -KW1259 : 'KW' '1259'; -KW1260 : 'KW' '1260'; -KW1261 : 'KW' '1261'; -KW1262 : 'KW' '1262'; -KW1263 : 'KW' '1263'; -KW1264 : 'KW' '1264'; -KW1265 : 'KW' '1265'; -KW1266 : 'KW' '1266'; -KW1267 : 'KW' '1267'; -KW1268 : 'KW' '1268'; -KW1269 : 'KW' '1269'; -KW1270 : 'KW' '1270'; -KW1271 : 'KW' '1271'; -KW1272 : 'KW' '1272'; -KW1273 : 'KW' '1273'; -KW1274 : 'KW' '1274'; -KW1275 : 'KW' '1275'; -KW1276 : 'KW' '1276'; -KW1277 : 'KW' '1277'; -KW1278 : 'KW' '1278'; -KW1279 : 'KW' '1279'; -KW1280 : 'KW' '1280'; -KW1281 : 'KW' '1281'; -KW1282 : 'KW' '1282'; -KW1283 : 'KW' '1283'; -KW1284 : 'KW' '1284'; -KW1285 : 'KW' '1285'; -KW1286 : 'KW' '1286'; -KW1287 : 'KW' '1287'; -KW1288 : 'KW' '1288'; -KW1289 : 'KW' '1289'; -KW1290 : 'KW' '1290'; -KW1291 : 'KW' '1291'; -KW1292 : 'KW' '1292'; -KW1293 : 'KW' '1293'; -KW1294 : 'KW' '1294'; -KW1295 : 'KW' '1295'; -KW1296 : 'KW' '1296'; -KW1297 : 'KW' '1297'; -KW1298 : 'KW' '1298'; -KW1299 : 'KW' '1299'; -KW1300 : 'KW' '1300'; -KW1301 : 'KW' '1301'; -KW1302 : 'KW' '1302'; -KW1303 : 'KW' '1303'; -KW1304 : 'KW' '1304'; -KW1305 : 'KW' '1305'; -KW1306 : 'KW' '1306'; -KW1307 : 'KW' '1307'; -KW1308 : 'KW' '1308'; -KW1309 : 'KW' '1309'; -KW1310 : 'KW' '1310'; -KW1311 : 'KW' '1311'; -KW1312 : 'KW' '1312'; -KW1313 : 'KW' '1313'; -KW1314 : 'KW' '1314'; -KW1315 : 'KW' '1315'; -KW1316 : 'KW' '1316'; -KW1317 : 'KW' '1317'; -KW1318 : 'KW' '1318'; -KW1319 : 'KW' '1319'; -KW1320 : 'KW' '1320'; -KW1321 : 'KW' '1321'; -KW1322 : 'KW' '1322'; -KW1323 : 'KW' '1323'; -KW1324 : 'KW' '1324'; -KW1325 : 'KW' '1325'; -KW1326 : 'KW' '1326'; -KW1327 : 'KW' '1327'; -KW1328 : 'KW' '1328'; -KW1329 : 'KW' '1329'; -KW1330 : 'KW' '1330'; -KW1331 : 'KW' '1331'; -KW1332 : 'KW' '1332'; -KW1333 : 'KW' '1333'; -KW1334 : 'KW' '1334'; -KW1335 : 'KW' '1335'; -KW1336 : 'KW' '1336'; -KW1337 : 'KW' '1337'; -KW1338 : 'KW' '1338'; -KW1339 : 'KW' '1339'; -KW1340 : 'KW' '1340'; -KW1341 : 'KW' '1341'; -KW1342 : 'KW' '1342'; -KW1343 : 'KW' '1343'; -KW1344 : 'KW' '1344'; -KW1345 : 'KW' '1345'; -KW1346 : 'KW' '1346'; -KW1347 : 'KW' '1347'; -KW1348 : 'KW' '1348'; -KW1349 : 'KW' '1349'; -KW1350 : 'KW' '1350'; -KW1351 : 'KW' '1351'; -KW1352 : 'KW' '1352'; -KW1353 : 'KW' '1353'; -KW1354 : 'KW' '1354'; -KW1355 : 'KW' '1355'; -KW1356 : 'KW' '1356'; -KW1357 : 'KW' '1357'; -KW1358 : 'KW' '1358'; -KW1359 : 'KW' '1359'; -KW1360 : 'KW' '1360'; -KW1361 : 'KW' '1361'; -KW1362 : 'KW' '1362'; -KW1363 : 'KW' '1363'; -KW1364 : 'KW' '1364'; -KW1365 : 'KW' '1365'; -KW1366 : 'KW' '1366'; -KW1367 : 'KW' '1367'; -KW1368 : 'KW' '1368'; -KW1369 : 'KW' '1369'; -KW1370 : 'KW' '1370'; -KW1371 : 'KW' '1371'; -KW1372 : 'KW' '1372'; -KW1373 : 'KW' '1373'; -KW1374 : 'KW' '1374'; -KW1375 : 'KW' '1375'; -KW1376 : 
'KW' '1376'; -KW1377 : 'KW' '1377'; -KW1378 : 'KW' '1378'; -KW1379 : 'KW' '1379'; -KW1380 : 'KW' '1380'; -KW1381 : 'KW' '1381'; -KW1382 : 'KW' '1382'; -KW1383 : 'KW' '1383'; -KW1384 : 'KW' '1384'; -KW1385 : 'KW' '1385'; -KW1386 : 'KW' '1386'; -KW1387 : 'KW' '1387'; -KW1388 : 'KW' '1388'; -KW1389 : 'KW' '1389'; -KW1390 : 'KW' '1390'; -KW1391 : 'KW' '1391'; -KW1392 : 'KW' '1392'; -KW1393 : 'KW' '1393'; -KW1394 : 'KW' '1394'; -KW1395 : 'KW' '1395'; -KW1396 : 'KW' '1396'; -KW1397 : 'KW' '1397'; -KW1398 : 'KW' '1398'; -KW1399 : 'KW' '1399'; -KW1400 : 'KW' '1400'; -KW1401 : 'KW' '1401'; -KW1402 : 'KW' '1402'; -KW1403 : 'KW' '1403'; -KW1404 : 'KW' '1404'; -KW1405 : 'KW' '1405'; -KW1406 : 'KW' '1406'; -KW1407 : 'KW' '1407'; -KW1408 : 'KW' '1408'; -KW1409 : 'KW' '1409'; -KW1410 : 'KW' '1410'; -KW1411 : 'KW' '1411'; -KW1412 : 'KW' '1412'; -KW1413 : 'KW' '1413'; -KW1414 : 'KW' '1414'; -KW1415 : 'KW' '1415'; -KW1416 : 'KW' '1416'; -KW1417 : 'KW' '1417'; -KW1418 : 'KW' '1418'; -KW1419 : 'KW' '1419'; -KW1420 : 'KW' '1420'; -KW1421 : 'KW' '1421'; -KW1422 : 'KW' '1422'; -KW1423 : 'KW' '1423'; -KW1424 : 'KW' '1424'; -KW1425 : 'KW' '1425'; -KW1426 : 'KW' '1426'; -KW1427 : 'KW' '1427'; -KW1428 : 'KW' '1428'; -KW1429 : 'KW' '1429'; -KW1430 : 'KW' '1430'; -KW1431 : 'KW' '1431'; -KW1432 : 'KW' '1432'; -KW1433 : 'KW' '1433'; -KW1434 : 'KW' '1434'; -KW1435 : 'KW' '1435'; -KW1436 : 'KW' '1436'; -KW1437 : 'KW' '1437'; -KW1438 : 'KW' '1438'; -KW1439 : 'KW' '1439'; -KW1440 : 'KW' '1440'; -KW1441 : 'KW' '1441'; -KW1442 : 'KW' '1442'; -KW1443 : 'KW' '1443'; -KW1444 : 'KW' '1444'; -KW1445 : 'KW' '1445'; -KW1446 : 'KW' '1446'; -KW1447 : 'KW' '1447'; -KW1448 : 'KW' '1448'; -KW1449 : 'KW' '1449'; -KW1450 : 'KW' '1450'; -KW1451 : 'KW' '1451'; -KW1452 : 'KW' '1452'; -KW1453 : 'KW' '1453'; -KW1454 : 'KW' '1454'; -KW1455 : 'KW' '1455'; -KW1456 : 'KW' '1456'; -KW1457 : 'KW' '1457'; -KW1458 : 'KW' '1458'; -KW1459 : 'KW' '1459'; -KW1460 : 'KW' '1460'; -KW1461 : 'KW' '1461'; -KW1462 : 'KW' '1462'; -KW1463 : 'KW' '1463'; -KW1464 : 'KW' '1464'; -KW1465 : 'KW' '1465'; -KW1466 : 'KW' '1466'; -KW1467 : 'KW' '1467'; -KW1468 : 'KW' '1468'; -KW1469 : 'KW' '1469'; -KW1470 : 'KW' '1470'; -KW1471 : 'KW' '1471'; -KW1472 : 'KW' '1472'; -KW1473 : 'KW' '1473'; -KW1474 : 'KW' '1474'; -KW1475 : 'KW' '1475'; -KW1476 : 'KW' '1476'; -KW1477 : 'KW' '1477'; -KW1478 : 'KW' '1478'; -KW1479 : 'KW' '1479'; -KW1480 : 'KW' '1480'; -KW1481 : 'KW' '1481'; -KW1482 : 'KW' '1482'; -KW1483 : 'KW' '1483'; -KW1484 : 'KW' '1484'; -KW1485 : 'KW' '1485'; -KW1486 : 'KW' '1486'; -KW1487 : 'KW' '1487'; -KW1488 : 'KW' '1488'; -KW1489 : 'KW' '1489'; -KW1490 : 'KW' '1490'; -KW1491 : 'KW' '1491'; -KW1492 : 'KW' '1492'; -KW1493 : 'KW' '1493'; -KW1494 : 'KW' '1494'; -KW1495 : 'KW' '1495'; -KW1496 : 'KW' '1496'; -KW1497 : 'KW' '1497'; -KW1498 : 'KW' '1498'; -KW1499 : 'KW' '1499'; -KW1500 : 'KW' '1500'; -KW1501 : 'KW' '1501'; -KW1502 : 'KW' '1502'; -KW1503 : 'KW' '1503'; -KW1504 : 'KW' '1504'; -KW1505 : 'KW' '1505'; -KW1506 : 'KW' '1506'; -KW1507 : 'KW' '1507'; -KW1508 : 'KW' '1508'; -KW1509 : 'KW' '1509'; -KW1510 : 'KW' '1510'; -KW1511 : 'KW' '1511'; -KW1512 : 'KW' '1512'; -KW1513 : 'KW' '1513'; -KW1514 : 'KW' '1514'; -KW1515 : 'KW' '1515'; -KW1516 : 'KW' '1516'; -KW1517 : 'KW' '1517'; -KW1518 : 'KW' '1518'; -KW1519 : 'KW' '1519'; -KW1520 : 'KW' '1520'; -KW1521 : 'KW' '1521'; -KW1522 : 'KW' '1522'; -KW1523 : 'KW' '1523'; -KW1524 : 'KW' '1524'; -KW1525 : 'KW' '1525'; -KW1526 : 'KW' '1526'; -KW1527 : 'KW' '1527'; -KW1528 : 'KW' '1528'; -KW1529 : 'KW' '1529'; -KW1530 : 'KW' '1530'; 
-KW1531 : 'KW' '1531'; -KW1532 : 'KW' '1532'; -KW1533 : 'KW' '1533'; -KW1534 : 'KW' '1534'; -KW1535 : 'KW' '1535'; -KW1536 : 'KW' '1536'; -KW1537 : 'KW' '1537'; -KW1538 : 'KW' '1538'; -KW1539 : 'KW' '1539'; -KW1540 : 'KW' '1540'; -KW1541 : 'KW' '1541'; -KW1542 : 'KW' '1542'; -KW1543 : 'KW' '1543'; -KW1544 : 'KW' '1544'; -KW1545 : 'KW' '1545'; -KW1546 : 'KW' '1546'; -KW1547 : 'KW' '1547'; -KW1548 : 'KW' '1548'; -KW1549 : 'KW' '1549'; -KW1550 : 'KW' '1550'; -KW1551 : 'KW' '1551'; -KW1552 : 'KW' '1552'; -KW1553 : 'KW' '1553'; -KW1554 : 'KW' '1554'; -KW1555 : 'KW' '1555'; -KW1556 : 'KW' '1556'; -KW1557 : 'KW' '1557'; -KW1558 : 'KW' '1558'; -KW1559 : 'KW' '1559'; -KW1560 : 'KW' '1560'; -KW1561 : 'KW' '1561'; -KW1562 : 'KW' '1562'; -KW1563 : 'KW' '1563'; -KW1564 : 'KW' '1564'; -KW1565 : 'KW' '1565'; -KW1566 : 'KW' '1566'; -KW1567 : 'KW' '1567'; -KW1568 : 'KW' '1568'; -KW1569 : 'KW' '1569'; -KW1570 : 'KW' '1570'; -KW1571 : 'KW' '1571'; -KW1572 : 'KW' '1572'; -KW1573 : 'KW' '1573'; -KW1574 : 'KW' '1574'; -KW1575 : 'KW' '1575'; -KW1576 : 'KW' '1576'; -KW1577 : 'KW' '1577'; -KW1578 : 'KW' '1578'; -KW1579 : 'KW' '1579'; -KW1580 : 'KW' '1580'; -KW1581 : 'KW' '1581'; -KW1582 : 'KW' '1582'; -KW1583 : 'KW' '1583'; -KW1584 : 'KW' '1584'; -KW1585 : 'KW' '1585'; -KW1586 : 'KW' '1586'; -KW1587 : 'KW' '1587'; -KW1588 : 'KW' '1588'; -KW1589 : 'KW' '1589'; -KW1590 : 'KW' '1590'; -KW1591 : 'KW' '1591'; -KW1592 : 'KW' '1592'; -KW1593 : 'KW' '1593'; -KW1594 : 'KW' '1594'; -KW1595 : 'KW' '1595'; -KW1596 : 'KW' '1596'; -KW1597 : 'KW' '1597'; -KW1598 : 'KW' '1598'; -KW1599 : 'KW' '1599'; -KW1600 : 'KW' '1600'; -KW1601 : 'KW' '1601'; -KW1602 : 'KW' '1602'; -KW1603 : 'KW' '1603'; -KW1604 : 'KW' '1604'; -KW1605 : 'KW' '1605'; -KW1606 : 'KW' '1606'; -KW1607 : 'KW' '1607'; -KW1608 : 'KW' '1608'; -KW1609 : 'KW' '1609'; -KW1610 : 'KW' '1610'; -KW1611 : 'KW' '1611'; -KW1612 : 'KW' '1612'; -KW1613 : 'KW' '1613'; -KW1614 : 'KW' '1614'; -KW1615 : 'KW' '1615'; -KW1616 : 'KW' '1616'; -KW1617 : 'KW' '1617'; -KW1618 : 'KW' '1618'; -KW1619 : 'KW' '1619'; -KW1620 : 'KW' '1620'; -KW1621 : 'KW' '1621'; -KW1622 : 'KW' '1622'; -KW1623 : 'KW' '1623'; -KW1624 : 'KW' '1624'; -KW1625 : 'KW' '1625'; -KW1626 : 'KW' '1626'; -KW1627 : 'KW' '1627'; -KW1628 : 'KW' '1628'; -KW1629 : 'KW' '1629'; -KW1630 : 'KW' '1630'; -KW1631 : 'KW' '1631'; -KW1632 : 'KW' '1632'; -KW1633 : 'KW' '1633'; -KW1634 : 'KW' '1634'; -KW1635 : 'KW' '1635'; -KW1636 : 'KW' '1636'; -KW1637 : 'KW' '1637'; -KW1638 : 'KW' '1638'; -KW1639 : 'KW' '1639'; -KW1640 : 'KW' '1640'; -KW1641 : 'KW' '1641'; -KW1642 : 'KW' '1642'; -KW1643 : 'KW' '1643'; -KW1644 : 'KW' '1644'; -KW1645 : 'KW' '1645'; -KW1646 : 'KW' '1646'; -KW1647 : 'KW' '1647'; -KW1648 : 'KW' '1648'; -KW1649 : 'KW' '1649'; -KW1650 : 'KW' '1650'; -KW1651 : 'KW' '1651'; -KW1652 : 'KW' '1652'; -KW1653 : 'KW' '1653'; -KW1654 : 'KW' '1654'; -KW1655 : 'KW' '1655'; -KW1656 : 'KW' '1656'; -KW1657 : 'KW' '1657'; -KW1658 : 'KW' '1658'; -KW1659 : 'KW' '1659'; -KW1660 : 'KW' '1660'; -KW1661 : 'KW' '1661'; -KW1662 : 'KW' '1662'; -KW1663 : 'KW' '1663'; -KW1664 : 'KW' '1664'; -KW1665 : 'KW' '1665'; -KW1666 : 'KW' '1666'; -KW1667 : 'KW' '1667'; -KW1668 : 'KW' '1668'; -KW1669 : 'KW' '1669'; -KW1670 : 'KW' '1670'; -KW1671 : 'KW' '1671'; -KW1672 : 'KW' '1672'; -KW1673 : 'KW' '1673'; -KW1674 : 'KW' '1674'; -KW1675 : 'KW' '1675'; -KW1676 : 'KW' '1676'; -KW1677 : 'KW' '1677'; -KW1678 : 'KW' '1678'; -KW1679 : 'KW' '1679'; -KW1680 : 'KW' '1680'; -KW1681 : 'KW' '1681'; -KW1682 : 'KW' '1682'; -KW1683 : 'KW' '1683'; -KW1684 : 'KW' '1684'; -KW1685 : 
'KW' '1685'; -KW1686 : 'KW' '1686'; -KW1687 : 'KW' '1687'; -KW1688 : 'KW' '1688'; -KW1689 : 'KW' '1689'; -KW1690 : 'KW' '1690'; -KW1691 : 'KW' '1691'; -KW1692 : 'KW' '1692'; -KW1693 : 'KW' '1693'; -KW1694 : 'KW' '1694'; -KW1695 : 'KW' '1695'; -KW1696 : 'KW' '1696'; -KW1697 : 'KW' '1697'; -KW1698 : 'KW' '1698'; -KW1699 : 'KW' '1699'; -KW1700 : 'KW' '1700'; -KW1701 : 'KW' '1701'; -KW1702 : 'KW' '1702'; -KW1703 : 'KW' '1703'; -KW1704 : 'KW' '1704'; -KW1705 : 'KW' '1705'; -KW1706 : 'KW' '1706'; -KW1707 : 'KW' '1707'; -KW1708 : 'KW' '1708'; -KW1709 : 'KW' '1709'; -KW1710 : 'KW' '1710'; -KW1711 : 'KW' '1711'; -KW1712 : 'KW' '1712'; -KW1713 : 'KW' '1713'; -KW1714 : 'KW' '1714'; -KW1715 : 'KW' '1715'; -KW1716 : 'KW' '1716'; -KW1717 : 'KW' '1717'; -KW1718 : 'KW' '1718'; -KW1719 : 'KW' '1719'; -KW1720 : 'KW' '1720'; -KW1721 : 'KW' '1721'; -KW1722 : 'KW' '1722'; -KW1723 : 'KW' '1723'; -KW1724 : 'KW' '1724'; -KW1725 : 'KW' '1725'; -KW1726 : 'KW' '1726'; -KW1727 : 'KW' '1727'; -KW1728 : 'KW' '1728'; -KW1729 : 'KW' '1729'; -KW1730 : 'KW' '1730'; -KW1731 : 'KW' '1731'; -KW1732 : 'KW' '1732'; -KW1733 : 'KW' '1733'; -KW1734 : 'KW' '1734'; -KW1735 : 'KW' '1735'; -KW1736 : 'KW' '1736'; -KW1737 : 'KW' '1737'; -KW1738 : 'KW' '1738'; -KW1739 : 'KW' '1739'; -KW1740 : 'KW' '1740'; -KW1741 : 'KW' '1741'; -KW1742 : 'KW' '1742'; -KW1743 : 'KW' '1743'; -KW1744 : 'KW' '1744'; -KW1745 : 'KW' '1745'; -KW1746 : 'KW' '1746'; -KW1747 : 'KW' '1747'; -KW1748 : 'KW' '1748'; -KW1749 : 'KW' '1749'; -KW1750 : 'KW' '1750'; -KW1751 : 'KW' '1751'; -KW1752 : 'KW' '1752'; -KW1753 : 'KW' '1753'; -KW1754 : 'KW' '1754'; -KW1755 : 'KW' '1755'; -KW1756 : 'KW' '1756'; -KW1757 : 'KW' '1757'; -KW1758 : 'KW' '1758'; -KW1759 : 'KW' '1759'; -KW1760 : 'KW' '1760'; -KW1761 : 'KW' '1761'; -KW1762 : 'KW' '1762'; -KW1763 : 'KW' '1763'; -KW1764 : 'KW' '1764'; -KW1765 : 'KW' '1765'; -KW1766 : 'KW' '1766'; -KW1767 : 'KW' '1767'; -KW1768 : 'KW' '1768'; -KW1769 : 'KW' '1769'; -KW1770 : 'KW' '1770'; -KW1771 : 'KW' '1771'; -KW1772 : 'KW' '1772'; -KW1773 : 'KW' '1773'; -KW1774 : 'KW' '1774'; -KW1775 : 'KW' '1775'; -KW1776 : 'KW' '1776'; -KW1777 : 'KW' '1777'; -KW1778 : 'KW' '1778'; -KW1779 : 'KW' '1779'; -KW1780 : 'KW' '1780'; -KW1781 : 'KW' '1781'; -KW1782 : 'KW' '1782'; -KW1783 : 'KW' '1783'; -KW1784 : 'KW' '1784'; -KW1785 : 'KW' '1785'; -KW1786 : 'KW' '1786'; -KW1787 : 'KW' '1787'; -KW1788 : 'KW' '1788'; -KW1789 : 'KW' '1789'; -KW1790 : 'KW' '1790'; -KW1791 : 'KW' '1791'; -KW1792 : 'KW' '1792'; -KW1793 : 'KW' '1793'; -KW1794 : 'KW' '1794'; -KW1795 : 'KW' '1795'; -KW1796 : 'KW' '1796'; -KW1797 : 'KW' '1797'; -KW1798 : 'KW' '1798'; -KW1799 : 'KW' '1799'; -KW1800 : 'KW' '1800'; -KW1801 : 'KW' '1801'; -KW1802 : 'KW' '1802'; -KW1803 : 'KW' '1803'; -KW1804 : 'KW' '1804'; -KW1805 : 'KW' '1805'; -KW1806 : 'KW' '1806'; -KW1807 : 'KW' '1807'; -KW1808 : 'KW' '1808'; -KW1809 : 'KW' '1809'; -KW1810 : 'KW' '1810'; -KW1811 : 'KW' '1811'; -KW1812 : 'KW' '1812'; -KW1813 : 'KW' '1813'; -KW1814 : 'KW' '1814'; -KW1815 : 'KW' '1815'; -KW1816 : 'KW' '1816'; -KW1817 : 'KW' '1817'; -KW1818 : 'KW' '1818'; -KW1819 : 'KW' '1819'; -KW1820 : 'KW' '1820'; -KW1821 : 'KW' '1821'; -KW1822 : 'KW' '1822'; -KW1823 : 'KW' '1823'; -KW1824 : 'KW' '1824'; -KW1825 : 'KW' '1825'; -KW1826 : 'KW' '1826'; -KW1827 : 'KW' '1827'; -KW1828 : 'KW' '1828'; -KW1829 : 'KW' '1829'; -KW1830 : 'KW' '1830'; -KW1831 : 'KW' '1831'; -KW1832 : 'KW' '1832'; -KW1833 : 'KW' '1833'; -KW1834 : 'KW' '1834'; -KW1835 : 'KW' '1835'; -KW1836 : 'KW' '1836'; -KW1837 : 'KW' '1837'; -KW1838 : 'KW' '1838'; -KW1839 : 'KW' '1839'; 
-KW1840 : 'KW' '1840'; -KW1841 : 'KW' '1841'; -KW1842 : 'KW' '1842'; -KW1843 : 'KW' '1843'; -KW1844 : 'KW' '1844'; -KW1845 : 'KW' '1845'; -KW1846 : 'KW' '1846'; -KW1847 : 'KW' '1847'; -KW1848 : 'KW' '1848'; -KW1849 : 'KW' '1849'; -KW1850 : 'KW' '1850'; -KW1851 : 'KW' '1851'; -KW1852 : 'KW' '1852'; -KW1853 : 'KW' '1853'; -KW1854 : 'KW' '1854'; -KW1855 : 'KW' '1855'; -KW1856 : 'KW' '1856'; -KW1857 : 'KW' '1857'; -KW1858 : 'KW' '1858'; -KW1859 : 'KW' '1859'; -KW1860 : 'KW' '1860'; -KW1861 : 'KW' '1861'; -KW1862 : 'KW' '1862'; -KW1863 : 'KW' '1863'; -KW1864 : 'KW' '1864'; -KW1865 : 'KW' '1865'; -KW1866 : 'KW' '1866'; -KW1867 : 'KW' '1867'; -KW1868 : 'KW' '1868'; -KW1869 : 'KW' '1869'; -KW1870 : 'KW' '1870'; -KW1871 : 'KW' '1871'; -KW1872 : 'KW' '1872'; -KW1873 : 'KW' '1873'; -KW1874 : 'KW' '1874'; -KW1875 : 'KW' '1875'; -KW1876 : 'KW' '1876'; -KW1877 : 'KW' '1877'; -KW1878 : 'KW' '1878'; -KW1879 : 'KW' '1879'; -KW1880 : 'KW' '1880'; -KW1881 : 'KW' '1881'; -KW1882 : 'KW' '1882'; -KW1883 : 'KW' '1883'; -KW1884 : 'KW' '1884'; -KW1885 : 'KW' '1885'; -KW1886 : 'KW' '1886'; -KW1887 : 'KW' '1887'; -KW1888 : 'KW' '1888'; -KW1889 : 'KW' '1889'; -KW1890 : 'KW' '1890'; -KW1891 : 'KW' '1891'; -KW1892 : 'KW' '1892'; -KW1893 : 'KW' '1893'; -KW1894 : 'KW' '1894'; -KW1895 : 'KW' '1895'; -KW1896 : 'KW' '1896'; -KW1897 : 'KW' '1897'; -KW1898 : 'KW' '1898'; -KW1899 : 'KW' '1899'; -KW1900 : 'KW' '1900'; -KW1901 : 'KW' '1901'; -KW1902 : 'KW' '1902'; -KW1903 : 'KW' '1903'; -KW1904 : 'KW' '1904'; -KW1905 : 'KW' '1905'; -KW1906 : 'KW' '1906'; -KW1907 : 'KW' '1907'; -KW1908 : 'KW' '1908'; -KW1909 : 'KW' '1909'; -KW1910 : 'KW' '1910'; -KW1911 : 'KW' '1911'; -KW1912 : 'KW' '1912'; -KW1913 : 'KW' '1913'; -KW1914 : 'KW' '1914'; -KW1915 : 'KW' '1915'; -KW1916 : 'KW' '1916'; -KW1917 : 'KW' '1917'; -KW1918 : 'KW' '1918'; -KW1919 : 'KW' '1919'; -KW1920 : 'KW' '1920'; -KW1921 : 'KW' '1921'; -KW1922 : 'KW' '1922'; -KW1923 : 'KW' '1923'; -KW1924 : 'KW' '1924'; -KW1925 : 'KW' '1925'; -KW1926 : 'KW' '1926'; -KW1927 : 'KW' '1927'; -KW1928 : 'KW' '1928'; -KW1929 : 'KW' '1929'; -KW1930 : 'KW' '1930'; -KW1931 : 'KW' '1931'; -KW1932 : 'KW' '1932'; -KW1933 : 'KW' '1933'; -KW1934 : 'KW' '1934'; -KW1935 : 'KW' '1935'; -KW1936 : 'KW' '1936'; -KW1937 : 'KW' '1937'; -KW1938 : 'KW' '1938'; -KW1939 : 'KW' '1939'; -KW1940 : 'KW' '1940'; -KW1941 : 'KW' '1941'; -KW1942 : 'KW' '1942'; -KW1943 : 'KW' '1943'; -KW1944 : 'KW' '1944'; -KW1945 : 'KW' '1945'; -KW1946 : 'KW' '1946'; -KW1947 : 'KW' '1947'; -KW1948 : 'KW' '1948'; -KW1949 : 'KW' '1949'; -KW1950 : 'KW' '1950'; -KW1951 : 'KW' '1951'; -KW1952 : 'KW' '1952'; -KW1953 : 'KW' '1953'; -KW1954 : 'KW' '1954'; -KW1955 : 'KW' '1955'; -KW1956 : 'KW' '1956'; -KW1957 : 'KW' '1957'; -KW1958 : 'KW' '1958'; -KW1959 : 'KW' '1959'; -KW1960 : 'KW' '1960'; -KW1961 : 'KW' '1961'; -KW1962 : 'KW' '1962'; -KW1963 : 'KW' '1963'; -KW1964 : 'KW' '1964'; -KW1965 : 'KW' '1965'; -KW1966 : 'KW' '1966'; -KW1967 : 'KW' '1967'; -KW1968 : 'KW' '1968'; -KW1969 : 'KW' '1969'; -KW1970 : 'KW' '1970'; -KW1971 : 'KW' '1971'; -KW1972 : 'KW' '1972'; -KW1973 : 'KW' '1973'; -KW1974 : 'KW' '1974'; -KW1975 : 'KW' '1975'; -KW1976 : 'KW' '1976'; -KW1977 : 'KW' '1977'; -KW1978 : 'KW' '1978'; -KW1979 : 'KW' '1979'; -KW1980 : 'KW' '1980'; -KW1981 : 'KW' '1981'; -KW1982 : 'KW' '1982'; -KW1983 : 'KW' '1983'; -KW1984 : 'KW' '1984'; -KW1985 : 'KW' '1985'; -KW1986 : 'KW' '1986'; -KW1987 : 'KW' '1987'; -KW1988 : 'KW' '1988'; -KW1989 : 'KW' '1989'; -KW1990 : 'KW' '1990'; -KW1991 : 'KW' '1991'; -KW1992 : 'KW' '1992'; -KW1993 : 'KW' '1993'; -KW1994 : 
'KW' '1994'; -KW1995 : 'KW' '1995'; -KW1996 : 'KW' '1996'; -KW1997 : 'KW' '1997'; -KW1998 : 'KW' '1998'; -KW1999 : 'KW' '1999'; -KW2000 : 'KW' '2000'; -KW2001 : 'KW' '2001'; -KW2002 : 'KW' '2002'; -KW2003 : 'KW' '2003'; -KW2004 : 'KW' '2004'; -KW2005 : 'KW' '2005'; -KW2006 : 'KW' '2006'; -KW2007 : 'KW' '2007'; -KW2008 : 'KW' '2008'; -KW2009 : 'KW' '2009'; -KW2010 : 'KW' '2010'; -KW2011 : 'KW' '2011'; -KW2012 : 'KW' '2012'; -KW2013 : 'KW' '2013'; -KW2014 : 'KW' '2014'; -KW2015 : 'KW' '2015'; -KW2016 : 'KW' '2016'; -KW2017 : 'KW' '2017'; -KW2018 : 'KW' '2018'; -KW2019 : 'KW' '2019'; -KW2020 : 'KW' '2020'; -KW2021 : 'KW' '2021'; -KW2022 : 'KW' '2022'; -KW2023 : 'KW' '2023'; -KW2024 : 'KW' '2024'; -KW2025 : 'KW' '2025'; -KW2026 : 'KW' '2026'; -KW2027 : 'KW' '2027'; -KW2028 : 'KW' '2028'; -KW2029 : 'KW' '2029'; -KW2030 : 'KW' '2030'; -KW2031 : 'KW' '2031'; -KW2032 : 'KW' '2032'; -KW2033 : 'KW' '2033'; -KW2034 : 'KW' '2034'; -KW2035 : 'KW' '2035'; -KW2036 : 'KW' '2036'; -KW2037 : 'KW' '2037'; -KW2038 : 'KW' '2038'; -KW2039 : 'KW' '2039'; -KW2040 : 'KW' '2040'; -KW2041 : 'KW' '2041'; -KW2042 : 'KW' '2042'; -KW2043 : 'KW' '2043'; -KW2044 : 'KW' '2044'; -KW2045 : 'KW' '2045'; -KW2046 : 'KW' '2046'; -KW2047 : 'KW' '2047'; -KW2048 : 'KW' '2048'; -KW2049 : 'KW' '2049'; -KW2050 : 'KW' '2050'; -KW2051 : 'KW' '2051'; -KW2052 : 'KW' '2052'; -KW2053 : 'KW' '2053'; -KW2054 : 'KW' '2054'; -KW2055 : 'KW' '2055'; -KW2056 : 'KW' '2056'; -KW2057 : 'KW' '2057'; -KW2058 : 'KW' '2058'; -KW2059 : 'KW' '2059'; -KW2060 : 'KW' '2060'; -KW2061 : 'KW' '2061'; -KW2062 : 'KW' '2062'; -KW2063 : 'KW' '2063'; -KW2064 : 'KW' '2064'; -KW2065 : 'KW' '2065'; -KW2066 : 'KW' '2066'; -KW2067 : 'KW' '2067'; -KW2068 : 'KW' '2068'; -KW2069 : 'KW' '2069'; -KW2070 : 'KW' '2070'; -KW2071 : 'KW' '2071'; -KW2072 : 'KW' '2072'; -KW2073 : 'KW' '2073'; -KW2074 : 'KW' '2074'; -KW2075 : 'KW' '2075'; -KW2076 : 'KW' '2076'; -KW2077 : 'KW' '2077'; -KW2078 : 'KW' '2078'; -KW2079 : 'KW' '2079'; -KW2080 : 'KW' '2080'; -KW2081 : 'KW' '2081'; -KW2082 : 'KW' '2082'; -KW2083 : 'KW' '2083'; -KW2084 : 'KW' '2084'; -KW2085 : 'KW' '2085'; -KW2086 : 'KW' '2086'; -KW2087 : 'KW' '2087'; -KW2088 : 'KW' '2088'; -KW2089 : 'KW' '2089'; -KW2090 : 'KW' '2090'; -KW2091 : 'KW' '2091'; -KW2092 : 'KW' '2092'; -KW2093 : 'KW' '2093'; -KW2094 : 'KW' '2094'; -KW2095 : 'KW' '2095'; -KW2096 : 'KW' '2096'; -KW2097 : 'KW' '2097'; -KW2098 : 'KW' '2098'; -KW2099 : 'KW' '2099'; -KW2100 : 'KW' '2100'; -KW2101 : 'KW' '2101'; -KW2102 : 'KW' '2102'; -KW2103 : 'KW' '2103'; -KW2104 : 'KW' '2104'; -KW2105 : 'KW' '2105'; -KW2106 : 'KW' '2106'; -KW2107 : 'KW' '2107'; -KW2108 : 'KW' '2108'; -KW2109 : 'KW' '2109'; -KW2110 : 'KW' '2110'; -KW2111 : 'KW' '2111'; -KW2112 : 'KW' '2112'; -KW2113 : 'KW' '2113'; -KW2114 : 'KW' '2114'; -KW2115 : 'KW' '2115'; -KW2116 : 'KW' '2116'; -KW2117 : 'KW' '2117'; -KW2118 : 'KW' '2118'; -KW2119 : 'KW' '2119'; -KW2120 : 'KW' '2120'; -KW2121 : 'KW' '2121'; -KW2122 : 'KW' '2122'; -KW2123 : 'KW' '2123'; -KW2124 : 'KW' '2124'; -KW2125 : 'KW' '2125'; -KW2126 : 'KW' '2126'; -KW2127 : 'KW' '2127'; -KW2128 : 'KW' '2128'; -KW2129 : 'KW' '2129'; -KW2130 : 'KW' '2130'; -KW2131 : 'KW' '2131'; -KW2132 : 'KW' '2132'; -KW2133 : 'KW' '2133'; -KW2134 : 'KW' '2134'; -KW2135 : 'KW' '2135'; -KW2136 : 'KW' '2136'; -KW2137 : 'KW' '2137'; -KW2138 : 'KW' '2138'; -KW2139 : 'KW' '2139'; -KW2140 : 'KW' '2140'; -KW2141 : 'KW' '2141'; -KW2142 : 'KW' '2142'; -KW2143 : 'KW' '2143'; -KW2144 : 'KW' '2144'; -KW2145 : 'KW' '2145'; -KW2146 : 'KW' '2146'; -KW2147 : 'KW' '2147'; -KW2148 : 'KW' '2148'; 
-KW2149 : 'KW' '2149'; -KW2150 : 'KW' '2150'; -KW2151 : 'KW' '2151'; -KW2152 : 'KW' '2152'; -KW2153 : 'KW' '2153'; -KW2154 : 'KW' '2154'; -KW2155 : 'KW' '2155'; -KW2156 : 'KW' '2156'; -KW2157 : 'KW' '2157'; -KW2158 : 'KW' '2158'; -KW2159 : 'KW' '2159'; -KW2160 : 'KW' '2160'; -KW2161 : 'KW' '2161'; -KW2162 : 'KW' '2162'; -KW2163 : 'KW' '2163'; -KW2164 : 'KW' '2164'; -KW2165 : 'KW' '2165'; -KW2166 : 'KW' '2166'; -KW2167 : 'KW' '2167'; -KW2168 : 'KW' '2168'; -KW2169 : 'KW' '2169'; -KW2170 : 'KW' '2170'; -KW2171 : 'KW' '2171'; -KW2172 : 'KW' '2172'; -KW2173 : 'KW' '2173'; -KW2174 : 'KW' '2174'; -KW2175 : 'KW' '2175'; -KW2176 : 'KW' '2176'; -KW2177 : 'KW' '2177'; -KW2178 : 'KW' '2178'; -KW2179 : 'KW' '2179'; -KW2180 : 'KW' '2180'; -KW2181 : 'KW' '2181'; -KW2182 : 'KW' '2182'; -KW2183 : 'KW' '2183'; -KW2184 : 'KW' '2184'; -KW2185 : 'KW' '2185'; -KW2186 : 'KW' '2186'; -KW2187 : 'KW' '2187'; -KW2188 : 'KW' '2188'; -KW2189 : 'KW' '2189'; -KW2190 : 'KW' '2190'; -KW2191 : 'KW' '2191'; -KW2192 : 'KW' '2192'; -KW2193 : 'KW' '2193'; -KW2194 : 'KW' '2194'; -KW2195 : 'KW' '2195'; -KW2196 : 'KW' '2196'; -KW2197 : 'KW' '2197'; -KW2198 : 'KW' '2198'; -KW2199 : 'KW' '2199'; -KW2200 : 'KW' '2200'; -KW2201 : 'KW' '2201'; -KW2202 : 'KW' '2202'; -KW2203 : 'KW' '2203'; -KW2204 : 'KW' '2204'; -KW2205 : 'KW' '2205'; -KW2206 : 'KW' '2206'; -KW2207 : 'KW' '2207'; -KW2208 : 'KW' '2208'; -KW2209 : 'KW' '2209'; -KW2210 : 'KW' '2210'; -KW2211 : 'KW' '2211'; -KW2212 : 'KW' '2212'; -KW2213 : 'KW' '2213'; -KW2214 : 'KW' '2214'; -KW2215 : 'KW' '2215'; -KW2216 : 'KW' '2216'; -KW2217 : 'KW' '2217'; -KW2218 : 'KW' '2218'; -KW2219 : 'KW' '2219'; -KW2220 : 'KW' '2220'; -KW2221 : 'KW' '2221'; -KW2222 : 'KW' '2222'; -KW2223 : 'KW' '2223'; -KW2224 : 'KW' '2224'; -KW2225 : 'KW' '2225'; -KW2226 : 'KW' '2226'; -KW2227 : 'KW' '2227'; -KW2228 : 'KW' '2228'; -KW2229 : 'KW' '2229'; -KW2230 : 'KW' '2230'; -KW2231 : 'KW' '2231'; -KW2232 : 'KW' '2232'; -KW2233 : 'KW' '2233'; -KW2234 : 'KW' '2234'; -KW2235 : 'KW' '2235'; -KW2236 : 'KW' '2236'; -KW2237 : 'KW' '2237'; -KW2238 : 'KW' '2238'; -KW2239 : 'KW' '2239'; -KW2240 : 'KW' '2240'; -KW2241 : 'KW' '2241'; -KW2242 : 'KW' '2242'; -KW2243 : 'KW' '2243'; -KW2244 : 'KW' '2244'; -KW2245 : 'KW' '2245'; -KW2246 : 'KW' '2246'; -KW2247 : 'KW' '2247'; -KW2248 : 'KW' '2248'; -KW2249 : 'KW' '2249'; -KW2250 : 'KW' '2250'; -KW2251 : 'KW' '2251'; -KW2252 : 'KW' '2252'; -KW2253 : 'KW' '2253'; -KW2254 : 'KW' '2254'; -KW2255 : 'KW' '2255'; -KW2256 : 'KW' '2256'; -KW2257 : 'KW' '2257'; -KW2258 : 'KW' '2258'; -KW2259 : 'KW' '2259'; -KW2260 : 'KW' '2260'; -KW2261 : 'KW' '2261'; -KW2262 : 'KW' '2262'; -KW2263 : 'KW' '2263'; -KW2264 : 'KW' '2264'; -KW2265 : 'KW' '2265'; -KW2266 : 'KW' '2266'; -KW2267 : 'KW' '2267'; -KW2268 : 'KW' '2268'; -KW2269 : 'KW' '2269'; -KW2270 : 'KW' '2270'; -KW2271 : 'KW' '2271'; -KW2272 : 'KW' '2272'; -KW2273 : 'KW' '2273'; -KW2274 : 'KW' '2274'; -KW2275 : 'KW' '2275'; -KW2276 : 'KW' '2276'; -KW2277 : 'KW' '2277'; -KW2278 : 'KW' '2278'; -KW2279 : 'KW' '2279'; -KW2280 : 'KW' '2280'; -KW2281 : 'KW' '2281'; -KW2282 : 'KW' '2282'; -KW2283 : 'KW' '2283'; -KW2284 : 'KW' '2284'; -KW2285 : 'KW' '2285'; -KW2286 : 'KW' '2286'; -KW2287 : 'KW' '2287'; -KW2288 : 'KW' '2288'; -KW2289 : 'KW' '2289'; -KW2290 : 'KW' '2290'; -KW2291 : 'KW' '2291'; -KW2292 : 'KW' '2292'; -KW2293 : 'KW' '2293'; -KW2294 : 'KW' '2294'; -KW2295 : 'KW' '2295'; -KW2296 : 'KW' '2296'; -KW2297 : 'KW' '2297'; -KW2298 : 'KW' '2298'; -KW2299 : 'KW' '2299'; -KW2300 : 'KW' '2300'; -KW2301 : 'KW' '2301'; -KW2302 : 'KW' '2302'; -KW2303 : 
'KW' '2303'; -KW2304 : 'KW' '2304'; -KW2305 : 'KW' '2305'; -KW2306 : 'KW' '2306'; -KW2307 : 'KW' '2307'; -KW2308 : 'KW' '2308'; -KW2309 : 'KW' '2309'; -KW2310 : 'KW' '2310'; -KW2311 : 'KW' '2311'; -KW2312 : 'KW' '2312'; -KW2313 : 'KW' '2313'; -KW2314 : 'KW' '2314'; -KW2315 : 'KW' '2315'; -KW2316 : 'KW' '2316'; -KW2317 : 'KW' '2317'; -KW2318 : 'KW' '2318'; -KW2319 : 'KW' '2319'; -KW2320 : 'KW' '2320'; -KW2321 : 'KW' '2321'; -KW2322 : 'KW' '2322'; -KW2323 : 'KW' '2323'; -KW2324 : 'KW' '2324'; -KW2325 : 'KW' '2325'; -KW2326 : 'KW' '2326'; -KW2327 : 'KW' '2327'; -KW2328 : 'KW' '2328'; -KW2329 : 'KW' '2329'; -KW2330 : 'KW' '2330'; -KW2331 : 'KW' '2331'; -KW2332 : 'KW' '2332'; -KW2333 : 'KW' '2333'; -KW2334 : 'KW' '2334'; -KW2335 : 'KW' '2335'; -KW2336 : 'KW' '2336'; -KW2337 : 'KW' '2337'; -KW2338 : 'KW' '2338'; -KW2339 : 'KW' '2339'; -KW2340 : 'KW' '2340'; -KW2341 : 'KW' '2341'; -KW2342 : 'KW' '2342'; -KW2343 : 'KW' '2343'; -KW2344 : 'KW' '2344'; -KW2345 : 'KW' '2345'; -KW2346 : 'KW' '2346'; -KW2347 : 'KW' '2347'; -KW2348 : 'KW' '2348'; -KW2349 : 'KW' '2349'; -KW2350 : 'KW' '2350'; -KW2351 : 'KW' '2351'; -KW2352 : 'KW' '2352'; -KW2353 : 'KW' '2353'; -KW2354 : 'KW' '2354'; -KW2355 : 'KW' '2355'; -KW2356 : 'KW' '2356'; -KW2357 : 'KW' '2357'; -KW2358 : 'KW' '2358'; -KW2359 : 'KW' '2359'; -KW2360 : 'KW' '2360'; -KW2361 : 'KW' '2361'; -KW2362 : 'KW' '2362'; -KW2363 : 'KW' '2363'; -KW2364 : 'KW' '2364'; -KW2365 : 'KW' '2365'; -KW2366 : 'KW' '2366'; -KW2367 : 'KW' '2367'; -KW2368 : 'KW' '2368'; -KW2369 : 'KW' '2369'; -KW2370 : 'KW' '2370'; -KW2371 : 'KW' '2371'; -KW2372 : 'KW' '2372'; -KW2373 : 'KW' '2373'; -KW2374 : 'KW' '2374'; -KW2375 : 'KW' '2375'; -KW2376 : 'KW' '2376'; -KW2377 : 'KW' '2377'; -KW2378 : 'KW' '2378'; -KW2379 : 'KW' '2379'; -KW2380 : 'KW' '2380'; -KW2381 : 'KW' '2381'; -KW2382 : 'KW' '2382'; -KW2383 : 'KW' '2383'; -KW2384 : 'KW' '2384'; -KW2385 : 'KW' '2385'; -KW2386 : 'KW' '2386'; -KW2387 : 'KW' '2387'; -KW2388 : 'KW' '2388'; -KW2389 : 'KW' '2389'; -KW2390 : 'KW' '2390'; -KW2391 : 'KW' '2391'; -KW2392 : 'KW' '2392'; -KW2393 : 'KW' '2393'; -KW2394 : 'KW' '2394'; -KW2395 : 'KW' '2395'; -KW2396 : 'KW' '2396'; -KW2397 : 'KW' '2397'; -KW2398 : 'KW' '2398'; -KW2399 : 'KW' '2399'; -KW2400 : 'KW' '2400'; -KW2401 : 'KW' '2401'; -KW2402 : 'KW' '2402'; -KW2403 : 'KW' '2403'; -KW2404 : 'KW' '2404'; -KW2405 : 'KW' '2405'; -KW2406 : 'KW' '2406'; -KW2407 : 'KW' '2407'; -KW2408 : 'KW' '2408'; -KW2409 : 'KW' '2409'; -KW2410 : 'KW' '2410'; -KW2411 : 'KW' '2411'; -KW2412 : 'KW' '2412'; -KW2413 : 'KW' '2413'; -KW2414 : 'KW' '2414'; -KW2415 : 'KW' '2415'; -KW2416 : 'KW' '2416'; -KW2417 : 'KW' '2417'; -KW2418 : 'KW' '2418'; -KW2419 : 'KW' '2419'; -KW2420 : 'KW' '2420'; -KW2421 : 'KW' '2421'; -KW2422 : 'KW' '2422'; -KW2423 : 'KW' '2423'; -KW2424 : 'KW' '2424'; -KW2425 : 'KW' '2425'; -KW2426 : 'KW' '2426'; -KW2427 : 'KW' '2427'; -KW2428 : 'KW' '2428'; -KW2429 : 'KW' '2429'; -KW2430 : 'KW' '2430'; -KW2431 : 'KW' '2431'; -KW2432 : 'KW' '2432'; -KW2433 : 'KW' '2433'; -KW2434 : 'KW' '2434'; -KW2435 : 'KW' '2435'; -KW2436 : 'KW' '2436'; -KW2437 : 'KW' '2437'; -KW2438 : 'KW' '2438'; -KW2439 : 'KW' '2439'; -KW2440 : 'KW' '2440'; -KW2441 : 'KW' '2441'; -KW2442 : 'KW' '2442'; -KW2443 : 'KW' '2443'; -KW2444 : 'KW' '2444'; -KW2445 : 'KW' '2445'; -KW2446 : 'KW' '2446'; -KW2447 : 'KW' '2447'; -KW2448 : 'KW' '2448'; -KW2449 : 'KW' '2449'; -KW2450 : 'KW' '2450'; -KW2451 : 'KW' '2451'; -KW2452 : 'KW' '2452'; -KW2453 : 'KW' '2453'; -KW2454 : 'KW' '2454'; -KW2455 : 'KW' '2455'; -KW2456 : 'KW' '2456'; -KW2457 : 'KW' '2457'; 
-KW2458 : 'KW' '2458'; -KW2459 : 'KW' '2459'; -KW2460 : 'KW' '2460'; -KW2461 : 'KW' '2461'; -KW2462 : 'KW' '2462'; -KW2463 : 'KW' '2463'; -KW2464 : 'KW' '2464'; -KW2465 : 'KW' '2465'; -KW2466 : 'KW' '2466'; -KW2467 : 'KW' '2467'; -KW2468 : 'KW' '2468'; -KW2469 : 'KW' '2469'; -KW2470 : 'KW' '2470'; -KW2471 : 'KW' '2471'; -KW2472 : 'KW' '2472'; -KW2473 : 'KW' '2473'; -KW2474 : 'KW' '2474'; -KW2475 : 'KW' '2475'; -KW2476 : 'KW' '2476'; -KW2477 : 'KW' '2477'; -KW2478 : 'KW' '2478'; -KW2479 : 'KW' '2479'; -KW2480 : 'KW' '2480'; -KW2481 : 'KW' '2481'; -KW2482 : 'KW' '2482'; -KW2483 : 'KW' '2483'; -KW2484 : 'KW' '2484'; -KW2485 : 'KW' '2485'; -KW2486 : 'KW' '2486'; -KW2487 : 'KW' '2487'; -KW2488 : 'KW' '2488'; -KW2489 : 'KW' '2489'; -KW2490 : 'KW' '2490'; -KW2491 : 'KW' '2491'; -KW2492 : 'KW' '2492'; -KW2493 : 'KW' '2493'; -KW2494 : 'KW' '2494'; -KW2495 : 'KW' '2495'; -KW2496 : 'KW' '2496'; -KW2497 : 'KW' '2497'; -KW2498 : 'KW' '2498'; -KW2499 : 'KW' '2499'; -KW2500 : 'KW' '2500'; -KW2501 : 'KW' '2501'; -KW2502 : 'KW' '2502'; -KW2503 : 'KW' '2503'; -KW2504 : 'KW' '2504'; -KW2505 : 'KW' '2505'; -KW2506 : 'KW' '2506'; -KW2507 : 'KW' '2507'; -KW2508 : 'KW' '2508'; -KW2509 : 'KW' '2509'; -KW2510 : 'KW' '2510'; -KW2511 : 'KW' '2511'; -KW2512 : 'KW' '2512'; -KW2513 : 'KW' '2513'; -KW2514 : 'KW' '2514'; -KW2515 : 'KW' '2515'; -KW2516 : 'KW' '2516'; -KW2517 : 'KW' '2517'; -KW2518 : 'KW' '2518'; -KW2519 : 'KW' '2519'; -KW2520 : 'KW' '2520'; -KW2521 : 'KW' '2521'; -KW2522 : 'KW' '2522'; -KW2523 : 'KW' '2523'; -KW2524 : 'KW' '2524'; -KW2525 : 'KW' '2525'; -KW2526 : 'KW' '2526'; -KW2527 : 'KW' '2527'; -KW2528 : 'KW' '2528'; -KW2529 : 'KW' '2529'; -KW2530 : 'KW' '2530'; -KW2531 : 'KW' '2531'; -KW2532 : 'KW' '2532'; -KW2533 : 'KW' '2533'; -KW2534 : 'KW' '2534'; -KW2535 : 'KW' '2535'; -KW2536 : 'KW' '2536'; -KW2537 : 'KW' '2537'; -KW2538 : 'KW' '2538'; -KW2539 : 'KW' '2539'; -KW2540 : 'KW' '2540'; -KW2541 : 'KW' '2541'; -KW2542 : 'KW' '2542'; -KW2543 : 'KW' '2543'; -KW2544 : 'KW' '2544'; -KW2545 : 'KW' '2545'; -KW2546 : 'KW' '2546'; -KW2547 : 'KW' '2547'; -KW2548 : 'KW' '2548'; -KW2549 : 'KW' '2549'; -KW2550 : 'KW' '2550'; -KW2551 : 'KW' '2551'; -KW2552 : 'KW' '2552'; -KW2553 : 'KW' '2553'; -KW2554 : 'KW' '2554'; -KW2555 : 'KW' '2555'; -KW2556 : 'KW' '2556'; -KW2557 : 'KW' '2557'; -KW2558 : 'KW' '2558'; -KW2559 : 'KW' '2559'; -KW2560 : 'KW' '2560'; -KW2561 : 'KW' '2561'; -KW2562 : 'KW' '2562'; -KW2563 : 'KW' '2563'; -KW2564 : 'KW' '2564'; -KW2565 : 'KW' '2565'; -KW2566 : 'KW' '2566'; -KW2567 : 'KW' '2567'; -KW2568 : 'KW' '2568'; -KW2569 : 'KW' '2569'; -KW2570 : 'KW' '2570'; -KW2571 : 'KW' '2571'; -KW2572 : 'KW' '2572'; -KW2573 : 'KW' '2573'; -KW2574 : 'KW' '2574'; -KW2575 : 'KW' '2575'; -KW2576 : 'KW' '2576'; -KW2577 : 'KW' '2577'; -KW2578 : 'KW' '2578'; -KW2579 : 'KW' '2579'; -KW2580 : 'KW' '2580'; -KW2581 : 'KW' '2581'; -KW2582 : 'KW' '2582'; -KW2583 : 'KW' '2583'; -KW2584 : 'KW' '2584'; -KW2585 : 'KW' '2585'; -KW2586 : 'KW' '2586'; -KW2587 : 'KW' '2587'; -KW2588 : 'KW' '2588'; -KW2589 : 'KW' '2589'; -KW2590 : 'KW' '2590'; -KW2591 : 'KW' '2591'; -KW2592 : 'KW' '2592'; -KW2593 : 'KW' '2593'; -KW2594 : 'KW' '2594'; -KW2595 : 'KW' '2595'; -KW2596 : 'KW' '2596'; -KW2597 : 'KW' '2597'; -KW2598 : 'KW' '2598'; -KW2599 : 'KW' '2599'; -KW2600 : 'KW' '2600'; -KW2601 : 'KW' '2601'; -KW2602 : 'KW' '2602'; -KW2603 : 'KW' '2603'; -KW2604 : 'KW' '2604'; -KW2605 : 'KW' '2605'; -KW2606 : 'KW' '2606'; -KW2607 : 'KW' '2607'; -KW2608 : 'KW' '2608'; -KW2609 : 'KW' '2609'; -KW2610 : 'KW' '2610'; -KW2611 : 'KW' '2611'; -KW2612 : 
'KW' '2612'; -KW2613 : 'KW' '2613'; -KW2614 : 'KW' '2614'; -KW2615 : 'KW' '2615'; -KW2616 : 'KW' '2616'; -KW2617 : 'KW' '2617'; -KW2618 : 'KW' '2618'; -KW2619 : 'KW' '2619'; -KW2620 : 'KW' '2620'; -KW2621 : 'KW' '2621'; -KW2622 : 'KW' '2622'; -KW2623 : 'KW' '2623'; -KW2624 : 'KW' '2624'; -KW2625 : 'KW' '2625'; -KW2626 : 'KW' '2626'; -KW2627 : 'KW' '2627'; -KW2628 : 'KW' '2628'; -KW2629 : 'KW' '2629'; -KW2630 : 'KW' '2630'; -KW2631 : 'KW' '2631'; -KW2632 : 'KW' '2632'; -KW2633 : 'KW' '2633'; -KW2634 : 'KW' '2634'; -KW2635 : 'KW' '2635'; -KW2636 : 'KW' '2636'; -KW2637 : 'KW' '2637'; -KW2638 : 'KW' '2638'; -KW2639 : 'KW' '2639'; -KW2640 : 'KW' '2640'; -KW2641 : 'KW' '2641'; -KW2642 : 'KW' '2642'; -KW2643 : 'KW' '2643'; -KW2644 : 'KW' '2644'; -KW2645 : 'KW' '2645'; -KW2646 : 'KW' '2646'; -KW2647 : 'KW' '2647'; -KW2648 : 'KW' '2648'; -KW2649 : 'KW' '2649'; -KW2650 : 'KW' '2650'; -KW2651 : 'KW' '2651'; -KW2652 : 'KW' '2652'; -KW2653 : 'KW' '2653'; -KW2654 : 'KW' '2654'; -KW2655 : 'KW' '2655'; -KW2656 : 'KW' '2656'; -KW2657 : 'KW' '2657'; -KW2658 : 'KW' '2658'; -KW2659 : 'KW' '2659'; -KW2660 : 'KW' '2660'; -KW2661 : 'KW' '2661'; -KW2662 : 'KW' '2662'; -KW2663 : 'KW' '2663'; -KW2664 : 'KW' '2664'; -KW2665 : 'KW' '2665'; -KW2666 : 'KW' '2666'; -KW2667 : 'KW' '2667'; -KW2668 : 'KW' '2668'; -KW2669 : 'KW' '2669'; -KW2670 : 'KW' '2670'; -KW2671 : 'KW' '2671'; -KW2672 : 'KW' '2672'; -KW2673 : 'KW' '2673'; -KW2674 : 'KW' '2674'; -KW2675 : 'KW' '2675'; -KW2676 : 'KW' '2676'; -KW2677 : 'KW' '2677'; -KW2678 : 'KW' '2678'; -KW2679 : 'KW' '2679'; -KW2680 : 'KW' '2680'; -KW2681 : 'KW' '2681'; -KW2682 : 'KW' '2682'; -KW2683 : 'KW' '2683'; -KW2684 : 'KW' '2684'; -KW2685 : 'KW' '2685'; -KW2686 : 'KW' '2686'; -KW2687 : 'KW' '2687'; -KW2688 : 'KW' '2688'; -KW2689 : 'KW' '2689'; -KW2690 : 'KW' '2690'; -KW2691 : 'KW' '2691'; -KW2692 : 'KW' '2692'; -KW2693 : 'KW' '2693'; -KW2694 : 'KW' '2694'; -KW2695 : 'KW' '2695'; -KW2696 : 'KW' '2696'; -KW2697 : 'KW' '2697'; -KW2698 : 'KW' '2698'; -KW2699 : 'KW' '2699'; -KW2700 : 'KW' '2700'; -KW2701 : 'KW' '2701'; -KW2702 : 'KW' '2702'; -KW2703 : 'KW' '2703'; -KW2704 : 'KW' '2704'; -KW2705 : 'KW' '2705'; -KW2706 : 'KW' '2706'; -KW2707 : 'KW' '2707'; -KW2708 : 'KW' '2708'; -KW2709 : 'KW' '2709'; -KW2710 : 'KW' '2710'; -KW2711 : 'KW' '2711'; -KW2712 : 'KW' '2712'; -KW2713 : 'KW' '2713'; -KW2714 : 'KW' '2714'; -KW2715 : 'KW' '2715'; -KW2716 : 'KW' '2716'; -KW2717 : 'KW' '2717'; -KW2718 : 'KW' '2718'; -KW2719 : 'KW' '2719'; -KW2720 : 'KW' '2720'; -KW2721 : 'KW' '2721'; -KW2722 : 'KW' '2722'; -KW2723 : 'KW' '2723'; -KW2724 : 'KW' '2724'; -KW2725 : 'KW' '2725'; -KW2726 : 'KW' '2726'; -KW2727 : 'KW' '2727'; -KW2728 : 'KW' '2728'; -KW2729 : 'KW' '2729'; -KW2730 : 'KW' '2730'; -KW2731 : 'KW' '2731'; -KW2732 : 'KW' '2732'; -KW2733 : 'KW' '2733'; -KW2734 : 'KW' '2734'; -KW2735 : 'KW' '2735'; -KW2736 : 'KW' '2736'; -KW2737 : 'KW' '2737'; -KW2738 : 'KW' '2738'; -KW2739 : 'KW' '2739'; -KW2740 : 'KW' '2740'; -KW2741 : 'KW' '2741'; -KW2742 : 'KW' '2742'; -KW2743 : 'KW' '2743'; -KW2744 : 'KW' '2744'; -KW2745 : 'KW' '2745'; -KW2746 : 'KW' '2746'; -KW2747 : 'KW' '2747'; -KW2748 : 'KW' '2748'; -KW2749 : 'KW' '2749'; -KW2750 : 'KW' '2750'; -KW2751 : 'KW' '2751'; -KW2752 : 'KW' '2752'; -KW2753 : 'KW' '2753'; -KW2754 : 'KW' '2754'; -KW2755 : 'KW' '2755'; -KW2756 : 'KW' '2756'; -KW2757 : 'KW' '2757'; -KW2758 : 'KW' '2758'; -KW2759 : 'KW' '2759'; -KW2760 : 'KW' '2760'; -KW2761 : 'KW' '2761'; -KW2762 : 'KW' '2762'; -KW2763 : 'KW' '2763'; -KW2764 : 'KW' '2764'; -KW2765 : 'KW' '2765'; -KW2766 : 'KW' '2766'; 
-KW2767 : 'KW' '2767'; -KW2768 : 'KW' '2768'; -KW2769 : 'KW' '2769'; -KW2770 : 'KW' '2770'; -KW2771 : 'KW' '2771'; -KW2772 : 'KW' '2772'; -KW2773 : 'KW' '2773'; -KW2774 : 'KW' '2774'; -KW2775 : 'KW' '2775'; -KW2776 : 'KW' '2776'; -KW2777 : 'KW' '2777'; -KW2778 : 'KW' '2778'; -KW2779 : 'KW' '2779'; -KW2780 : 'KW' '2780'; -KW2781 : 'KW' '2781'; -KW2782 : 'KW' '2782'; -KW2783 : 'KW' '2783'; -KW2784 : 'KW' '2784'; -KW2785 : 'KW' '2785'; -KW2786 : 'KW' '2786'; -KW2787 : 'KW' '2787'; -KW2788 : 'KW' '2788'; -KW2789 : 'KW' '2789'; -KW2790 : 'KW' '2790'; -KW2791 : 'KW' '2791'; -KW2792 : 'KW' '2792'; -KW2793 : 'KW' '2793'; -KW2794 : 'KW' '2794'; -KW2795 : 'KW' '2795'; -KW2796 : 'KW' '2796'; -KW2797 : 'KW' '2797'; -KW2798 : 'KW' '2798'; -KW2799 : 'KW' '2799'; -KW2800 : 'KW' '2800'; -KW2801 : 'KW' '2801'; -KW2802 : 'KW' '2802'; -KW2803 : 'KW' '2803'; -KW2804 : 'KW' '2804'; -KW2805 : 'KW' '2805'; -KW2806 : 'KW' '2806'; -KW2807 : 'KW' '2807'; -KW2808 : 'KW' '2808'; -KW2809 : 'KW' '2809'; -KW2810 : 'KW' '2810'; -KW2811 : 'KW' '2811'; -KW2812 : 'KW' '2812'; -KW2813 : 'KW' '2813'; -KW2814 : 'KW' '2814'; -KW2815 : 'KW' '2815'; -KW2816 : 'KW' '2816'; -KW2817 : 'KW' '2817'; -KW2818 : 'KW' '2818'; -KW2819 : 'KW' '2819'; -KW2820 : 'KW' '2820'; -KW2821 : 'KW' '2821'; -KW2822 : 'KW' '2822'; -KW2823 : 'KW' '2823'; -KW2824 : 'KW' '2824'; -KW2825 : 'KW' '2825'; -KW2826 : 'KW' '2826'; -KW2827 : 'KW' '2827'; -KW2828 : 'KW' '2828'; -KW2829 : 'KW' '2829'; -KW2830 : 'KW' '2830'; -KW2831 : 'KW' '2831'; -KW2832 : 'KW' '2832'; -KW2833 : 'KW' '2833'; -KW2834 : 'KW' '2834'; -KW2835 : 'KW' '2835'; -KW2836 : 'KW' '2836'; -KW2837 : 'KW' '2837'; -KW2838 : 'KW' '2838'; -KW2839 : 'KW' '2839'; -KW2840 : 'KW' '2840'; -KW2841 : 'KW' '2841'; -KW2842 : 'KW' '2842'; -KW2843 : 'KW' '2843'; -KW2844 : 'KW' '2844'; -KW2845 : 'KW' '2845'; -KW2846 : 'KW' '2846'; -KW2847 : 'KW' '2847'; -KW2848 : 'KW' '2848'; -KW2849 : 'KW' '2849'; -KW2850 : 'KW' '2850'; -KW2851 : 'KW' '2851'; -KW2852 : 'KW' '2852'; -KW2853 : 'KW' '2853'; -KW2854 : 'KW' '2854'; -KW2855 : 'KW' '2855'; -KW2856 : 'KW' '2856'; -KW2857 : 'KW' '2857'; -KW2858 : 'KW' '2858'; -KW2859 : 'KW' '2859'; -KW2860 : 'KW' '2860'; -KW2861 : 'KW' '2861'; -KW2862 : 'KW' '2862'; -KW2863 : 'KW' '2863'; -KW2864 : 'KW' '2864'; -KW2865 : 'KW' '2865'; -KW2866 : 'KW' '2866'; -KW2867 : 'KW' '2867'; -KW2868 : 'KW' '2868'; -KW2869 : 'KW' '2869'; -KW2870 : 'KW' '2870'; -KW2871 : 'KW' '2871'; -KW2872 : 'KW' '2872'; -KW2873 : 'KW' '2873'; -KW2874 : 'KW' '2874'; -KW2875 : 'KW' '2875'; -KW2876 : 'KW' '2876'; -KW2877 : 'KW' '2877'; -KW2878 : 'KW' '2878'; -KW2879 : 'KW' '2879'; -KW2880 : 'KW' '2880'; -KW2881 : 'KW' '2881'; -KW2882 : 'KW' '2882'; -KW2883 : 'KW' '2883'; -KW2884 : 'KW' '2884'; -KW2885 : 'KW' '2885'; -KW2886 : 'KW' '2886'; -KW2887 : 'KW' '2887'; -KW2888 : 'KW' '2888'; -KW2889 : 'KW' '2889'; -KW2890 : 'KW' '2890'; -KW2891 : 'KW' '2891'; -KW2892 : 'KW' '2892'; -KW2893 : 'KW' '2893'; -KW2894 : 'KW' '2894'; -KW2895 : 'KW' '2895'; -KW2896 : 'KW' '2896'; -KW2897 : 'KW' '2897'; -KW2898 : 'KW' '2898'; -KW2899 : 'KW' '2899'; -KW2900 : 'KW' '2900'; -KW2901 : 'KW' '2901'; -KW2902 : 'KW' '2902'; -KW2903 : 'KW' '2903'; -KW2904 : 'KW' '2904'; -KW2905 : 'KW' '2905'; -KW2906 : 'KW' '2906'; -KW2907 : 'KW' '2907'; -KW2908 : 'KW' '2908'; -KW2909 : 'KW' '2909'; -KW2910 : 'KW' '2910'; -KW2911 : 'KW' '2911'; -KW2912 : 'KW' '2912'; -KW2913 : 'KW' '2913'; -KW2914 : 'KW' '2914'; -KW2915 : 'KW' '2915'; -KW2916 : 'KW' '2916'; -KW2917 : 'KW' '2917'; -KW2918 : 'KW' '2918'; -KW2919 : 'KW' '2919'; -KW2920 : 'KW' '2920'; -KW2921 : 
'KW' '2921'; -KW2922 : 'KW' '2922'; -KW2923 : 'KW' '2923'; -KW2924 : 'KW' '2924'; -KW2925 : 'KW' '2925'; -KW2926 : 'KW' '2926'; -KW2927 : 'KW' '2927'; -KW2928 : 'KW' '2928'; -KW2929 : 'KW' '2929'; -KW2930 : 'KW' '2930'; -KW2931 : 'KW' '2931'; -KW2932 : 'KW' '2932'; -KW2933 : 'KW' '2933'; -KW2934 : 'KW' '2934'; -KW2935 : 'KW' '2935'; -KW2936 : 'KW' '2936'; -KW2937 : 'KW' '2937'; -KW2938 : 'KW' '2938'; -KW2939 : 'KW' '2939'; -KW2940 : 'KW' '2940'; -KW2941 : 'KW' '2941'; -KW2942 : 'KW' '2942'; -KW2943 : 'KW' '2943'; -KW2944 : 'KW' '2944'; -KW2945 : 'KW' '2945'; -KW2946 : 'KW' '2946'; -KW2947 : 'KW' '2947'; -KW2948 : 'KW' '2948'; -KW2949 : 'KW' '2949'; -KW2950 : 'KW' '2950'; -KW2951 : 'KW' '2951'; -KW2952 : 'KW' '2952'; -KW2953 : 'KW' '2953'; -KW2954 : 'KW' '2954'; -KW2955 : 'KW' '2955'; -KW2956 : 'KW' '2956'; -KW2957 : 'KW' '2957'; -KW2958 : 'KW' '2958'; -KW2959 : 'KW' '2959'; -KW2960 : 'KW' '2960'; -KW2961 : 'KW' '2961'; -KW2962 : 'KW' '2962'; -KW2963 : 'KW' '2963'; -KW2964 : 'KW' '2964'; -KW2965 : 'KW' '2965'; -KW2966 : 'KW' '2966'; -KW2967 : 'KW' '2967'; -KW2968 : 'KW' '2968'; -KW2969 : 'KW' '2969'; -KW2970 : 'KW' '2970'; -KW2971 : 'KW' '2971'; -KW2972 : 'KW' '2972'; -KW2973 : 'KW' '2973'; -KW2974 : 'KW' '2974'; -KW2975 : 'KW' '2975'; -KW2976 : 'KW' '2976'; -KW2977 : 'KW' '2977'; -KW2978 : 'KW' '2978'; -KW2979 : 'KW' '2979'; -KW2980 : 'KW' '2980'; -KW2981 : 'KW' '2981'; -KW2982 : 'KW' '2982'; -KW2983 : 'KW' '2983'; -KW2984 : 'KW' '2984'; -KW2985 : 'KW' '2985'; -KW2986 : 'KW' '2986'; -KW2987 : 'KW' '2987'; -KW2988 : 'KW' '2988'; -KW2989 : 'KW' '2989'; -KW2990 : 'KW' '2990'; -KW2991 : 'KW' '2991'; -KW2992 : 'KW' '2992'; -KW2993 : 'KW' '2993'; -KW2994 : 'KW' '2994'; -KW2995 : 'KW' '2995'; -KW2996 : 'KW' '2996'; -KW2997 : 'KW' '2997'; -KW2998 : 'KW' '2998'; -KW2999 : 'KW' '2999'; -KW3000 : 'KW' '3000'; -KW3001 : 'KW' '3001'; -KW3002 : 'KW' '3002'; -KW3003 : 'KW' '3003'; -KW3004 : 'KW' '3004'; -KW3005 : 'KW' '3005'; -KW3006 : 'KW' '3006'; -KW3007 : 'KW' '3007'; -KW3008 : 'KW' '3008'; -KW3009 : 'KW' '3009'; -KW3010 : 'KW' '3010'; -KW3011 : 'KW' '3011'; -KW3012 : 'KW' '3012'; -KW3013 : 'KW' '3013'; -KW3014 : 'KW' '3014'; -KW3015 : 'KW' '3015'; -KW3016 : 'KW' '3016'; -KW3017 : 'KW' '3017'; -KW3018 : 'KW' '3018'; -KW3019 : 'KW' '3019'; -KW3020 : 'KW' '3020'; -KW3021 : 'KW' '3021'; -KW3022 : 'KW' '3022'; -KW3023 : 'KW' '3023'; -KW3024 : 'KW' '3024'; -KW3025 : 'KW' '3025'; -KW3026 : 'KW' '3026'; -KW3027 : 'KW' '3027'; -KW3028 : 'KW' '3028'; -KW3029 : 'KW' '3029'; -KW3030 : 'KW' '3030'; -KW3031 : 'KW' '3031'; -KW3032 : 'KW' '3032'; -KW3033 : 'KW' '3033'; -KW3034 : 'KW' '3034'; -KW3035 : 'KW' '3035'; -KW3036 : 'KW' '3036'; -KW3037 : 'KW' '3037'; -KW3038 : 'KW' '3038'; -KW3039 : 'KW' '3039'; -KW3040 : 'KW' '3040'; -KW3041 : 'KW' '3041'; -KW3042 : 'KW' '3042'; -KW3043 : 'KW' '3043'; -KW3044 : 'KW' '3044'; -KW3045 : 'KW' '3045'; -KW3046 : 'KW' '3046'; -KW3047 : 'KW' '3047'; -KW3048 : 'KW' '3048'; -KW3049 : 'KW' '3049'; -KW3050 : 'KW' '3050'; -KW3051 : 'KW' '3051'; -KW3052 : 'KW' '3052'; -KW3053 : 'KW' '3053'; -KW3054 : 'KW' '3054'; -KW3055 : 'KW' '3055'; -KW3056 : 'KW' '3056'; -KW3057 : 'KW' '3057'; -KW3058 : 'KW' '3058'; -KW3059 : 'KW' '3059'; -KW3060 : 'KW' '3060'; -KW3061 : 'KW' '3061'; -KW3062 : 'KW' '3062'; -KW3063 : 'KW' '3063'; -KW3064 : 'KW' '3064'; -KW3065 : 'KW' '3065'; -KW3066 : 'KW' '3066'; -KW3067 : 'KW' '3067'; -KW3068 : 'KW' '3068'; -KW3069 : 'KW' '3069'; -KW3070 : 'KW' '3070'; -KW3071 : 'KW' '3071'; -KW3072 : 'KW' '3072'; -KW3073 : 'KW' '3073'; -KW3074 : 'KW' '3074'; -KW3075 : 'KW' '3075'; 
-KW3076 : 'KW' '3076'; -KW3077 : 'KW' '3077'; -KW3078 : 'KW' '3078'; -KW3079 : 'KW' '3079'; -KW3080 : 'KW' '3080'; -KW3081 : 'KW' '3081'; -KW3082 : 'KW' '3082'; -KW3083 : 'KW' '3083'; -KW3084 : 'KW' '3084'; -KW3085 : 'KW' '3085'; -KW3086 : 'KW' '3086'; -KW3087 : 'KW' '3087'; -KW3088 : 'KW' '3088'; -KW3089 : 'KW' '3089'; -KW3090 : 'KW' '3090'; -KW3091 : 'KW' '3091'; -KW3092 : 'KW' '3092'; -KW3093 : 'KW' '3093'; -KW3094 : 'KW' '3094'; -KW3095 : 'KW' '3095'; -KW3096 : 'KW' '3096'; -KW3097 : 'KW' '3097'; -KW3098 : 'KW' '3098'; -KW3099 : 'KW' '3099'; -KW3100 : 'KW' '3100'; -KW3101 : 'KW' '3101'; -KW3102 : 'KW' '3102'; -KW3103 : 'KW' '3103'; -KW3104 : 'KW' '3104'; -KW3105 : 'KW' '3105'; -KW3106 : 'KW' '3106'; -KW3107 : 'KW' '3107'; -KW3108 : 'KW' '3108'; -KW3109 : 'KW' '3109'; -KW3110 : 'KW' '3110'; -KW3111 : 'KW' '3111'; -KW3112 : 'KW' '3112'; -KW3113 : 'KW' '3113'; -KW3114 : 'KW' '3114'; -KW3115 : 'KW' '3115'; -KW3116 : 'KW' '3116'; -KW3117 : 'KW' '3117'; -KW3118 : 'KW' '3118'; -KW3119 : 'KW' '3119'; -KW3120 : 'KW' '3120'; -KW3121 : 'KW' '3121'; -KW3122 : 'KW' '3122'; -KW3123 : 'KW' '3123'; -KW3124 : 'KW' '3124'; -KW3125 : 'KW' '3125'; -KW3126 : 'KW' '3126'; -KW3127 : 'KW' '3127'; -KW3128 : 'KW' '3128'; -KW3129 : 'KW' '3129'; -KW3130 : 'KW' '3130'; -KW3131 : 'KW' '3131'; -KW3132 : 'KW' '3132'; -KW3133 : 'KW' '3133'; -KW3134 : 'KW' '3134'; -KW3135 : 'KW' '3135'; -KW3136 : 'KW' '3136'; -KW3137 : 'KW' '3137'; -KW3138 : 'KW' '3138'; -KW3139 : 'KW' '3139'; -KW3140 : 'KW' '3140'; -KW3141 : 'KW' '3141'; -KW3142 : 'KW' '3142'; -KW3143 : 'KW' '3143'; -KW3144 : 'KW' '3144'; -KW3145 : 'KW' '3145'; -KW3146 : 'KW' '3146'; -KW3147 : 'KW' '3147'; -KW3148 : 'KW' '3148'; -KW3149 : 'KW' '3149'; -KW3150 : 'KW' '3150'; -KW3151 : 'KW' '3151'; -KW3152 : 'KW' '3152'; -KW3153 : 'KW' '3153'; -KW3154 : 'KW' '3154'; -KW3155 : 'KW' '3155'; -KW3156 : 'KW' '3156'; -KW3157 : 'KW' '3157'; -KW3158 : 'KW' '3158'; -KW3159 : 'KW' '3159'; -KW3160 : 'KW' '3160'; -KW3161 : 'KW' '3161'; -KW3162 : 'KW' '3162'; -KW3163 : 'KW' '3163'; -KW3164 : 'KW' '3164'; -KW3165 : 'KW' '3165'; -KW3166 : 'KW' '3166'; -KW3167 : 'KW' '3167'; -KW3168 : 'KW' '3168'; -KW3169 : 'KW' '3169'; -KW3170 : 'KW' '3170'; -KW3171 : 'KW' '3171'; -KW3172 : 'KW' '3172'; -KW3173 : 'KW' '3173'; -KW3174 : 'KW' '3174'; -KW3175 : 'KW' '3175'; -KW3176 : 'KW' '3176'; -KW3177 : 'KW' '3177'; -KW3178 : 'KW' '3178'; -KW3179 : 'KW' '3179'; -KW3180 : 'KW' '3180'; -KW3181 : 'KW' '3181'; -KW3182 : 'KW' '3182'; -KW3183 : 'KW' '3183'; -KW3184 : 'KW' '3184'; -KW3185 : 'KW' '3185'; -KW3186 : 'KW' '3186'; -KW3187 : 'KW' '3187'; -KW3188 : 'KW' '3188'; -KW3189 : 'KW' '3189'; -KW3190 : 'KW' '3190'; -KW3191 : 'KW' '3191'; -KW3192 : 'KW' '3192'; -KW3193 : 'KW' '3193'; -KW3194 : 'KW' '3194'; -KW3195 : 'KW' '3195'; -KW3196 : 'KW' '3196'; -KW3197 : 'KW' '3197'; -KW3198 : 'KW' '3198'; -KW3199 : 'KW' '3199'; -KW3200 : 'KW' '3200'; -KW3201 : 'KW' '3201'; -KW3202 : 'KW' '3202'; -KW3203 : 'KW' '3203'; -KW3204 : 'KW' '3204'; -KW3205 : 'KW' '3205'; -KW3206 : 'KW' '3206'; -KW3207 : 'KW' '3207'; -KW3208 : 'KW' '3208'; -KW3209 : 'KW' '3209'; -KW3210 : 'KW' '3210'; -KW3211 : 'KW' '3211'; -KW3212 : 'KW' '3212'; -KW3213 : 'KW' '3213'; -KW3214 : 'KW' '3214'; -KW3215 : 'KW' '3215'; -KW3216 : 'KW' '3216'; -KW3217 : 'KW' '3217'; -KW3218 : 'KW' '3218'; -KW3219 : 'KW' '3219'; -KW3220 : 'KW' '3220'; -KW3221 : 'KW' '3221'; -KW3222 : 'KW' '3222'; -KW3223 : 'KW' '3223'; -KW3224 : 'KW' '3224'; -KW3225 : 'KW' '3225'; -KW3226 : 'KW' '3226'; -KW3227 : 'KW' '3227'; -KW3228 : 'KW' '3228'; -KW3229 : 'KW' '3229'; -KW3230 : 
'KW' '3230'; -KW3231 : 'KW' '3231'; -KW3232 : 'KW' '3232'; -KW3233 : 'KW' '3233'; -KW3234 : 'KW' '3234'; -KW3235 : 'KW' '3235'; -KW3236 : 'KW' '3236'; -KW3237 : 'KW' '3237'; -KW3238 : 'KW' '3238'; -KW3239 : 'KW' '3239'; -KW3240 : 'KW' '3240'; -KW3241 : 'KW' '3241'; -KW3242 : 'KW' '3242'; -KW3243 : 'KW' '3243'; -KW3244 : 'KW' '3244'; -KW3245 : 'KW' '3245'; -KW3246 : 'KW' '3246'; -KW3247 : 'KW' '3247'; -KW3248 : 'KW' '3248'; -KW3249 : 'KW' '3249'; -KW3250 : 'KW' '3250'; -KW3251 : 'KW' '3251'; -KW3252 : 'KW' '3252'; -KW3253 : 'KW' '3253'; -KW3254 : 'KW' '3254'; -KW3255 : 'KW' '3255'; -KW3256 : 'KW' '3256'; -KW3257 : 'KW' '3257'; -KW3258 : 'KW' '3258'; -KW3259 : 'KW' '3259'; -KW3260 : 'KW' '3260'; -KW3261 : 'KW' '3261'; -KW3262 : 'KW' '3262'; -KW3263 : 'KW' '3263'; -KW3264 : 'KW' '3264'; -KW3265 : 'KW' '3265'; -KW3266 : 'KW' '3266'; -KW3267 : 'KW' '3267'; -KW3268 : 'KW' '3268'; -KW3269 : 'KW' '3269'; -KW3270 : 'KW' '3270'; -KW3271 : 'KW' '3271'; -KW3272 : 'KW' '3272'; -KW3273 : 'KW' '3273'; -KW3274 : 'KW' '3274'; -KW3275 : 'KW' '3275'; -KW3276 : 'KW' '3276'; -KW3277 : 'KW' '3277'; -KW3278 : 'KW' '3278'; -KW3279 : 'KW' '3279'; -KW3280 : 'KW' '3280'; -KW3281 : 'KW' '3281'; -KW3282 : 'KW' '3282'; -KW3283 : 'KW' '3283'; -KW3284 : 'KW' '3284'; -KW3285 : 'KW' '3285'; -KW3286 : 'KW' '3286'; -KW3287 : 'KW' '3287'; -KW3288 : 'KW' '3288'; -KW3289 : 'KW' '3289'; -KW3290 : 'KW' '3290'; -KW3291 : 'KW' '3291'; -KW3292 : 'KW' '3292'; -KW3293 : 'KW' '3293'; -KW3294 : 'KW' '3294'; -KW3295 : 'KW' '3295'; -KW3296 : 'KW' '3296'; -KW3297 : 'KW' '3297'; -KW3298 : 'KW' '3298'; -KW3299 : 'KW' '3299'; -KW3300 : 'KW' '3300'; -KW3301 : 'KW' '3301'; -KW3302 : 'KW' '3302'; -KW3303 : 'KW' '3303'; -KW3304 : 'KW' '3304'; -KW3305 : 'KW' '3305'; -KW3306 : 'KW' '3306'; -KW3307 : 'KW' '3307'; -KW3308 : 'KW' '3308'; -KW3309 : 'KW' '3309'; -KW3310 : 'KW' '3310'; -KW3311 : 'KW' '3311'; -KW3312 : 'KW' '3312'; -KW3313 : 'KW' '3313'; -KW3314 : 'KW' '3314'; -KW3315 : 'KW' '3315'; -KW3316 : 'KW' '3316'; -KW3317 : 'KW' '3317'; -KW3318 : 'KW' '3318'; -KW3319 : 'KW' '3319'; -KW3320 : 'KW' '3320'; -KW3321 : 'KW' '3321'; -KW3322 : 'KW' '3322'; -KW3323 : 'KW' '3323'; -KW3324 : 'KW' '3324'; -KW3325 : 'KW' '3325'; -KW3326 : 'KW' '3326'; -KW3327 : 'KW' '3327'; -KW3328 : 'KW' '3328'; -KW3329 : 'KW' '3329'; -KW3330 : 'KW' '3330'; -KW3331 : 'KW' '3331'; -KW3332 : 'KW' '3332'; -KW3333 : 'KW' '3333'; -KW3334 : 'KW' '3334'; -KW3335 : 'KW' '3335'; -KW3336 : 'KW' '3336'; -KW3337 : 'KW' '3337'; -KW3338 : 'KW' '3338'; -KW3339 : 'KW' '3339'; -KW3340 : 'KW' '3340'; -KW3341 : 'KW' '3341'; -KW3342 : 'KW' '3342'; -KW3343 : 'KW' '3343'; -KW3344 : 'KW' '3344'; -KW3345 : 'KW' '3345'; -KW3346 : 'KW' '3346'; -KW3347 : 'KW' '3347'; -KW3348 : 'KW' '3348'; -KW3349 : 'KW' '3349'; -KW3350 : 'KW' '3350'; -KW3351 : 'KW' '3351'; -KW3352 : 'KW' '3352'; -KW3353 : 'KW' '3353'; -KW3354 : 'KW' '3354'; -KW3355 : 'KW' '3355'; -KW3356 : 'KW' '3356'; -KW3357 : 'KW' '3357'; -KW3358 : 'KW' '3358'; -KW3359 : 'KW' '3359'; -KW3360 : 'KW' '3360'; -KW3361 : 'KW' '3361'; -KW3362 : 'KW' '3362'; -KW3363 : 'KW' '3363'; -KW3364 : 'KW' '3364'; -KW3365 : 'KW' '3365'; -KW3366 : 'KW' '3366'; -KW3367 : 'KW' '3367'; -KW3368 : 'KW' '3368'; -KW3369 : 'KW' '3369'; -KW3370 : 'KW' '3370'; -KW3371 : 'KW' '3371'; -KW3372 : 'KW' '3372'; -KW3373 : 'KW' '3373'; -KW3374 : 'KW' '3374'; -KW3375 : 'KW' '3375'; -KW3376 : 'KW' '3376'; -KW3377 : 'KW' '3377'; -KW3378 : 'KW' '3378'; -KW3379 : 'KW' '3379'; -KW3380 : 'KW' '3380'; -KW3381 : 'KW' '3381'; -KW3382 : 'KW' '3382'; -KW3383 : 'KW' '3383'; -KW3384 : 'KW' '3384'; 
-KW3385 : 'KW' '3385'; -KW3386 : 'KW' '3386'; -KW3387 : 'KW' '3387'; -KW3388 : 'KW' '3388'; -KW3389 : 'KW' '3389'; -KW3390 : 'KW' '3390'; -KW3391 : 'KW' '3391'; -KW3392 : 'KW' '3392'; -KW3393 : 'KW' '3393'; -KW3394 : 'KW' '3394'; -KW3395 : 'KW' '3395'; -KW3396 : 'KW' '3396'; -KW3397 : 'KW' '3397'; -KW3398 : 'KW' '3398'; -KW3399 : 'KW' '3399'; -KW3400 : 'KW' '3400'; -KW3401 : 'KW' '3401'; -KW3402 : 'KW' '3402'; -KW3403 : 'KW' '3403'; -KW3404 : 'KW' '3404'; -KW3405 : 'KW' '3405'; -KW3406 : 'KW' '3406'; -KW3407 : 'KW' '3407'; -KW3408 : 'KW' '3408'; -KW3409 : 'KW' '3409'; -KW3410 : 'KW' '3410'; -KW3411 : 'KW' '3411'; -KW3412 : 'KW' '3412'; -KW3413 : 'KW' '3413'; -KW3414 : 'KW' '3414'; -KW3415 : 'KW' '3415'; -KW3416 : 'KW' '3416'; -KW3417 : 'KW' '3417'; -KW3418 : 'KW' '3418'; -KW3419 : 'KW' '3419'; -KW3420 : 'KW' '3420'; -KW3421 : 'KW' '3421'; -KW3422 : 'KW' '3422'; -KW3423 : 'KW' '3423'; -KW3424 : 'KW' '3424'; -KW3425 : 'KW' '3425'; -KW3426 : 'KW' '3426'; -KW3427 : 'KW' '3427'; -KW3428 : 'KW' '3428'; -KW3429 : 'KW' '3429'; -KW3430 : 'KW' '3430'; -KW3431 : 'KW' '3431'; -KW3432 : 'KW' '3432'; -KW3433 : 'KW' '3433'; -KW3434 : 'KW' '3434'; -KW3435 : 'KW' '3435'; -KW3436 : 'KW' '3436'; -KW3437 : 'KW' '3437'; -KW3438 : 'KW' '3438'; -KW3439 : 'KW' '3439'; -KW3440 : 'KW' '3440'; -KW3441 : 'KW' '3441'; -KW3442 : 'KW' '3442'; -KW3443 : 'KW' '3443'; -KW3444 : 'KW' '3444'; -KW3445 : 'KW' '3445'; -KW3446 : 'KW' '3446'; -KW3447 : 'KW' '3447'; -KW3448 : 'KW' '3448'; -KW3449 : 'KW' '3449'; -KW3450 : 'KW' '3450'; -KW3451 : 'KW' '3451'; -KW3452 : 'KW' '3452'; -KW3453 : 'KW' '3453'; -KW3454 : 'KW' '3454'; -KW3455 : 'KW' '3455'; -KW3456 : 'KW' '3456'; -KW3457 : 'KW' '3457'; -KW3458 : 'KW' '3458'; -KW3459 : 'KW' '3459'; -KW3460 : 'KW' '3460'; -KW3461 : 'KW' '3461'; -KW3462 : 'KW' '3462'; -KW3463 : 'KW' '3463'; -KW3464 : 'KW' '3464'; -KW3465 : 'KW' '3465'; -KW3466 : 'KW' '3466'; -KW3467 : 'KW' '3467'; -KW3468 : 'KW' '3468'; -KW3469 : 'KW' '3469'; -KW3470 : 'KW' '3470'; -KW3471 : 'KW' '3471'; -KW3472 : 'KW' '3472'; -KW3473 : 'KW' '3473'; -KW3474 : 'KW' '3474'; -KW3475 : 'KW' '3475'; -KW3476 : 'KW' '3476'; -KW3477 : 'KW' '3477'; -KW3478 : 'KW' '3478'; -KW3479 : 'KW' '3479'; -KW3480 : 'KW' '3480'; -KW3481 : 'KW' '3481'; -KW3482 : 'KW' '3482'; -KW3483 : 'KW' '3483'; -KW3484 : 'KW' '3484'; -KW3485 : 'KW' '3485'; -KW3486 : 'KW' '3486'; -KW3487 : 'KW' '3487'; -KW3488 : 'KW' '3488'; -KW3489 : 'KW' '3489'; -KW3490 : 'KW' '3490'; -KW3491 : 'KW' '3491'; -KW3492 : 'KW' '3492'; -KW3493 : 'KW' '3493'; -KW3494 : 'KW' '3494'; -KW3495 : 'KW' '3495'; -KW3496 : 'KW' '3496'; -KW3497 : 'KW' '3497'; -KW3498 : 'KW' '3498'; -KW3499 : 'KW' '3499'; -KW3500 : 'KW' '3500'; -KW3501 : 'KW' '3501'; -KW3502 : 'KW' '3502'; -KW3503 : 'KW' '3503'; -KW3504 : 'KW' '3504'; -KW3505 : 'KW' '3505'; -KW3506 : 'KW' '3506'; -KW3507 : 'KW' '3507'; -KW3508 : 'KW' '3508'; -KW3509 : 'KW' '3509'; -KW3510 : 'KW' '3510'; -KW3511 : 'KW' '3511'; -KW3512 : 'KW' '3512'; -KW3513 : 'KW' '3513'; -KW3514 : 'KW' '3514'; -KW3515 : 'KW' '3515'; -KW3516 : 'KW' '3516'; -KW3517 : 'KW' '3517'; -KW3518 : 'KW' '3518'; -KW3519 : 'KW' '3519'; -KW3520 : 'KW' '3520'; -KW3521 : 'KW' '3521'; -KW3522 : 'KW' '3522'; -KW3523 : 'KW' '3523'; -KW3524 : 'KW' '3524'; -KW3525 : 'KW' '3525'; -KW3526 : 'KW' '3526'; -KW3527 : 'KW' '3527'; -KW3528 : 'KW' '3528'; -KW3529 : 'KW' '3529'; -KW3530 : 'KW' '3530'; -KW3531 : 'KW' '3531'; -KW3532 : 'KW' '3532'; -KW3533 : 'KW' '3533'; -KW3534 : 'KW' '3534'; -KW3535 : 'KW' '3535'; -KW3536 : 'KW' '3536'; -KW3537 : 'KW' '3537'; -KW3538 : 'KW' '3538'; -KW3539 : 
'KW' '3539'; -KW3540 : 'KW' '3540'; -KW3541 : 'KW' '3541'; -KW3542 : 'KW' '3542'; -KW3543 : 'KW' '3543'; -KW3544 : 'KW' '3544'; -KW3545 : 'KW' '3545'; -KW3546 : 'KW' '3546'; -KW3547 : 'KW' '3547'; -KW3548 : 'KW' '3548'; -KW3549 : 'KW' '3549'; -KW3550 : 'KW' '3550'; -KW3551 : 'KW' '3551'; -KW3552 : 'KW' '3552'; -KW3553 : 'KW' '3553'; -KW3554 : 'KW' '3554'; -KW3555 : 'KW' '3555'; -KW3556 : 'KW' '3556'; -KW3557 : 'KW' '3557'; -KW3558 : 'KW' '3558'; -KW3559 : 'KW' '3559'; -KW3560 : 'KW' '3560'; -KW3561 : 'KW' '3561'; -KW3562 : 'KW' '3562'; -KW3563 : 'KW' '3563'; -KW3564 : 'KW' '3564'; -KW3565 : 'KW' '3565'; -KW3566 : 'KW' '3566'; -KW3567 : 'KW' '3567'; -KW3568 : 'KW' '3568'; -KW3569 : 'KW' '3569'; -KW3570 : 'KW' '3570'; -KW3571 : 'KW' '3571'; -KW3572 : 'KW' '3572'; -KW3573 : 'KW' '3573'; -KW3574 : 'KW' '3574'; -KW3575 : 'KW' '3575'; -KW3576 : 'KW' '3576'; -KW3577 : 'KW' '3577'; -KW3578 : 'KW' '3578'; -KW3579 : 'KW' '3579'; -KW3580 : 'KW' '3580'; -KW3581 : 'KW' '3581'; -KW3582 : 'KW' '3582'; -KW3583 : 'KW' '3583'; -KW3584 : 'KW' '3584'; -KW3585 : 'KW' '3585'; -KW3586 : 'KW' '3586'; -KW3587 : 'KW' '3587'; -KW3588 : 'KW' '3588'; -KW3589 : 'KW' '3589'; -KW3590 : 'KW' '3590'; -KW3591 : 'KW' '3591'; -KW3592 : 'KW' '3592'; -KW3593 : 'KW' '3593'; -KW3594 : 'KW' '3594'; -KW3595 : 'KW' '3595'; -KW3596 : 'KW' '3596'; -KW3597 : 'KW' '3597'; -KW3598 : 'KW' '3598'; -KW3599 : 'KW' '3599'; -KW3600 : 'KW' '3600'; -KW3601 : 'KW' '3601'; -KW3602 : 'KW' '3602'; -KW3603 : 'KW' '3603'; -KW3604 : 'KW' '3604'; -KW3605 : 'KW' '3605'; -KW3606 : 'KW' '3606'; -KW3607 : 'KW' '3607'; -KW3608 : 'KW' '3608'; -KW3609 : 'KW' '3609'; -KW3610 : 'KW' '3610'; -KW3611 : 'KW' '3611'; -KW3612 : 'KW' '3612'; -KW3613 : 'KW' '3613'; -KW3614 : 'KW' '3614'; -KW3615 : 'KW' '3615'; -KW3616 : 'KW' '3616'; -KW3617 : 'KW' '3617'; -KW3618 : 'KW' '3618'; -KW3619 : 'KW' '3619'; -KW3620 : 'KW' '3620'; -KW3621 : 'KW' '3621'; -KW3622 : 'KW' '3622'; -KW3623 : 'KW' '3623'; -KW3624 : 'KW' '3624'; -KW3625 : 'KW' '3625'; -KW3626 : 'KW' '3626'; -KW3627 : 'KW' '3627'; -KW3628 : 'KW' '3628'; -KW3629 : 'KW' '3629'; -KW3630 : 'KW' '3630'; -KW3631 : 'KW' '3631'; -KW3632 : 'KW' '3632'; -KW3633 : 'KW' '3633'; -KW3634 : 'KW' '3634'; -KW3635 : 'KW' '3635'; -KW3636 : 'KW' '3636'; -KW3637 : 'KW' '3637'; -KW3638 : 'KW' '3638'; -KW3639 : 'KW' '3639'; -KW3640 : 'KW' '3640'; -KW3641 : 'KW' '3641'; -KW3642 : 'KW' '3642'; -KW3643 : 'KW' '3643'; -KW3644 : 'KW' '3644'; -KW3645 : 'KW' '3645'; -KW3646 : 'KW' '3646'; -KW3647 : 'KW' '3647'; -KW3648 : 'KW' '3648'; -KW3649 : 'KW' '3649'; -KW3650 : 'KW' '3650'; -KW3651 : 'KW' '3651'; -KW3652 : 'KW' '3652'; -KW3653 : 'KW' '3653'; -KW3654 : 'KW' '3654'; -KW3655 : 'KW' '3655'; -KW3656 : 'KW' '3656'; -KW3657 : 'KW' '3657'; -KW3658 : 'KW' '3658'; -KW3659 : 'KW' '3659'; -KW3660 : 'KW' '3660'; -KW3661 : 'KW' '3661'; -KW3662 : 'KW' '3662'; -KW3663 : 'KW' '3663'; -KW3664 : 'KW' '3664'; -KW3665 : 'KW' '3665'; -KW3666 : 'KW' '3666'; -KW3667 : 'KW' '3667'; -KW3668 : 'KW' '3668'; -KW3669 : 'KW' '3669'; -KW3670 : 'KW' '3670'; -KW3671 : 'KW' '3671'; -KW3672 : 'KW' '3672'; -KW3673 : 'KW' '3673'; -KW3674 : 'KW' '3674'; -KW3675 : 'KW' '3675'; -KW3676 : 'KW' '3676'; -KW3677 : 'KW' '3677'; -KW3678 : 'KW' '3678'; -KW3679 : 'KW' '3679'; -KW3680 : 'KW' '3680'; -KW3681 : 'KW' '3681'; -KW3682 : 'KW' '3682'; -KW3683 : 'KW' '3683'; -KW3684 : 'KW' '3684'; -KW3685 : 'KW' '3685'; -KW3686 : 'KW' '3686'; -KW3687 : 'KW' '3687'; -KW3688 : 'KW' '3688'; -KW3689 : 'KW' '3689'; -KW3690 : 'KW' '3690'; -KW3691 : 'KW' '3691'; -KW3692 : 'KW' '3692'; -KW3693 : 'KW' '3693'; 
-KW3694 : 'KW' '3694'; -KW3695 : 'KW' '3695'; -KW3696 : 'KW' '3696'; -KW3697 : 'KW' '3697'; -KW3698 : 'KW' '3698'; -KW3699 : 'KW' '3699'; -KW3700 : 'KW' '3700'; -KW3701 : 'KW' '3701'; -KW3702 : 'KW' '3702'; -KW3703 : 'KW' '3703'; -KW3704 : 'KW' '3704'; -KW3705 : 'KW' '3705'; -KW3706 : 'KW' '3706'; -KW3707 : 'KW' '3707'; -KW3708 : 'KW' '3708'; -KW3709 : 'KW' '3709'; -KW3710 : 'KW' '3710'; -KW3711 : 'KW' '3711'; -KW3712 : 'KW' '3712'; -KW3713 : 'KW' '3713'; -KW3714 : 'KW' '3714'; -KW3715 : 'KW' '3715'; -KW3716 : 'KW' '3716'; -KW3717 : 'KW' '3717'; -KW3718 : 'KW' '3718'; -KW3719 : 'KW' '3719'; -KW3720 : 'KW' '3720'; -KW3721 : 'KW' '3721'; -KW3722 : 'KW' '3722'; -KW3723 : 'KW' '3723'; -KW3724 : 'KW' '3724'; -KW3725 : 'KW' '3725'; -KW3726 : 'KW' '3726'; -KW3727 : 'KW' '3727'; -KW3728 : 'KW' '3728'; -KW3729 : 'KW' '3729'; -KW3730 : 'KW' '3730'; -KW3731 : 'KW' '3731'; -KW3732 : 'KW' '3732'; -KW3733 : 'KW' '3733'; -KW3734 : 'KW' '3734'; -KW3735 : 'KW' '3735'; -KW3736 : 'KW' '3736'; -KW3737 : 'KW' '3737'; -KW3738 : 'KW' '3738'; -KW3739 : 'KW' '3739'; -KW3740 : 'KW' '3740'; -KW3741 : 'KW' '3741'; -KW3742 : 'KW' '3742'; -KW3743 : 'KW' '3743'; -KW3744 : 'KW' '3744'; -KW3745 : 'KW' '3745'; -KW3746 : 'KW' '3746'; -KW3747 : 'KW' '3747'; -KW3748 : 'KW' '3748'; -KW3749 : 'KW' '3749'; -KW3750 : 'KW' '3750'; -KW3751 : 'KW' '3751'; -KW3752 : 'KW' '3752'; -KW3753 : 'KW' '3753'; -KW3754 : 'KW' '3754'; -KW3755 : 'KW' '3755'; -KW3756 : 'KW' '3756'; -KW3757 : 'KW' '3757'; -KW3758 : 'KW' '3758'; -KW3759 : 'KW' '3759'; -KW3760 : 'KW' '3760'; -KW3761 : 'KW' '3761'; -KW3762 : 'KW' '3762'; -KW3763 : 'KW' '3763'; -KW3764 : 'KW' '3764'; -KW3765 : 'KW' '3765'; -KW3766 : 'KW' '3766'; -KW3767 : 'KW' '3767'; -KW3768 : 'KW' '3768'; -KW3769 : 'KW' '3769'; -KW3770 : 'KW' '3770'; -KW3771 : 'KW' '3771'; -KW3772 : 'KW' '3772'; -KW3773 : 'KW' '3773'; -KW3774 : 'KW' '3774'; -KW3775 : 'KW' '3775'; -KW3776 : 'KW' '3776'; -KW3777 : 'KW' '3777'; -KW3778 : 'KW' '3778'; -KW3779 : 'KW' '3779'; -KW3780 : 'KW' '3780'; -KW3781 : 'KW' '3781'; -KW3782 : 'KW' '3782'; -KW3783 : 'KW' '3783'; -KW3784 : 'KW' '3784'; -KW3785 : 'KW' '3785'; -KW3786 : 'KW' '3786'; -KW3787 : 'KW' '3787'; -KW3788 : 'KW' '3788'; -KW3789 : 'KW' '3789'; -KW3790 : 'KW' '3790'; -KW3791 : 'KW' '3791'; -KW3792 : 'KW' '3792'; -KW3793 : 'KW' '3793'; -KW3794 : 'KW' '3794'; -KW3795 : 'KW' '3795'; -KW3796 : 'KW' '3796'; -KW3797 : 'KW' '3797'; -KW3798 : 'KW' '3798'; -KW3799 : 'KW' '3799'; -KW3800 : 'KW' '3800'; -KW3801 : 'KW' '3801'; -KW3802 : 'KW' '3802'; -KW3803 : 'KW' '3803'; -KW3804 : 'KW' '3804'; -KW3805 : 'KW' '3805'; -KW3806 : 'KW' '3806'; -KW3807 : 'KW' '3807'; -KW3808 : 'KW' '3808'; -KW3809 : 'KW' '3809'; -KW3810 : 'KW' '3810'; -KW3811 : 'KW' '3811'; -KW3812 : 'KW' '3812'; -KW3813 : 'KW' '3813'; -KW3814 : 'KW' '3814'; -KW3815 : 'KW' '3815'; -KW3816 : 'KW' '3816'; -KW3817 : 'KW' '3817'; -KW3818 : 'KW' '3818'; -KW3819 : 'KW' '3819'; -KW3820 : 'KW' '3820'; -KW3821 : 'KW' '3821'; -KW3822 : 'KW' '3822'; -KW3823 : 'KW' '3823'; -KW3824 : 'KW' '3824'; -KW3825 : 'KW' '3825'; -KW3826 : 'KW' '3826'; -KW3827 : 'KW' '3827'; -KW3828 : 'KW' '3828'; -KW3829 : 'KW' '3829'; -KW3830 : 'KW' '3830'; -KW3831 : 'KW' '3831'; -KW3832 : 'KW' '3832'; -KW3833 : 'KW' '3833'; -KW3834 : 'KW' '3834'; -KW3835 : 'KW' '3835'; -KW3836 : 'KW' '3836'; -KW3837 : 'KW' '3837'; -KW3838 : 'KW' '3838'; -KW3839 : 'KW' '3839'; -KW3840 : 'KW' '3840'; -KW3841 : 'KW' '3841'; -KW3842 : 'KW' '3842'; -KW3843 : 'KW' '3843'; -KW3844 : 'KW' '3844'; -KW3845 : 'KW' '3845'; -KW3846 : 'KW' '3846'; -KW3847 : 'KW' '3847'; -KW3848 : 
'KW' '3848'; -KW3849 : 'KW' '3849'; -KW3850 : 'KW' '3850'; -KW3851 : 'KW' '3851'; -KW3852 : 'KW' '3852'; -KW3853 : 'KW' '3853'; -KW3854 : 'KW' '3854'; -KW3855 : 'KW' '3855'; -KW3856 : 'KW' '3856'; -KW3857 : 'KW' '3857'; -KW3858 : 'KW' '3858'; -KW3859 : 'KW' '3859'; -KW3860 : 'KW' '3860'; -KW3861 : 'KW' '3861'; -KW3862 : 'KW' '3862'; -KW3863 : 'KW' '3863'; -KW3864 : 'KW' '3864'; -KW3865 : 'KW' '3865'; -KW3866 : 'KW' '3866'; -KW3867 : 'KW' '3867'; -KW3868 : 'KW' '3868'; -KW3869 : 'KW' '3869'; -KW3870 : 'KW' '3870'; -KW3871 : 'KW' '3871'; -KW3872 : 'KW' '3872'; -KW3873 : 'KW' '3873'; -KW3874 : 'KW' '3874'; -KW3875 : 'KW' '3875'; -KW3876 : 'KW' '3876'; -KW3877 : 'KW' '3877'; -KW3878 : 'KW' '3878'; -KW3879 : 'KW' '3879'; -KW3880 : 'KW' '3880'; -KW3881 : 'KW' '3881'; -KW3882 : 'KW' '3882'; -KW3883 : 'KW' '3883'; -KW3884 : 'KW' '3884'; -KW3885 : 'KW' '3885'; -KW3886 : 'KW' '3886'; -KW3887 : 'KW' '3887'; -KW3888 : 'KW' '3888'; -KW3889 : 'KW' '3889'; -KW3890 : 'KW' '3890'; -KW3891 : 'KW' '3891'; -KW3892 : 'KW' '3892'; -KW3893 : 'KW' '3893'; -KW3894 : 'KW' '3894'; -KW3895 : 'KW' '3895'; -KW3896 : 'KW' '3896'; -KW3897 : 'KW' '3897'; -KW3898 : 'KW' '3898'; -KW3899 : 'KW' '3899'; -KW3900 : 'KW' '3900'; -KW3901 : 'KW' '3901'; -KW3902 : 'KW' '3902'; -KW3903 : 'KW' '3903'; -KW3904 : 'KW' '3904'; -KW3905 : 'KW' '3905'; -KW3906 : 'KW' '3906'; -KW3907 : 'KW' '3907'; -KW3908 : 'KW' '3908'; -KW3909 : 'KW' '3909'; -KW3910 : 'KW' '3910'; -KW3911 : 'KW' '3911'; -KW3912 : 'KW' '3912'; -KW3913 : 'KW' '3913'; -KW3914 : 'KW' '3914'; -KW3915 : 'KW' '3915'; -KW3916 : 'KW' '3916'; -KW3917 : 'KW' '3917'; -KW3918 : 'KW' '3918'; -KW3919 : 'KW' '3919'; -KW3920 : 'KW' '3920'; -KW3921 : 'KW' '3921'; -KW3922 : 'KW' '3922'; -KW3923 : 'KW' '3923'; -KW3924 : 'KW' '3924'; -KW3925 : 'KW' '3925'; -KW3926 : 'KW' '3926'; -KW3927 : 'KW' '3927'; -KW3928 : 'KW' '3928'; -KW3929 : 'KW' '3929'; -KW3930 : 'KW' '3930'; -KW3931 : 'KW' '3931'; -KW3932 : 'KW' '3932'; -KW3933 : 'KW' '3933'; -KW3934 : 'KW' '3934'; -KW3935 : 'KW' '3935'; -KW3936 : 'KW' '3936'; -KW3937 : 'KW' '3937'; -KW3938 : 'KW' '3938'; -KW3939 : 'KW' '3939'; -KW3940 : 'KW' '3940'; -KW3941 : 'KW' '3941'; -KW3942 : 'KW' '3942'; -KW3943 : 'KW' '3943'; -KW3944 : 'KW' '3944'; -KW3945 : 'KW' '3945'; -KW3946 : 'KW' '3946'; -KW3947 : 'KW' '3947'; -KW3948 : 'KW' '3948'; -KW3949 : 'KW' '3949'; -KW3950 : 'KW' '3950'; -KW3951 : 'KW' '3951'; -KW3952 : 'KW' '3952'; -KW3953 : 'KW' '3953'; -KW3954 : 'KW' '3954'; -KW3955 : 'KW' '3955'; -KW3956 : 'KW' '3956'; -KW3957 : 'KW' '3957'; -KW3958 : 'KW' '3958'; -KW3959 : 'KW' '3959'; -KW3960 : 'KW' '3960'; -KW3961 : 'KW' '3961'; -KW3962 : 'KW' '3962'; -KW3963 : 'KW' '3963'; -KW3964 : 'KW' '3964'; -KW3965 : 'KW' '3965'; -KW3966 : 'KW' '3966'; -KW3967 : 'KW' '3967'; -KW3968 : 'KW' '3968'; -KW3969 : 'KW' '3969'; -KW3970 : 'KW' '3970'; -KW3971 : 'KW' '3971'; -KW3972 : 'KW' '3972'; -KW3973 : 'KW' '3973'; -KW3974 : 'KW' '3974'; -KW3975 : 'KW' '3975'; -KW3976 : 'KW' '3976'; -KW3977 : 'KW' '3977'; -KW3978 : 'KW' '3978'; -KW3979 : 'KW' '3979'; -KW3980 : 'KW' '3980'; -KW3981 : 'KW' '3981'; -KW3982 : 'KW' '3982'; -KW3983 : 'KW' '3983'; -KW3984 : 'KW' '3984'; -KW3985 : 'KW' '3985'; -KW3986 : 'KW' '3986'; -KW3987 : 'KW' '3987'; -KW3988 : 'KW' '3988'; -KW3989 : 'KW' '3989'; -KW3990 : 'KW' '3990'; -KW3991 : 'KW' '3991'; -KW3992 : 'KW' '3992'; -KW3993 : 'KW' '3993'; -KW3994 : 'KW' '3994'; -KW3995 : 'KW' '3995'; -KW3996 : 'KW' '3996'; -KW3997 : 'KW' '3997'; -KW3998 : 'KW' '3998'; -KW3999 : 'KW' '3999'; diff --git 
a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyClosure.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyClosure.st deleted file mode 100644 index 52044892e..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyClosure.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -CMT : '//' .*? '\n' CMT*?; -WS : (' '|'\t')+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyConfigs.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyConfigs.st deleted file mode 100644 index 4b37c0453..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyConfigs.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -I : .*? ('a' | 'ab') {} ; -WS : (' '|'\n') -> skip ; -J : . {}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyOptional.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyOptional.st deleted file mode 100644 index 1e7ecad97..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyOptional.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -CMT : '//' .*? '\n' CMT??; -WS : (' '|'\t')+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyPositiveClosure.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyPositiveClosure.st deleted file mode 100644 index a0de047f8..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyPositiveClosure.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -CMT : ('//' .*? '\n')+?; -WS : (' '|'\t')+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyTermination1.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyTermination1.st deleted file mode 100644 index 25bcd9b13..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyTermination1.st +++ /dev/null @@ -1,2 +0,0 @@ -lexer grammar ; -STRING : '"' ('""' | .)*? '"'; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyTermination2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyTermination2.st deleted file mode 100644 index 2028d9b52..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/NonGreedyTermination2.st +++ /dev/null @@ -1,2 +0,0 @@ -lexer grammar ; -STRING : '"' ('""' | .)+? '"'; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/Parentheses.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/Parentheses.st deleted file mode 100644 index d280a188a..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/Parentheses.st +++ /dev/null @@ -1,7 +0,0 @@ -lexer grammar ; -START_BLOCK: '-.-.-'; -ID : (LETTER SEPARATOR) (LETTER SEPARATOR)+; -fragment LETTER: L_A|L_K; -fragment L_A: '.-'; -fragment L_K: '-.-'; -SEPARATOR: '!'; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/PositionAdjustingLexer.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/PositionAdjustingLexer.st deleted file mode 100644 index aa4f642bd..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/PositionAdjustingLexer.st +++ /dev/null @@ -1,34 +0,0 @@ -lexer grammar PositionAdjustingLexer; - -@members { - -} - -ASSIGN : '=' ; -PLUS_ASSIGN : '+=' ; -LCURLY: '{'; - -// 'tokens' followed by '{' -TOKENS : 'tokens' IGNORED '{'; - -// IDENTIFIER followed by '+=' or '=' -LABEL - : IDENTIFIER IGNORED '+'? 
'=' - ; - -IDENTIFIER - : [a-zA-Z_] [a-zA-Z0-9_]* - ; - -fragment -IGNORED - : [ \t\r\n]* - ; - -NEWLINE - : [\r\n]+ -> skip - ; - -WS - : [ \t]+ -> skip - ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/QuoteTranslation.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/QuoteTranslation.st deleted file mode 100644 index 77e4b5c1a..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/QuoteTranslation.st +++ /dev/null @@ -1,2 +0,0 @@ -lexer grammar ; -QUOTE : '"' ; // make sure this compiles diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RecursiveLexerRuleRefWithWildcardPlus.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RecursiveLexerRuleRefWithWildcardPlus.st deleted file mode 100644 index 0eeb4b13f..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RecursiveLexerRuleRefWithWildcardPlus.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -CMT : '/*' (CMT | .)+? '*/' ; -WS : (' '|'\n')+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RecursiveLexerRuleRefWithWildcardStar.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RecursiveLexerRuleRefWithWildcardStar.st deleted file mode 100644 index 865beab3e..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RecursiveLexerRuleRefWithWildcardStar.st +++ /dev/null @@ -1,3 +0,0 @@ -lexer grammar ; -CMT : '/*' (CMT | .)*? '*/' ; -WS : (' '|'\n')+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RefToRuleDoesNotSetTokenNorEmitAnother.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RefToRuleDoesNotSetTokenNorEmitAnother.st deleted file mode 100644 index 4bb5beb33..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/RefToRuleDoesNotSetTokenNorEmitAnother.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -A : '-' I ; -I : '0'..'9'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/Slashes.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/Slashes.st deleted file mode 100644 index 457e0dbc9..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/Slashes.st +++ /dev/null @@ -1,6 +0,0 @@ -lexer grammar ; -Backslash : '\\\\'; -Slash : '/'; -Vee : '\\\\/'; -Wedge : '/\\\\'; -WS : [ \t] -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/ZeroLengthToken.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/ZeroLengthToken.st deleted file mode 100644 index 60b0f086a..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/LexerExec/ZeroLengthToken.st +++ /dev/null @@ -1,9 +0,0 @@ -lexer grammar ; -BeginString - : '\'' -> more, pushMode(StringMode) - ; -mode StringMode; - StringMode_X : 'x' -> more; - StringMode_Done : -> more, mode(EndStringMode); -mode EndStringMode; - EndString : '\'' -> popMode; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/Basic.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/Basic.st deleted file mode 100644 index 589fbd7f7..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/Basic.st +++ /dev/null @@ -1,23 +0,0 @@ -grammar ; -@parser::header { - -} - -@parser::members { - -} - -s -@after { - - -} - : r=a ; -a : INT INT - | ID - ; -MULT: '*' ; -ADD : '+' ; -INT : [0-9]+ ; -ID : [a-z]+ ; -WS : [ \t\n]+ -> skip ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/LR.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/LR.st deleted file mode 100644 index 
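Several of the LexerExec templates above (the NonGreedy* and RecursiveLexerRuleRef* families) exercise non-greedy subrules and lexer rules that reference themselves; their headers read simply "lexer grammar ;" because the grammar name and the action bodies were apparently supplied by the now-deleted generator. A self-contained sketch of that pattern, using only standard ANTLR 4 syntax and an illustrative name:

    lexer grammar CommentDemo;       // hypothetical name; the raw templates leave it blank
    CMT : '/*' (CMT | .)*? '*/' ;    // the rule references itself; *? is a non-greedy closure,
                                     // so the subrule consumes as little as possible before '*/'
    WS  : (' '|'\n')+ ;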
03bec4568..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/LR.st +++ /dev/null @@ -1,24 +0,0 @@ -grammar ; -@parser::header { - -} - -@parser::members { - -} - -s -@after { - - -} - : r=e ; -e : e op='*' e - | e op='+' e - | INT - ; -MULT: '*' ; -ADD : '+' ; -INT : [0-9]+ ; -ID : [a-z]+ ; -WS : [ \t\n]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/LRWithLabels.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/LRWithLabels.st deleted file mode 100644 index 635088bd2..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/LRWithLabels.st +++ /dev/null @@ -1,24 +0,0 @@ -grammar ; -@parser::header { - -} - -@parser::members { - -} - -s -@after { - - -} - : r=e ; -e : e '(' eList ')' # Call - | INT # Int - ; -eList : e (',' e)* ; -MULT: '*' ; -ADD : '+' ; -INT : [0-9]+ ; -ID : [a-z]+ ; -WS : [ \t\n]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/RuleGetters.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/RuleGetters.st deleted file mode 100644 index 3eff8165d..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/RuleGetters.st +++ /dev/null @@ -1,24 +0,0 @@ -grammar ; -@parser::header { - -} - -@parser::members { - -} - -s -@after { - - -} - : r=a ; -a : b b // forces list - | b // a list still - ; -b : ID | INT; -MULT: '*' ; -ADD : '+' ; -INT : [0-9]+ ; -ID : [a-z]+ ; -WS : [ \t\n]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/TokenGetters.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/TokenGetters.st deleted file mode 100644 index 3d89d2b2e..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Listeners/TokenGetters.st +++ /dev/null @@ -1,23 +0,0 @@ -grammar ; -@parser::header { - -} - -@parser::members { - -} - -s -@after { - - -} - : r=a ; -a : INT INT - | ID - ; -MULT: '*' ; -ADD : '+' ; -INT : [0-9]+ ; -ID : [a-z]+ ; -WS : [ \t\n]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/2AltLoop.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/2AltLoop.st deleted file mode 100644 index 332b4b8a0..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/2AltLoop.st +++ /dev/null @@ -1,11 +0,0 @@ -grammar ; -s -@init { - -} -@after { - -} - : r=a ; -a : ('x' | 'y')* 'z' - ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/2Alts.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/2Alts.st deleted file mode 100644 index aa5e44672..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/2Alts.st +++ /dev/null @@ -1,11 +0,0 @@ -grammar ; -s -@init { - -} -@after { - -} - : r=a ; -a : 'x' | 'y' - ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/ExtraToken.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/ExtraToken.st deleted file mode 100644 index a7620ea07..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/ExtraToken.st +++ /dev/null @@ -1,14 +0,0 @@ -grammar ; -s -@init { - -} -@after { - -} - : r=a ; -a : 'x' 'y' - ; -Z : 'z' - ; - diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/NoViableAlt.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/NoViableAlt.st deleted file mode 100644 index be903effe..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/NoViableAlt.st +++ /dev/null @@ -1,14 +0,0 @@ -grammar ; -s -@init { - -} -@after { - -} - : r=a ; -a : 'x' | 'y' - ; -Z : 'z' - ; - \ No newline at 
end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/RuleRef.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/RuleRef.st deleted file mode 100644 index e35e3a801..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/RuleRef.st +++ /dev/null @@ -1,13 +0,0 @@ -grammar ; -s -@init { - -} -@after { - -} - : r=a ; -a : b 'x' - ; -b : 'y' - ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/Sync.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/Sync.st deleted file mode 100644 index 9584bedb0..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/Sync.st +++ /dev/null @@ -1,13 +0,0 @@ -grammar ; -s -@init { - -} -@after { - -} - : r=a ; -a : 'x' 'y'* '!' - ; -Z : 'z' - ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/Token2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/Token2.st deleted file mode 100644 index 979f30694..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/Token2.st +++ /dev/null @@ -1,11 +0,0 @@ -grammar ; -s -@init { - -} -@after { - -} - : r=a ; -a : 'x' 'y' - ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/TokenAndRuleContextString.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/TokenAndRuleContextString.st deleted file mode 100644 index 8903bd5bb..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParseTrees/TokenAndRuleContextString.st +++ /dev/null @@ -1,12 +0,0 @@ -grammar ; -s -@init { - -} -@after { - -} - : r=a ; -a : 'x' { - -} ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ConjuringUpToken.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ConjuringUpToken.st deleted file mode 100644 index c4ba9d33a..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ConjuringUpToken.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' x='b' {} 'c' ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ConjuringUpTokenFromSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ConjuringUpTokenFromSet.st deleted file mode 100644 index 5030a368d..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ConjuringUpTokenFromSet.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' x=('b'|'c') {} 'd' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ContextListGetters.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ContextListGetters.st deleted file mode 100644 index 225cb5d9b..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/ContextListGetters.st +++ /dev/null @@ -1,7 +0,0 @@ -grammar ; -@parser::members{ - -} -s : (a | b)+; -a : 'a' {}; -b : 'b' {}; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/DuplicatedLeftRecursiveCall.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/DuplicatedLeftRecursiveCall.st deleted file mode 100644 index b0f163958..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/DuplicatedLeftRecursiveCall.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar ; -start : expr EOF; -expr : 'x' - | expr expr - ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/InvalidATNStateRemoval.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/InvalidATNStateRemoval.st deleted file mode 100644 index bd9e2f93f..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/InvalidATNStateRemoval.st +++ /dev/null @@ -1,5 
+0,0 @@ -grammar ; -start : ID ':' expr; -expr : primary expr? {} | expr '->' ID; -primary : ID; -ID : [a-z]+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/InvalidEmptyInput.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/InvalidEmptyInput.st deleted file mode 100644 index 7e4e111ee..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/InvalidEmptyInput.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -start : ID+; -ID : [a-z]+; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL1ErrorInfo.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL1ErrorInfo.st deleted file mode 100644 index ebd21c8b3..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL1ErrorInfo.st +++ /dev/null @@ -1,14 +0,0 @@ -grammar ; -start : animal (AND acClass)? service EOF; -animal : (DOG | CAT ); -service : (HARDWARE | SOFTWARE) ; -AND : 'and'; -DOG : 'dog'; -CAT : 'cat'; -HARDWARE: 'hardware'; -SOFTWARE: 'software'; -WS : ' ' -> skip ; -acClass -@init -{} - : ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL2.st deleted file mode 100644 index a6ea5030b..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL2.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar ; -a : 'a' 'b' - | 'a' 'c' -; -q : 'e' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL3.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL3.st deleted file mode 100644 index 412d44c02..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LL3.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar ; -a : 'a' 'b'* 'c' - | 'a' 'b' 'd' -; -q : 'e' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LLStar.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LLStar.st deleted file mode 100644 index bc2e46412..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/LLStar.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar ; -a : 'a'+ 'b' - | 'a'+ 'c' -; -q : 'e' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionBeforeLoop.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionBeforeLoop.st deleted file mode 100644 index d1ee35ca9..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionBeforeLoop.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' 'b'* 'c'; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionBeforeLoop2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionBeforeLoop2.st deleted file mode 100644 index 1c4c62b21..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionBeforeLoop2.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' ('b'|'z'{})* 'c'; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionDuringLoop.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionDuringLoop.st deleted file mode 100644 index b7417c923..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionDuringLoop.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' 'b'* 'c' ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionDuringLoop2.st 
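The ParserErrors templates above are deliberately tiny grammars; the deleted generated tests fed them slightly malformed input to exercise the default error strategy's recovery behaviors such as single- and multi-token deletion. Filled in with a hypothetical name, the single-token-deletion shape looks like:

    grammar RecoveryDemo;      // hypothetical name; the raw template reads just "grammar ;"
    a : 'a' 'b'* 'c' ;         // well-formed input is 'a', any number of 'b', then 'c';
                               // one stray token before 'c' can be reported and then dropped
                               // by the default error strategy so the parse still completes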
b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionDuringLoop2.st deleted file mode 100644 index d0404f1c6..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/MultiTokenDeletionDuringLoop2.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' ('b'|'z'{})* 'c' ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/NoViableAltAvoidance.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/NoViableAltAvoidance.st deleted file mode 100644 index 139e3ded2..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/NoViableAltAvoidance.st +++ /dev/null @@ -1,7 +0,0 @@ -grammar ; -s : e '!' ; -e : 'a' 'b' - | 'a' - ; -DOT : '.' ; -WS : [ \t\r\n]+ -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleSetInsertion.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleSetInsertion.st deleted file mode 100644 index d5f724b4a..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleSetInsertion.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' ('b'|'c') 'd' ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleSetInsertionConsumption.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleSetInsertionConsumption.st deleted file mode 100644 index 3e8ac85f6..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleSetInsertionConsumption.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -myset: ('b'|'c') ; -a: 'a' myset 'd' {} ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletion.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletion.st deleted file mode 100644 index daebfb144..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletion.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' 'b' ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionBeforeLoop.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionBeforeLoop.st deleted file mode 100644 index c2b5d9037..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionBeforeLoop.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' 'b'* ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionBeforeLoop2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionBeforeLoop2.st deleted file mode 100644 index 9169f8f00..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionBeforeLoop2.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' ('b'|'z'{})*; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionConsumption.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionConsumption.st deleted file mode 100644 index 3e8ac85f6..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionConsumption.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -myset: ('b'|'c') ; -a: 'a' myset 'd' {} ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionDuringLoop.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionDuringLoop.st deleted file mode 100644 index b7417c923..000000000 --- 
a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionDuringLoop.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' 'b'* 'c' ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionDuringLoop2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionDuringLoop2.st deleted file mode 100644 index d0404f1c6..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionDuringLoop2.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' ('b'|'z'{})* 'c' ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionExpectingSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionExpectingSet.st deleted file mode 100644 index 6280f7599..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenDeletionExpectingSet.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' ('b'|'c') ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenInsertion.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenInsertion.st deleted file mode 100644 index 013403fce..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/SingleTokenInsertion.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' 'b' 'c' ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/TokenMismatch.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/TokenMismatch.st deleted file mode 100644 index daebfb144..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/TokenMismatch.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : 'a' 'b' ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/TokenMismatch2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/TokenMismatch2.st deleted file mode 100644 index 6f9d19930..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserErrors/TokenMismatch2.st +++ /dev/null @@ -1,9 +0,0 @@ -grammar ; - -stat: ( '(' expr? ')' )? 
EOF ; -expr: ID '=' STR ; - -ERR : '~FORCE_ERROR~' ; -ID : [a-zA-Z]+ ; -STR : '"' ~["]* '"' ; -WS : [ \t\r\n]+ -> skip ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/APlus.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/APlus.st deleted file mode 100644 index 24e7d443d..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/APlus.st +++ /dev/null @@ -1,6 +0,0 @@ -grammar ; -a : ID+ { - -}; -ID : 'a'..'z'+; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AStar.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AStar.st deleted file mode 100644 index cd360a422..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AStar.st +++ /dev/null @@ -1,6 +0,0 @@ -grammar ; -a : ID* { - -}; -ID : 'a'..'z'+; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AlternateQuotes.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AlternateQuotes.st deleted file mode 100644 index 6fd73cda0..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AlternateQuotes.st +++ /dev/null @@ -1,6 +0,0 @@ -parser grammar ModeTagsParser; -options { tokenVocab=ModeTagsLexer; } // use tokens from ModeTagsLexer.g4 -file_: (tag | TEXT)* ; -tag : '«' ID '»' - | '«' '/' ID '»' - ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AlternateQuotes_ModeTagsLexer.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AlternateQuotes_ModeTagsLexer.st deleted file mode 100644 index c5205bc12..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AlternateQuotes_ModeTagsLexer.st +++ /dev/null @@ -1,8 +0,0 @@ -lexer grammar ModeTagsLexer; -// Default mode rules (the SEA) -OPEN : '«' -> mode(ISLAND) ; // switch to ISLAND mode -TEXT : ~'«'+ ; // clump all text together -mode ISLAND; -CLOSE : '»' -> mode(DEFAULT_MODE) ; // back to SEA mode -SLASH : '/' ; -ID : [a-zA-Z]+ ; // match/send ID in tag to parser diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorAPlus.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorAPlus.st deleted file mode 100644 index abecc96ce..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorAPlus.st +++ /dev/null @@ -1,6 +0,0 @@ -grammar ; -a : (ID|ID)+ { - -}; -ID : 'a'..'z'+; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorAStar.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorAStar.st deleted file mode 100644 index dca57fd81..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorAStar.st +++ /dev/null @@ -1,6 +0,0 @@ -grammar ; -a : (ID|ID)* { - -}; -ID : 'a'..'z'+; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorB.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorB.st deleted file mode 100644 index 73291af32..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorB.st +++ /dev/null @@ -1,9 +0,0 @@ -grammar ; -a : ID { - -} | INT { - -}; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorBPlus.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorBPlus.st deleted file mode 100644 index 99636d8b1..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorBPlus.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -a : (ID|INT{ -})+ { - -}; -ID : 
'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorBStar.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorBStar.st deleted file mode 100644 index 24376c2e4..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/AorBStar.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -a : (ID|INT{ -})* { - -}; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Basic.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Basic.st deleted file mode 100644 index 51c474cea..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Basic.st +++ /dev/null @@ -1,7 +0,0 @@ -grammar ; -a : ID INT { - -}; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/EOFInClosure.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/EOFInClosure.st deleted file mode 100644 index 7c5e1f6d1..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/EOFInClosure.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -prog : stat EOF; -stat : 'x' ('y' | EOF)*?; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseGreedyBinding1.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseGreedyBinding1.st deleted file mode 100644 index c0cb51e80..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseGreedyBinding1.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -start : statement+ ; -statement : 'x' | ifStatement; -ifStatement : 'if' 'y' statement ('else' statement)? { - -}; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> channel(HIDDEN); diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseGreedyBinding2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseGreedyBinding2.st deleted file mode 100644 index fd8a7d653..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseGreedyBinding2.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -start : statement+ ; -statement : 'x' | ifStatement; -ifStatement : 'if' 'y' statement ('else' statement|) { - -}; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> channel(HIDDEN); diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseNonGreedyBinding1.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseNonGreedyBinding1.st deleted file mode 100644 index 2953acb57..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseNonGreedyBinding1.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -start : statement+ ; -statement : 'x' | ifStatement; -ifStatement : 'if' 'y' statement ('else' statement)?? 
{ - -}; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> channel(HIDDEN); diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseNonGreedyBinding2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseNonGreedyBinding2.st deleted file mode 100644 index 561d52271..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/IfIfElseNonGreedyBinding2.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -start : statement+ ; -statement : 'x' | ifStatement; -ifStatement : 'if' 'y' statement (|'else' statement) { - -}; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> channel(HIDDEN); diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/LL1OptionalBlock.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/LL1OptionalBlock.st deleted file mode 100644 index 31bec9d98..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/LL1OptionalBlock.st +++ /dev/null @@ -1,7 +0,0 @@ -grammar ; -a : (ID|{}INT)? { - -}; -ID : 'a'..'z'+; -INT : '0'..'9'+ ; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/LabelAliasingAcrossLabeledAlternatives.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/LabelAliasingAcrossLabeledAlternatives.st deleted file mode 100644 index 8e823dd2a..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/LabelAliasingAcrossLabeledAlternatives.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -start : a* EOF; -a - : label=subrule {} #One - | label='y' {} #Two - ; -subrule : 'x'; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Labels.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Labels.st deleted file mode 100644 index 28925ca96..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Labels.st +++ /dev/null @@ -1,6 +0,0 @@ -grammar ; -a : b1=b b2+=b* b3+=';' ; -b : id_=ID val+=INT*; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ListLabelForClosureContext.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ListLabelForClosureContext.st deleted file mode 100644 index 71f8cbdc7..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ListLabelForClosureContext.st +++ /dev/null @@ -1,20 +0,0 @@ -grammar ; -ifStatement -@after { - -} - : 'if' expression - ( ( 'then' - executableStatement* - elseIfStatement* // \<--- problem is here - elseStatement? - 'end' 'if' - ) | executableStatement ) - ; - -elseIfStatement - : 'else' 'if' expression 'then' executableStatement* - ; -expression : 'a' ; -executableStatement : 'a' ; -elseStatement : 'a' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ListLabelsOnSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ListLabelsOnSet.st deleted file mode 100644 index 7f666f8f9..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ListLabelsOnSet.st +++ /dev/null @@ -1,7 +0,0 @@ -grammar ; -a : b b* ';' ; -b : ID val+=(INT | FLOAT)*; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -FLOAT : [0-9]+ '.' 
[0-9]+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/MultipleEOFHandling.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/MultipleEOFHandling.st deleted file mode 100644 index 660c71623..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/MultipleEOFHandling.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -prog : ('x' | 'x' 'y') EOF EOF; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Optional.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Optional.st deleted file mode 100644 index 408a53447..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/Optional.st +++ /dev/null @@ -1,4 +0,0 @@ -grammar ; -stat : ifstat | 'x'; -ifstat : 'if' stat ('else' stat)?; -WS : [ \n\t]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ParserProperty.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ParserProperty.st deleted file mode 100644 index c2d5a5adc..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ParserProperty.st +++ /dev/null @@ -1,6 +0,0 @@ -grammar ; - -a : {$parser.Property()}? ID {} - ; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/PredicatedIfIfElse.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/PredicatedIfIfElse.st deleted file mode 100644 index 32d320973..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/PredicatedIfIfElse.st +++ /dev/null @@ -1,7 +0,0 @@ -grammar ; -s : stmt EOF ; -stmt : ifStmt | ID; -ifStmt : 'if' ID stmt ('else' stmt | { }?); -ELSE : 'else'; -ID : [a-zA-Z]+; -WS : [ \\n\\t]+ -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/PredictionIssue334.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/PredictionIssue334.st deleted file mode 100644 index 56190fd61..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/PredictionIssue334.st +++ /dev/null @@ -1,14 +0,0 @@ -grammar ; -file_ @init{ - -} -@after { - -} - : item (SEMICOLON item)* SEMICOLON? EOF ; -item : A B?; -SEMICOLON: ';'; -A : 'a'|'A'; -B : 'b'|'B'; -WS : [ \r\t\n]+ -> skip; - diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ReferenceToATN.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ReferenceToATN.st deleted file mode 100644 index 1099affbc..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/ReferenceToATN.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar ; -a : (ID|ATN_)* ATN_? {} ; -ID : 'a'..'z'+ ; -ATN_ : '0'..'9'+; -WS : (' '|'\n') -> skip ; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/StartRuleWithoutEOF.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/StartRuleWithoutEOF.st deleted file mode 100644 index 0d142c518..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/ParserExec/StartRuleWithoutEOF.st +++ /dev/null @@ -1,6 +0,0 @@ -grammar ; -s @after { } - : ID | ID INT ID ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\t'|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/DisableRule.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/DisableRule.st deleted file mode 100644 index ec1263a71..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/DisableRule.st +++ /dev/null @@ -1,5 +0,0 @@ -lexer grammar ; -E1 : 'enum' { }? ; -E2 : 'enum' { }? 
; // winner not E1 or ID -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/EnumNotID.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/EnumNotID.st deleted file mode 100644 index e32eb23e7..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/EnumNotID.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -ENUM : [a-z]+ { }? ; -ID : [a-z]+ ; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/IDnotEnum.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/IDnotEnum.st deleted file mode 100644 index c055aa9db..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/IDnotEnum.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -ENUM : [a-z]+ { }? ; -ID : [a-z]+ ; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/IDvsEnum.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/IDvsEnum.st deleted file mode 100644 index c5180a1ed..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/IDvsEnum.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -ENUM : 'enum' { }? ; -ID : 'a'..'z'+ ; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/Indent.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/Indent.st deleted file mode 100644 index 9aa936553..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/Indent.st +++ /dev/null @@ -1,6 +0,0 @@ -lexer grammar ; -ID : [a-z]+ ; -INDENT : [ \t]+ { }? - { } ; -NL : '\n'; -WS : [ \t]+ ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/LexerInputPositionSensitivePredicates.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/LexerInputPositionSensitivePredicates.st deleted file mode 100644 index 70890ceaf..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/LexerInputPositionSensitivePredicates.st +++ /dev/null @@ -1,6 +0,0 @@ -lexer grammar ; -WORD1 : ID1+ { } ; -WORD2 : ID2+ { } ; -fragment ID1 : { \< 2 }? [a-zA-Z]; -fragment ID2 : { >= 2 }? [a-zA-Z]; -WS : (' '|'\n') -> skip; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/PredicatedKeywords.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/PredicatedKeywords.st deleted file mode 100644 index 2f3127cd5..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalLexer/PredicatedKeywords.st +++ /dev/null @@ -1,4 +0,0 @@ -lexer grammar ; -ENUM : [a-z]+ { }? { } ; -ID : [a-z]+ { } ; -WS : [ \n] -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/2UnpredicatedAlts.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/2UnpredicatedAlts.st deleted file mode 100644 index 0982d1d67..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/2UnpredicatedAlts.st +++ /dev/null @@ -1,9 +0,0 @@ -grammar ; -s : {} a ';' a; // do 2x: once in ATN, next in DFA -a : ID {} - | ID {} - | {}? 
ID {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/2UnpredicatedAltsAndOneOrthogonalAlt.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/2UnpredicatedAltsAndOneOrthogonalAlt.st deleted file mode 100644 index 4ab1601dc..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/2UnpredicatedAltsAndOneOrthogonalAlt.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -s : {} a ';' a ';' a; -a : INT {} - | ID {} // must pick this one for ID since pred is false - | ID {} - | {}? ID {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ActionHidesPreds.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ActionHidesPreds.st deleted file mode 100644 index 90c052f6e..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ActionHidesPreds.st +++ /dev/null @@ -1,9 +0,0 @@ -grammar ; -@members {} -s : a+ ; -a : {} ID {}? {} - | {} ID {}? {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ActionsHidePredsInGlobalFOLLOW.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ActionsHidePredsInGlobalFOLLOW.st deleted file mode 100644 index 8316cf133..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ActionsHidePredsInGlobalFOLLOW.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -@members { - -} -s : e {} {}? {} '!' ; -t : e {} {}? ID ; -e : ID | ; // non-LL(1) so we use ATN -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/AtomWithClosureInTranslatedLRRule.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/AtomWithClosureInTranslatedLRRule.st deleted file mode 100644 index a782d480b..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/AtomWithClosureInTranslatedLRRule.st +++ /dev/null @@ -1,6 +0,0 @@ -grammar ; -start : e[0] EOF; -e[int _p] - : ( 'a' | 'b'+ ) ( {3 >= $_p}? '+' e[4] )* - ; - diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DepedentPredsInGlobalFOLLOW.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DepedentPredsInGlobalFOLLOW.st deleted file mode 100644 index 06b5a3735..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DepedentPredsInGlobalFOLLOW.st +++ /dev/null @@ -1,11 +0,0 @@ -grammar ; -@members { - -} -s : a[99] ; -a[int i] : e {}? {} '!' ; -b[int i] : e {}? ID ; -e : ID | ; // non-LL(1) so we use ATN -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DependentPredNotInOuterCtxShouldBeIgnored.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DependentPredNotInOuterCtxShouldBeIgnored.st deleted file mode 100644 index 266a67208..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DependentPredNotInOuterCtxShouldBeIgnored.st +++ /dev/null @@ -1,11 +0,0 @@ -grammar ; -s : b[2] ';' | b[2] '.' ; // decision in s drills down to ctx-dependent pred in a; -b[int i] : a[i] ; -a[int i] - : {}? ID {} - | {}? 
ID {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; - diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DisabledAlternative.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DisabledAlternative.st deleted file mode 100644 index 2cfbbab6c..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/DisabledAlternative.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar ; -cppCompilationUnit : content+ EOF; -content: anything | {}? .; -anything: ANY_CHAR; -ANY_CHAR: [_a-zA-Z0-9]; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/IndependentPredNotPassedOuterCtxToAvoidCastException.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/IndependentPredNotPassedOuterCtxToAvoidCastException.st deleted file mode 100644 index 4143ec3b3..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/IndependentPredNotPassedOuterCtxToAvoidCastException.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -s : b ';' | b '.' ; -b : a ; -a - : {}? ID {} - | {}? ID {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/NoTruePredsThrowsNoViableAlt.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/NoTruePredsThrowsNoViableAlt.st deleted file mode 100644 index dd7b05895..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/NoTruePredsThrowsNoViableAlt.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -s : a a; -a : {}? ID INT {} - | {}? ID INT {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/Order.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/Order.st deleted file mode 100644 index 1d83ac807..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/Order.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -s : a {} a; // do 2x: once in ATN, next in DFA; -// action blocks lookahead from falling off of 'a' -// and looking into 2nd 'a' ref. !ctx dependent pred -a : ID {} - | {}? ID {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredFromAltTestedInLoopBack.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredFromAltTestedInLoopBack.st deleted file mode 100644 index ba95741ad..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredFromAltTestedInLoopBack.st +++ /dev/null @@ -1,9 +0,0 @@ -grammar ; -file_ -@after {} - : para para EOF ; -para: paraContent NL NL ; -paraContent : ('s'|'x'|{}? NL)+ ; -NL : '\n' ; -s : 's' ; -X : 'x' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredTestedEvenWhenUnAmbig.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredTestedEvenWhenUnAmbig.st deleted file mode 100644 index 7b724ad2f..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredTestedEvenWhenUnAmbig.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -@members {} -primary - : ID {} - | {}? 
'enum' {} - ; -ID : [a-z]+ ; -WS : [ \t\n\r]+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredicateDependentOnArg.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredicateDependentOnArg.st deleted file mode 100644 index 8a91793ab..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredicateDependentOnArg.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -@members {} -s : a[2] a[1]; -a[int i] - : {}? ID {} - | {}? ID {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredicateDependentOnArg2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredicateDependentOnArg2.st deleted file mode 100644 index df1142e63..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredicateDependentOnArg2.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -@members {} -s : a[2] a[1]; -a[int i] - : {}? ID - | {}? ID - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredsInGlobalFOLLOW.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredsInGlobalFOLLOW.st deleted file mode 100644 index 27c61d473..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/PredsInGlobalFOLLOW.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -@members { - -} -s : e {}? {} '!' ; -t : e {}? ID ; -e : ID | ; // non-LL(1) so we use ATN -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/RewindBeforePredEval.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/RewindBeforePredEval.st deleted file mode 100644 index a13cd7220..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/RewindBeforePredEval.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -s : a a; -a : {}? ID INT {} - | {}? ID INT {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/Simple.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/Simple.st deleted file mode 100644 index 5285fcf83..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/Simple.st +++ /dev/null @@ -1,9 +0,0 @@ -grammar ; -s : a a a; // do 3x: once in ATN, next in DFA then INT in ATN -a : {}? ID {} - | {}? ID {} - | INT {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/SimpleValidate.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/SimpleValidate.st deleted file mode 100644 index 5169d43b6..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/SimpleValidate.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -s : a ; -a : {}? ID {} - | {}? INT {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/SimpleValidate2.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/SimpleValidate2.st deleted file mode 100644 index 606c0083e..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/SimpleValidate2.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; -s : a a a; -a : {}? ID {} - | {}? 
INT {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ToLeft.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ToLeft.st deleted file mode 100644 index a69e500a8..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ToLeft.st +++ /dev/null @@ -1,8 +0,0 @@ -grammar ; - s : a+ ; -a : {}? ID {} - | {}? ID {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ToLeftWithVaryingPredicate.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ToLeftWithVaryingPredicate.st deleted file mode 100644 index f67afc57e..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ToLeftWithVaryingPredicate.st +++ /dev/null @@ -1,10 +0,0 @@ -grammar ; -@members {} -s : ({ -} a)+ ; -a : {}? ID {} - | {}? ID {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/UnpredicatedPathsInAlt.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/UnpredicatedPathsInAlt.st deleted file mode 100644 index 207df7b63..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/UnpredicatedPathsInAlt.st +++ /dev/null @@ -1,12 +0,0 @@ -grammar ; -s : a {} - | b {} - ; -a : {}? ID INT - | ID INT - ; -b : ID ID - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ValidateInDFA.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ValidateInDFA.st deleted file mode 100644 index 47b10e627..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/SemPredEvalParser/ValidateInDFA.st +++ /dev/null @@ -1,11 +0,0 @@ -grammar ; -s : a ';' a; -// ';' helps us to resynchronize without consuming -// 2nd 'a' reference. We our testing that the DFA also -// throws an exception if the validating predicate fails -a : {}? ID {} - | {}? INT {} - ; -ID : 'a'..'z'+ ; -INT : '0'..'9'+; -WS : (' '|'\n') -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/CharSetLiteral.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/CharSetLiteral.st deleted file mode 100644 index 91f68f149..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/CharSetLiteral.st +++ /dev/null @@ -1,4 +0,0 @@ -grammar ; -a : (A {})+ ; -A : [AaBb] ; -WS : (' '|'\n')+ -> skip ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ComplementSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ComplementSet.st deleted file mode 100644 index 5873c96de..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ComplementSet.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -parse : ~NEW_LINE; -NEW_LINE: '\\r'? '\\n'; \ No newline at end of file diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerOptionalSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerOptionalSet.st deleted file mode 100644 index 090b5c7e4..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerOptionalSet.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -a : A {} ; -A : ('a'|'b')? 
'c' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerPlusSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerPlusSet.st deleted file mode 100644 index 0f00f8416..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerPlusSet.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -a : A {} ; -A : ('a'|'b')+ 'c' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerStarSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerStarSet.st deleted file mode 100644 index 9407e8a03..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/LexerStarSet.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -a : A {} ; -A : ('a'|'b')* 'c' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotChar.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotChar.st deleted file mode 100644 index 1bbdffaf1..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotChar.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -a : A {} ; -A : ~'b' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSet.st deleted file mode 100644 index 878839aeb..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSet.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -a : A {} ; -A : ~('b'|'c') ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSetWithLabel.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSetWithLabel.st deleted file mode 100644 index 9f2025cb0..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSetWithLabel.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -a : A {} ; -A : h=~('b'|'c') ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSetWithRuleRef3.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSetWithRuleRef3.st deleted file mode 100644 index 107e6bb85..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/NotCharSetWithRuleRef3.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar ; -a : A {} ; -A : ('a'|B) ; // this doesn't collapse to set but works -fragment -B : ~('a'|'c') ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalLexerSingleElement.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalLexerSingleElement.st deleted file mode 100644 index 7f00dba67..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalLexerSingleElement.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -a : A {} ; -A : 'b'? 'c' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalSet.st deleted file mode 100644 index 128433b14..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalSet.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : ('a'|'b')? 'c' {} ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalSingleElement.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalSingleElement.st deleted file mode 100644 index 88b7bc673..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/OptionalSingleElement.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -a : A? 
'c' {} ; -A : 'b' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotSet.st deleted file mode 100644 index eca2f1b3d..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotSet.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : t=~('x'|'y') 'z' {} ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotToken.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotToken.st deleted file mode 100644 index f5b1a914d..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotToken.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : ~'x' 'z' {} ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotTokenWithLabel.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotTokenWithLabel.st deleted file mode 100644 index 6dc068aab..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserNotTokenWithLabel.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : t=~'x' 'z' {} ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserSet.st deleted file mode 100644 index 528eca4b4..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/ParserSet.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : t=('x'|'y') {} ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/PlusLexerSingleElement.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/PlusLexerSingleElement.st deleted file mode 100644 index a7e8bca8f..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/PlusLexerSingleElement.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -a : A {} ; -A : 'b'+ 'c' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/PlusSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/PlusSet.st deleted file mode 100644 index 5027cd639..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/PlusSet.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : ('a'|'b')+ 'c' {} ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/RuleAsSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/RuleAsSet.st deleted file mode 100644 index feced0155..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/RuleAsSet.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a @after {} : 'a' | 'b' |'c' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/SeqDoesNotBecomeSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/SeqDoesNotBecomeSet.st deleted file mode 100644 index 1bab7fe17..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/SeqDoesNotBecomeSet.st +++ /dev/null @@ -1,5 +0,0 @@ -grammar ; -a : C {} ; -fragment A : '1' | '2'; -fragment B : '3' '4'; -C : A | B; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/StarLexerSingleElement.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/StarLexerSingleElement.st deleted file mode 100644 index 8811e8019..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/StarLexerSingleElement.st +++ /dev/null @@ -1,3 +0,0 @@ -grammar ; -a : A {} ; -A : 'b'* 'c' ; diff --git a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/StarSet.st b/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/StarSet.st deleted file mode 100644 index 0f0b2a0f9..000000000 --- a/tool/test/org/antlr/v4/test/rt/gen/grammars/Sets/StarSet.st +++ /dev/null @@ -1,2 +0,0 @@ -grammar ; -a : ('a'|'b')* 'c' {} ; diff --git a/tool/test/org/antlr/v4/test/rt/java/BaseTest.java 
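In the SemPredEval* and Sets templates above, the empty {} and {}? pairs are slots where the generator presumably injected target-language actions and semantic predicates, which is why the predicates look vacuous in the raw templates. A filled-in sketch of the predicate-gated-alternatives pattern they exercise (the name, the member field, and the action bodies are illustrative only, not the generated originals):

    grammar PredDemo;                       // hypothetical name
    @members { boolean flag = true; }       // illustrative state read by the predicates below
    a : {flag}?  ID {System.out.println("alt 1");}    // alternative viable only while flag is true
      | {!flag}? ID {System.out.println("alt 2");}    // chosen when the first predicate is false
      ;
    ID : 'a'..'z'+ ;
    WS : (' '|'\n') -> skip ;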
b/tool/test/org/antlr/v4/test/rt/java/BaseTest.java deleted file mode 100644 index 3ccd09fb4..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/BaseTest.java +++ /dev/null @@ -1,1417 +0,0 @@ -/* - * [The "BSD license"] - * Copyright (c) 2012 Terence Parr - * Copyright (c) 2012 Sam Harwell - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR - * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. - * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF - * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -package org.antlr.v4.test.rt.java; - -import org.antlr.v4.Tool; -import org.antlr.v4.automata.ATNFactory; -import org.antlr.v4.automata.ATNPrinter; -import org.antlr.v4.automata.LexerATNFactory; -import org.antlr.v4.automata.ParserATNFactory; -import org.antlr.v4.codegen.CodeGenerator; -import org.antlr.v4.runtime.ANTLRInputStream; -import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.CommonToken; -import org.antlr.v4.runtime.CommonTokenStream; -import org.antlr.v4.runtime.IntStream; -import org.antlr.v4.runtime.Lexer; -import org.antlr.v4.runtime.Parser; -import org.antlr.v4.runtime.RuleContext; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenSource; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.WritableToken; -import org.antlr.v4.runtime.atn.ATN; -import org.antlr.v4.runtime.atn.ATNDeserializer; -import org.antlr.v4.runtime.atn.ATNSerializer; -import org.antlr.v4.runtime.atn.ATNState; -import org.antlr.v4.runtime.atn.DecisionState; -import org.antlr.v4.runtime.atn.LexerATNSimulator; -import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.misc.IntegerList; -import org.antlr.v4.runtime.misc.Interval; -import org.antlr.v4.runtime.misc.Pair; -import org.antlr.v4.runtime.misc.Utils; -import org.antlr.v4.runtime.tree.ParseTree; -import org.antlr.v4.semantics.SemanticPipeline; -import org.antlr.v4.test.tool.ErrorQueue; -import org.antlr.v4.tool.ANTLRMessage; -import org.antlr.v4.tool.DOTGenerator; -import org.antlr.v4.tool.DefaultToolListener; -import org.antlr.v4.tool.Grammar; -import org.antlr.v4.tool.GrammarSemanticsMessage; -import org.antlr.v4.tool.LexerGrammar; -import org.antlr.v4.tool.Rule; -import org.junit.Before; -import org.junit.rules.TestRule; -import org.junit.rules.TestWatcher; -import 
org.junit.runner.Description; -import org.stringtemplate.v4.ST; -import org.stringtemplate.v4.STGroup; -import org.stringtemplate.v4.STGroupString; - -import javax.tools.JavaCompiler; -import javax.tools.JavaFileObject; -import javax.tools.StandardJavaFileManager; -import javax.tools.ToolProvider; -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.PipedInputStream; -import java.io.PipedOutputStream; -import java.io.PrintStream; -import java.io.StringReader; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLClassLoader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; -import java.util.logging.Level; -import java.util.logging.Logger; - -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -public abstract class BaseTest { - // -J-Dorg.antlr.v4.test.BaseTest.level=FINE - private static final Logger LOGGER = Logger.getLogger(BaseTest.class.getName()); - - public static final String newline = System.getProperty("line.separator"); - public static final String pathSep = System.getProperty("path.separator"); - - /** - * When the {@code antlr.testinprocess} runtime property is set to - * {@code true}, the test suite will attempt to load generated classes into - * the test process for direct execution rather than invoking the JVM in a - * new process for testing. - * - *
<p>
- * In-process testing results in a substantial performance improvement, but - * some test environments created by IDEs do not support the mechanisms - * currently used by the tests to dynamically load compiled code. Therefore, - * the default behavior (used in all other cases) favors reliable - * cross-system test execution by executing generated test code in a - * separate process.
</p>
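For reference, a minimal sketch of the switch this comment describes, assuming nothing beyond the antlr.testinprocess property name and the two execution paths implemented further down in the deleted BaseTest; the class name and messages are illustrative only.

import java.io.File;

// Sketch only: mirrors the in-process vs. forked decision described above.
// "antlr.testinprocess" is the only detail taken from the deleted code.
public final class ExecutionModeSketch {
    static final boolean TEST_IN_SAME_PROCESS =
            Boolean.parseBoolean(System.getProperty("antlr.testinprocess"));

    public static void main(String[] args) {
        File tmpdir = new File(System.getProperty("java.io.tmpdir"), "antlr-tests");
        if (TEST_IN_SAME_PROCESS) {
            // the harness would load the generated Test class into this JVM
            // (URLClassLoader rooted at tmpdir) and invoke its main() reflectively
            System.out.println("in-process: loading compiled classes from " + tmpdir);
        } else {
            // otherwise it forks: java -classpath <tmpdir plus test classpath> Test <input file>
            System.out.println("forked: spawning a new JVM with classpath " + tmpdir);
        }
    }
}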
- */ - public static final boolean TEST_IN_SAME_PROCESS = Boolean.parseBoolean(System.getProperty("antlr.testinprocess")); - - /** - * When the {@code antlr.preserve-test-dir} runtime property is set to - * {@code true}, the temporary directories created by the test run will not - * be removed at the end of the test run, even for tests that completed - * successfully. - * - *
<p>
- * The default behavior (used in all other cases) is removing the temporary - * directories for all tests which completed successfully, and preserving - * the directories for tests which failed.
</p>
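A short sketch of that cleanup policy under JUnit 4, which the deleted file uses; only the antlr.preserve-test-dir property name and the TestWatcher.succeeded hook come from the original, and the directory handling is illustrative.

import java.io.File;

import org.junit.Rule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;

// Sketch only: erase the temporary directory after a passing test unless
// antlr.preserve-test-dir is set; failing tests keep their files for inspection.
public abstract class TempDirCleanupSketch {
    static final boolean PRESERVE_TEST_DIR =
            Boolean.parseBoolean(System.getProperty("antlr.preserve-test-dir"));

    protected String tmpdir; // assigned by the concrete test's setUp()

    @Rule
    public final TestWatcher cleanup = new TestWatcher() {
        @Override
        protected void succeeded(Description description) {
            if (PRESERVE_TEST_DIR || tmpdir == null) {
                return;
            }
            File dir = new File(tmpdir);
            File[] files = dir.listFiles();
            if (files != null) {
                for (File f : files) {
                    f.delete();
                }
            }
            dir.delete();
        }
    };
}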
- */ - public static final boolean PRESERVE_TEST_DIR = Boolean.parseBoolean(System.getProperty("antlr.preserve-test-dir")); - - /** - * The base test directory is the directory where generated files get placed - * during unit test execution. - * - *
<p>
- * The default value for this property is the {@code java.io.tmpdir} system - * property, and can be overridden by setting the - * {@code antlr.java-test-dir} property to a custom location. Note that the - * {@code antlr.java-test-dir} property directly affects the - * {@link #CREATE_PER_TEST_DIRECTORIES} value as well.
</p>
- */ - public static final String BASE_TEST_DIR; - - /** - * When {@code true}, a temporary directory will be created for each test - * executed during the test run. - * - *
<p>
- * This value is {@code true} when the {@code antlr.java-test-dir} system - * property is set, and otherwise {@code false}.
</p>
- */ - public static final boolean CREATE_PER_TEST_DIRECTORIES; - - static { - String baseTestDir = System.getProperty("antlr.java-test-dir"); - boolean perTestDirectories = false; - if (baseTestDir == null || baseTestDir.isEmpty()) { - baseTestDir = System.getProperty("java.io.tmpdir"); - perTestDirectories = true; - } - - if (!new File(baseTestDir).isDirectory()) { - throw new UnsupportedOperationException("The specified base test directory does not exist: " + baseTestDir); - } - - BASE_TEST_DIR = baseTestDir; - CREATE_PER_TEST_DIRECTORIES = perTestDirectories; - } - - /** - * Build up the full classpath we need, including the surefire path (if present) - */ - public static final String CLASSPATH = System.getProperty("java.class.path"); - - public String tmpdir = null; - - /** If error during parser execution, store stderr here; can't return - * stdout and stderr. This doesn't trap errors from running antlr. - */ - protected String stderrDuringParse; - - @org.junit.Rule - public final TestRule testWatcher = new TestWatcher() { - - @Override - protected void succeeded(Description description) { - // remove tmpdir if no error. - if (!PRESERVE_TEST_DIR) { - eraseTempDir(); - } - } - - }; - - @Before - public void setUp() throws Exception { - if (CREATE_PER_TEST_DIRECTORIES) { - // new output dir for each test - String testDirectory = getClass().getSimpleName() + "-" + System.currentTimeMillis(); - tmpdir = new File(BASE_TEST_DIR, testDirectory).getAbsolutePath(); - } - else { - tmpdir = new File(BASE_TEST_DIR).getAbsolutePath(); - if (!PRESERVE_TEST_DIR && new File(tmpdir).exists()) { - eraseFiles(); - } - } - } - - protected org.antlr.v4.Tool newTool(String[] args) { - Tool tool = new Tool(args); - return tool; - } - - protected Tool newTool() { - org.antlr.v4.Tool tool = new Tool(new String[] {"-o", tmpdir}); - return tool; - } - - protected ATN createATN(Grammar g, boolean useSerializer) { - if ( g.atn==null ) { - semanticProcess(g); - assertEquals(0, g.tool.getNumErrors()); - - ParserATNFactory f; - if ( g.isLexer() ) { - f = new LexerATNFactory((LexerGrammar)g); - } - else { - f = new ParserATNFactory(g); - } - - g.atn = f.createATN(); - assertEquals(0, g.tool.getNumErrors()); - } - - ATN atn = g.atn; - if (useSerializer) { - char[] serialized = ATNSerializer.getSerializedAsChars(atn); - return new ATNDeserializer().deserialize(serialized); - } - - return atn; - } - - protected void semanticProcess(Grammar g) { - if ( g.ast!=null && !g.ast.hasErrors ) { - System.out.println(g.ast.toStringTree()); - Tool antlr = new Tool(); - SemanticPipeline sem = new SemanticPipeline(g); - sem.process(); - if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any) - for (Grammar imp : g.getImportedGrammars()) { - antlr.processNonCombinedGrammar(imp, false); - } - } - } - } - - public DFA createDFA(Grammar g, DecisionState s) { -// PredictionDFAFactory conv = new PredictionDFAFactory(g, s); -// DFA dfa = conv.createDFA(); -// conv.issueAmbiguityWarnings(); -// System.out.print("DFA="+dfa); -// return dfa; - return null; - } - -// public void minimizeDFA(DFA dfa) { -// DFAMinimizer dmin = new DFAMinimizer(dfa); -// dfa.minimized = dmin.minimize(); -// } - - IntegerList getTypesFromString(Grammar g, String expecting) { - IntegerList expectingTokenTypes = new IntegerList(); - if ( expecting!=null && !expecting.trim().isEmpty() ) { - for (String tname : expecting.replace(" ", "").split(",")) { - int ttype = g.getTokenType(tname); - expectingTokenTypes.add(ttype); - } - } - return 
expectingTokenTypes; - } - - public IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) { - ANTLRInputStream in = new ANTLRInputStream(input); - IntegerList tokenTypes = new IntegerList(); - int ttype; - do { - ttype = lexerATN.match(in, Lexer.DEFAULT_MODE); - tokenTypes.add(ttype); - } while ( ttype!= Token.EOF ); - return tokenTypes; - } - - public List getTokenTypes(LexerGrammar lg, - ATN atn, - CharStream input) - { - LexerATNSimulator interp = new LexerATNSimulator(atn,new DFA[] { new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE)) },null); - List tokenTypes = new ArrayList(); - int ttype; - boolean hitEOF = false; - do { - if ( hitEOF ) { - tokenTypes.add("EOF"); - break; - } - int t = input.LA(1); - ttype = interp.match(input, Lexer.DEFAULT_MODE); - if ( ttype == Token.EOF ) { - tokenTypes.add("EOF"); - } - else { - tokenTypes.add(lg.typeToTokenList.get(ttype)); - } - - if ( t==IntStream.EOF ) { - hitEOF = true; - } - } while ( ttype!=Token.EOF ); - return tokenTypes; - } - - List checkRuleDFA(String gtext, String ruleName, String expecting) - throws Exception - { - ErrorQueue equeue = new ErrorQueue(); - Grammar g = new Grammar(gtext, equeue); - ATN atn = createATN(g, false); - ATNState s = atn.ruleToStartState[g.getRule(ruleName).index]; - if ( s==null ) { - System.err.println("no such rule: "+ruleName); - return null; - } - ATNState t = s.transition(0).target; - if ( !(t instanceof DecisionState) ) { - System.out.println(ruleName+" has no decision"); - return null; - } - DecisionState blk = (DecisionState)t; - checkRuleDFA(g, blk, expecting); - return equeue.all; - } - - List checkRuleDFA(String gtext, int decision, String expecting) - throws Exception - { - ErrorQueue equeue = new ErrorQueue(); - Grammar g = new Grammar(gtext, equeue); - ATN atn = createATN(g, false); - DecisionState blk = atn.decisionToState.get(decision); - checkRuleDFA(g, blk, expecting); - return equeue.all; - } - - void checkRuleDFA(Grammar g, DecisionState blk, String expecting) - throws Exception - { - DFA dfa = createDFA(g, blk); - String result = null; - if ( dfa!=null ) result = dfa.toString(); - assertEquals(expecting, result); - } - - List checkLexerDFA(String gtext, String expecting) - throws Exception - { - return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting); - } - - List checkLexerDFA(String gtext, String modeName, String expecting) - throws Exception - { - ErrorQueue equeue = new ErrorQueue(); - LexerGrammar g = new LexerGrammar(gtext, equeue); - g.atn = createATN(g, false); -// LexerATNToDFAConverter conv = new LexerATNToDFAConverter(g); -// DFA dfa = conv.createDFA(modeName); -// g.setLookaheadDFA(0, dfa); // only one decision to worry about -// -// String result = null; -// if ( dfa!=null ) result = dfa.toString(); -// assertEquals(expecting, result); -// -// return equeue.all; - return null; - } - - protected String load(String fileName, String encoding) - throws IOException - { - if ( fileName==null ) { - return null; - } - - String fullFileName = getClass().getPackage().getName().replace('.', '/') + '/' + fileName; - int size = 65000; - InputStreamReader isr; - InputStream fis = getClass().getClassLoader().getResourceAsStream(fullFileName); - if ( encoding!=null ) { - isr = new InputStreamReader(fis, encoding); - } - else { - isr = new InputStreamReader(fis); - } - try { - char[] data = new char[size]; - int n = isr.read(data); - return new String(data, 0, n); - } - finally { - isr.close(); - } - } - - /** Wow! 
much faster than compiling outside of VM. Finicky though. - * Had rules called r and modulo. Wouldn't compile til I changed to 'a'. - */ - protected boolean compile(String... fileNames) { - List files = new ArrayList(); - for (String fileName : fileNames) { - File f = new File(tmpdir, fileName); - files.add(f); - } - - JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); -// DiagnosticCollector diagnostics = -// new DiagnosticCollector(); - - StandardJavaFileManager fileManager = - compiler.getStandardFileManager(null, null, null); - - Iterable compilationUnits = - fileManager.getJavaFileObjectsFromFiles(files); - - Iterable compileOptions = - Arrays.asList("-g", "-source", "1.6", "-target", "1.6", "-implicit:class", "-Xlint:-options", "-d", tmpdir, "-cp", tmpdir+pathSep+CLASSPATH); - - JavaCompiler.CompilationTask task = - compiler.getTask(null, fileManager, null, compileOptions, null, - compilationUnits); - boolean ok = task.call(); - - try { - fileManager.close(); - } - catch (IOException ioe) { - ioe.printStackTrace(System.err); - } - -// List errors = new ArrayList(); -// for (Diagnostic diagnostic : diagnostics.getDiagnostics()) { -// errors.add( -// String.valueOf(diagnostic.getLineNumber())+ -// ": " + diagnostic.getMessage(null)); -// } -// if ( errors.size()>0 ) { -// System.err.println("compile stderr from: "+cmdLine); -// System.err.println(errors); -// return false; -// } - return ok; - - /* - File outputDir = new File(tmpdir); - try { - Process process = - Runtime.getRuntime().exec(args, null, outputDir); - StreamVacuum stdout = new StreamVacuum(process.getInputStream()); - StreamVacuum stderr = new StreamVacuum(process.getErrorStream()); - stdout.start(); - stderr.start(); - process.waitFor(); - stdout.join(); - stderr.join(); - if ( stdout.toString().length()>0 ) { - System.err.println("compile stdout from: "+cmdLine); - System.err.println(stdout); - } - if ( stderr.toString().length()>0 ) { - System.err.println("compile stderr from: "+cmdLine); - System.err.println(stderr); - } - int ret = process.exitValue(); - return ret==0; - } - catch (Exception e) { - System.err.println("can't exec compilation"); - e.printStackTrace(System.err); - return false; - } - */ - } - - protected ErrorQueue antlr(String grammarFileName, boolean defaultListener, String... 
extraOptions) { - final List options = new ArrayList(); - Collections.addAll(options, extraOptions); - if ( !options.contains("-o") ) { - options.add("-o"); - options.add(tmpdir); - } - if ( !options.contains("-lib") ) { - options.add("-lib"); - options.add(tmpdir); - } - if ( !options.contains("-encoding") ) { - options.add("-encoding"); - options.add("UTF-8"); - } - options.add(new File(tmpdir,grammarFileName).toString()); - - final String[] optionsA = new String[options.size()]; - options.toArray(optionsA); - Tool antlr = newTool(optionsA); - ErrorQueue equeue = new ErrorQueue(antlr); - antlr.addListener(equeue); - if (defaultListener) { - antlr.addListener(new DefaultToolListener(antlr)); - } - antlr.processGrammarsOnCommandLine(); - - if ( !defaultListener && !equeue.errors.isEmpty() ) { - System.err.println("antlr reports errors from "+options); - for (int i = 0; i < equeue.errors.size(); i++) { - ANTLRMessage msg = equeue.errors.get(i); - System.err.println(msg); - } - System.out.println("!!!\ngrammar:"); - try { - System.out.println(new String(Utils.readFile(tmpdir+"/"+grammarFileName))); - } - catch (IOException ioe) { - System.err.println(ioe.toString()); - } - System.out.println("###"); - } - if ( !defaultListener && !equeue.warnings.isEmpty() ) { - System.err.println("antlr reports warnings from "+options); - for (int i = 0; i < equeue.warnings.size(); i++) { - ANTLRMessage msg = equeue.warnings.get(i); - System.err.println(msg); - } - } - - return equeue; - } - - protected ErrorQueue antlr(String grammarFileName, String grammarStr, boolean defaultListener, String... extraOptions) { - System.out.println("dir "+tmpdir); - mkdir(tmpdir); - writeFile(tmpdir, grammarFileName, grammarStr); - return antlr(grammarFileName, defaultListener, extraOptions); - } - - protected String execLexer(String grammarFileName, - String grammarStr, - String lexerName, - String input) - { - return execLexer(grammarFileName, grammarStr, lexerName, input, false); - } - - protected String execLexer(String grammarFileName, - String grammarStr, - String lexerName, - String input, - boolean showDFA) - { - boolean success = rawGenerateAndBuildRecognizer(grammarFileName, - grammarStr, - null, - lexerName); - assertTrue(success); - writeFile(tmpdir, "input", input); - writeLexerTestFile(lexerName, showDFA); - compile("Test.java"); - String output = execClass("Test"); - if ( stderrDuringParse!=null && stderrDuringParse.length()>0 ) { - System.err.println(stderrDuringParse); - } - return output; - } - - public ParseTree execParser(String startRuleName, String input, - String parserName, String lexerName) - throws Exception - { - Pair pl = getParserAndLexer(input, parserName, lexerName); - Parser parser = pl.a; - return execStartRule(startRuleName, parser); - } - - public ParseTree execStartRule(String startRuleName, Parser parser) - throws IllegalAccessException, InvocationTargetException, - NoSuchMethodException - { - Method startRule = null; - Object[] args = null; - try { - startRule = parser.getClass().getMethod(startRuleName); - } - catch (NoSuchMethodException nsme) { - // try with int _p arg for recursive func - startRule = parser.getClass().getMethod(startRuleName, int.class); - args = new Integer[] {0}; - } - ParseTree result = (ParseTree)startRule.invoke(parser, args); -// System.out.println("parse tree = "+result.toStringTree(parser)); - return result; - } - - public Pair getParserAndLexer(String input, - String parserName, String lexerName) - throws Exception - { - final Class lexerClass = 
loadLexerClassFromTempDir(lexerName); - final Class parserClass = loadParserClassFromTempDir(parserName); - - ANTLRInputStream in = new ANTLRInputStream(new StringReader(input)); - - Class c = lexerClass.asSubclass(Lexer.class); - Constructor ctor = c.getConstructor(CharStream.class); - Lexer lexer = ctor.newInstance(in); - - Class pc = parserClass.asSubclass(Parser.class); - Constructor pctor = pc.getConstructor(TokenStream.class); - CommonTokenStream tokens = new CommonTokenStream(lexer); - Parser parser = pctor.newInstance(tokens); - return new Pair(parser, lexer); - } - - public Class loadClassFromTempDir(String name) throws Exception { - URLClassLoader loader = - new URLClassLoader(new URL[] { new File(tmpdir).toURI().toURL() }, - ClassLoader.getSystemClassLoader()); - return loader.loadClass(name); - } - - public Class loadLexerClassFromTempDir(String name) throws Exception { - return loadClassFromTempDir(name).asSubclass(Lexer.class); - } - - public Class loadParserClassFromTempDir(String name) throws Exception { - return loadClassFromTempDir(name).asSubclass(Parser.class); - } - - protected String execParser(String grammarFileName, - String grammarStr, - String parserName, - String lexerName, - String startRuleName, - String input, boolean debug) - { - return execParser(grammarFileName, grammarStr, parserName, - lexerName, startRuleName, input, debug, false); - } - - protected String execParser(String grammarFileName, - String grammarStr, - String parserName, - String lexerName, - String startRuleName, - String input, boolean debug, - boolean profile) - { - boolean success = rawGenerateAndBuildRecognizer(grammarFileName, - grammarStr, - parserName, - lexerName, - "-visitor"); - assertTrue(success); - writeFile(tmpdir, "input", input); - return rawExecRecognizer(parserName, - lexerName, - startRuleName, - debug, - profile); - } - - /** Return true if all is well */ - protected boolean rawGenerateAndBuildRecognizer(String grammarFileName, - String grammarStr, - String parserName, - String lexerName, - String... extraOptions) - { - return rawGenerateAndBuildRecognizer(grammarFileName, grammarStr, parserName, lexerName, false, extraOptions); - } - - /** Return true if all is well */ - protected boolean rawGenerateAndBuildRecognizer(String grammarFileName, - String grammarStr, - String parserName, - String lexerName, - boolean defaultListener, - String... 
extraOptions) - { - ErrorQueue equeue = - antlr(grammarFileName, grammarStr, defaultListener, extraOptions); - if (!equeue.errors.isEmpty()) { - return false; - } - - List files = new ArrayList(); - if ( lexerName!=null ) { - files.add(lexerName+".java"); - } - if ( parserName!=null ) { - files.add(parserName+".java"); - Set optionsSet = new HashSet(Arrays.asList(extraOptions)); - String grammarName = grammarFileName.substring(0, grammarFileName.lastIndexOf('.')); - if (!optionsSet.contains("-no-listener")) { - files.add(grammarName+"Listener.java"); - files.add(grammarName+"BaseListener.java"); - } - if (optionsSet.contains("-visitor")) { - files.add(grammarName+"Visitor.java"); - files.add(grammarName+"BaseVisitor.java"); - } - } - boolean allIsWell = compile(files.toArray(new String[files.size()])); - return allIsWell; - } - - protected String rawExecRecognizer(String parserName, - String lexerName, - String parserStartRuleName, - boolean debug, - boolean profile) - { - this.stderrDuringParse = null; - if ( parserName==null ) { - writeLexerTestFile(lexerName, false); - } - else { - writeTestFile(parserName, - lexerName, - parserStartRuleName, - debug, - profile); - } - - compile("Test.java"); - return execClass("Test"); - } - - public String execRecognizer() { - return execClass("Test"); - } - - public String execClass(String className) { - if (TEST_IN_SAME_PROCESS) { - URLClassLoader loader = null; - try { - loader = new URLClassLoader(new URL[] { new File(tmpdir).toURI().toURL() }, ClassLoader.getSystemClassLoader()); - final Class mainClass = (Class)loader.loadClass(className); - final Method mainMethod = mainClass.getDeclaredMethod("main", String[].class); - PipedInputStream stdoutIn = new PipedInputStream(); - PipedInputStream stderrIn = new PipedInputStream(); - PipedOutputStream stdoutOut = new PipedOutputStream(stdoutIn); - PipedOutputStream stderrOut = new PipedOutputStream(stderrIn); - StreamVacuum stdoutVacuum = new StreamVacuum(stdoutIn); - StreamVacuum stderrVacuum = new StreamVacuum(stderrIn); - - PrintStream originalOut = System.out; - System.setOut(new PrintStream(stdoutOut)); - try { - PrintStream originalErr = System.err; - try { - System.setErr(new PrintStream(stderrOut)); - stdoutVacuum.start(); - stderrVacuum.start(); - mainMethod.invoke(null, (Object)new String[] { new File(tmpdir, "input").getAbsolutePath() }); - } - finally { - System.setErr(originalErr); - } - } - finally { - System.setOut(originalOut); - } - - stdoutOut.close(); - stderrOut.close(); - stdoutVacuum.join(); - stderrVacuum.join(); - String output = stdoutVacuum.toString(); - if ( stderrVacuum.toString().length()>0 ) { - this.stderrDuringParse = stderrVacuum.toString(); - System.err.println("exec stderrVacuum: "+ stderrVacuum); - } - return output; - } catch (MalformedURLException ex) { - LOGGER.log(Level.SEVERE, null, ex); - throw new RuntimeException(ex); - } catch (IOException ex) { - LOGGER.log(Level.SEVERE, null, ex); - throw new RuntimeException(ex); - } catch (InterruptedException ex) { - LOGGER.log(Level.SEVERE, null, ex); - throw new RuntimeException(ex); - } catch (IllegalAccessException ex) { - LOGGER.log(Level.SEVERE, null, ex); - throw new RuntimeException(ex); - } catch (IllegalArgumentException ex) { - LOGGER.log(Level.SEVERE, null, ex); - throw new RuntimeException(ex); - } catch (InvocationTargetException ex) { - LOGGER.log(Level.SEVERE, null, ex); - throw new RuntimeException(ex); - } catch (NoSuchMethodException ex) { - LOGGER.log(Level.SEVERE, null, ex); - throw new 
RuntimeException(ex); - } catch (SecurityException ex) { - LOGGER.log(Level.SEVERE, null, ex); - throw new RuntimeException(ex); - } catch (ClassNotFoundException ex) { - LOGGER.log(Level.SEVERE, null, ex); - throw new RuntimeException(ex); - } - } - - try { - String[] args = new String[] { - "java", "-classpath", tmpdir+pathSep+CLASSPATH, - className, new File(tmpdir, "input").getAbsolutePath() - }; - //String cmdLine = "java -classpath "+CLASSPATH+pathSep+tmpdir+" Test " + new File(tmpdir, "input").getAbsolutePath(); - //System.out.println("execParser: "+cmdLine); - Process process = - Runtime.getRuntime().exec(args, null, new File(tmpdir)); - StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream()); - StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream()); - stdoutVacuum.start(); - stderrVacuum.start(); - process.waitFor(); - stdoutVacuum.join(); - stderrVacuum.join(); - String output = stdoutVacuum.toString(); - if ( stderrVacuum.toString().length()>0 ) { - this.stderrDuringParse = stderrVacuum.toString(); - System.err.println("exec stderrVacuum: "+ stderrVacuum); - } - return output; - } - catch (Exception e) { - System.err.println("can't exec recognizer"); - e.printStackTrace(System.err); - } - return null; - } - - public void testErrors(String[] pairs, boolean printTree) { - for (int i = 0; i < pairs.length; i+=2) { - String input = pairs[i]; - String expect = pairs[i+1]; - - String[] lines = input.split("\n"); - String fileName = getFilenameFromFirstLineOfGrammar(lines[0]); - ErrorQueue equeue = antlr(fileName, input, false); - - String actual = equeue.toString(true); - actual = actual.replace(tmpdir + File.separator, ""); - System.err.println(actual); - String msg = input; - msg = msg.replace("\n","\\n"); - msg = msg.replace("\r","\\r"); - msg = msg.replace("\t","\\t"); - - assertEquals("error in: "+msg,expect,actual); - } - } - - public String getFilenameFromFirstLineOfGrammar(String line) { - String fileName = "A" + Tool.GRAMMAR_EXTENSION; - int grIndex = line.lastIndexOf("grammar"); - int semi = line.lastIndexOf(';'); - if ( grIndex>=0 && semi>=0 ) { - int space = line.indexOf(' ', grIndex); - fileName = line.substring(space+1, semi)+Tool.GRAMMAR_EXTENSION; - } - if ( fileName.length()==Tool.GRAMMAR_EXTENSION.length() ) fileName = "A" + Tool.GRAMMAR_EXTENSION; - return fileName; - } - -// void ambig(List msgs, int[] expectedAmbigAlts, String expectedAmbigInput) -// throws Exception -// { -// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput); -// } - -// void ambig(List msgs, int i, int[] expectedAmbigAlts, String expectedAmbigInput) -// throws Exception -// { -// List amsgs = getMessagesOfType(msgs, AmbiguityMessage.class); -// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i); -// if ( a==null ) assertNull(expectedAmbigAlts); -// else { -// assertEquals(a.conflictingAlts.toString(), Arrays.toString(expectedAmbigAlts)); -// } -// assertEquals(expectedAmbigInput, a.input); -// } - -// void unreachable(List msgs, int[] expectedUnreachableAlts) -// throws Exception -// { -// unreachable(msgs, 0, expectedUnreachableAlts); -// } - -// void unreachable(List msgs, int i, int[] expectedUnreachableAlts) -// throws Exception -// { -// List amsgs = getMessagesOfType(msgs, UnreachableAltsMessage.class); -// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i); -// if ( u==null ) assertNull(expectedUnreachableAlts); -// else { -// assertEquals(u.conflictingAlts.toString(), Arrays.toString(expectedUnreachableAlts)); -// } -// } - - List 
getMessagesOfType(List msgs, Class c) { - List filtered = new ArrayList(); - for (ANTLRMessage m : msgs) { - if ( m.getClass() == c ) filtered.add(m); - } - return filtered; - } - - void checkRuleATN(Grammar g, String ruleName, String expecting) { - DOTGenerator dot = new DOTGenerator(g); - System.out.println(dot.getDOT(g.atn.ruleToStartState[g.getRule(ruleName).index])); - - Rule r = g.getRule(ruleName); - ATNState startState = g.atn.ruleToStartState[r.index]; - ATNPrinter serializer = new ATNPrinter(g, startState); - String result = serializer.asString(); - - //System.out.print(result); - assertEquals(expecting, result); - } - - public void testActions(String templates, String actionName, String action, String expected) throws org.antlr.runtime.RecognitionException { - int lp = templates.indexOf('('); - String name = templates.substring(0, lp); - STGroup group = new STGroupString(templates); - ST st = group.getInstanceOf(name); - st.add(actionName, action); - String grammar = st.render(); - ErrorQueue equeue = new ErrorQueue(); - Grammar g = new Grammar(grammar, equeue); - if ( g.ast!=null && !g.ast.hasErrors ) { - SemanticPipeline sem = new SemanticPipeline(g); - sem.process(); - - ATNFactory factory = new ParserATNFactory(g); - if ( g.isLexer() ) factory = new LexerATNFactory((LexerGrammar)g); - g.atn = factory.createATN(); - - CodeGenerator gen = new CodeGenerator(g); - ST outputFileST = gen.generateParser(); - String output = outputFileST.render(); - //System.out.println(output); - String b = "#" + actionName + "#"; - int start = output.indexOf(b); - String e = "#end-" + actionName + "#"; - int end = output.indexOf(e); - String snippet = output.substring(start+b.length(),end); - assertEquals(expected, snippet); - } - if ( equeue.size()>0 ) { - System.err.println(equeue.toString()); - } - } - - public static class StreamVacuum implements Runnable { - StringBuilder buf = new StringBuilder(); - BufferedReader in; - Thread sucker; - public StreamVacuum(InputStream in) { - this.in = new BufferedReader( new InputStreamReader(in) ); - } - public void start() { - sucker = new Thread(this); - sucker.start(); - } - @Override - public void run() { - try { - String line = in.readLine(); - while (line!=null) { - buf.append(line); - buf.append('\n'); - line = in.readLine(); - } - } - catch (IOException ioe) { - System.err.println("can't read output from process"); - } - } - /** wait for the thread to finish */ - public void join() throws InterruptedException { - sucker.join(); - } - @Override - public String toString() { - return buf.toString(); - } - } - - protected void checkGrammarSemanticsError(ErrorQueue equeue, - GrammarSemanticsMessage expectedMessage) - throws Exception - { - ANTLRMessage foundMsg = null; - for (int i = 0; i < equeue.errors.size(); i++) { - ANTLRMessage m = equeue.errors.get(i); - if (m.getErrorType()==expectedMessage.getErrorType() ) { - foundMsg = m; - } - } - assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg); - assertTrue("error is not a GrammarSemanticsMessage", - foundMsg instanceof GrammarSemanticsMessage); - assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs())); - if ( equeue.size()!=1 ) { - System.err.println(equeue); - } - } - - protected void checkGrammarSemanticsWarning(ErrorQueue equeue, - GrammarSemanticsMessage expectedMessage) - throws Exception - { - ANTLRMessage foundMsg = null; - for (int i = 0; i < equeue.warnings.size(); i++) { - ANTLRMessage m = equeue.warnings.get(i); - if 
(m.getErrorType()==expectedMessage.getErrorType() ) { - foundMsg = m; - } - } - assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg); - assertTrue("error is not a GrammarSemanticsMessage", - foundMsg instanceof GrammarSemanticsMessage); - assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs())); - if ( equeue.size()!=1 ) { - System.err.println(equeue); - } - } - - protected void checkError(ErrorQueue equeue, - ANTLRMessage expectedMessage) - throws Exception - { - //System.out.println("errors="+equeue); - ANTLRMessage foundMsg = null; - for (int i = 0; i < equeue.errors.size(); i++) { - ANTLRMessage m = equeue.errors.get(i); - if (m.getErrorType()==expectedMessage.getErrorType() ) { - foundMsg = m; - } - } - assertTrue("no error; "+expectedMessage.getErrorType()+" expected", !equeue.errors.isEmpty()); - assertTrue("too many errors; "+equeue.errors, equeue.errors.size()<=1); - assertNotNull("couldn't find expected error: "+expectedMessage.getErrorType(), foundMsg); - /* - assertTrue("error is not a GrammarSemanticsMessage", - foundMsg instanceof GrammarSemanticsMessage); - */ - assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs()); - } - - public static class FilteringTokenStream extends CommonTokenStream { - public FilteringTokenStream(TokenSource src) { super(src); } - Set hide = new HashSet(); - @Override - protected boolean sync(int i) { - if (!super.sync(i)) { - return false; - } - - Token t = get(i); - if ( hide.contains(t.getType()) ) { - ((WritableToken)t).setChannel(Token.HIDDEN_CHANNEL); - } - - return true; - } - public void setTokenTypeChannel(int ttype, int channel) { - hide.add(ttype); - } - } - - public static void writeFile(String dir, String fileName, String content) { - try { - Utils.writeFile(dir+"/"+fileName, content, "UTF-8"); - } - catch (IOException ioe) { - System.err.println("can't write file"); - ioe.printStackTrace(System.err); - } - } - - protected void mkdir(String dir) { - File f = new File(dir); - f.mkdirs(); - } - - protected void writeTestFile(String parserName, - String lexerName, - String parserStartRuleName, - boolean debug, - boolean profile) - { - ST outputFileST = new ST( - "import org.antlr.v4.runtime.*;\n" + - "import org.antlr.v4.runtime.tree.*;\n" + - "import org.antlr.v4.runtime.atn.*;\n" + - "import java.util.Arrays;\n"+ - "\n" + - "public class Test {\n" + - " public static void main(String[] args) throws Exception {\n" + - " CharStream input = new ANTLRFileStream(args[0]);\n" + - " lex = new (input);\n" + - " CommonTokenStream tokens = new CommonTokenStream(lex);\n" + - " \n"+ - " parser.setBuildParseTree(true);\n" + - " \n"+ - " ParserRuleContext tree = parser.();\n" + - " System.out.println(Arrays.toString(profiler.getDecisionInfo()));\n" + - " ParseTreeWalker.DEFAULT.walk(new TreeShapeListener(), tree);\n" + - " }\n" + - "\n" + - " static class TreeShapeListener implements ParseTreeListener {\n" + - " @Override public void visitTerminal(TerminalNode node) { }\n" + - " @Override public void visitErrorNode(ErrorNode node) { }\n" + - " @Override public void exitEveryRule(ParserRuleContext ctx) { }\n" + - "\n" + - " @Override\n" + - " public void enterEveryRule(ParserRuleContext ctx) {\n" + - " for (int i = 0; i \\< ctx.getChildCount(); i++) {\n" + - " ParseTree parent = ctx.getChild(i).getParent();\n" + - " if (!(parent instanceof RuleNode) || ((RuleNode)parent).getRuleContext() != ctx) {\n" + - " throw new IllegalStateException(\"Invalid parse tree shape 
detected.\");\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}" - ); - ST createParserST = new ST(" parser = new (tokens);\n"); - if ( debug ) { - createParserST = - new ST( - " parser = new (tokens);\n" + - " parser.addErrorListener(new DiagnosticErrorListener());\n"); - } - if ( profile ) { - outputFileST.add("profile", - "ProfilingATNSimulator profiler = new ProfilingATNSimulator(parser);\n" + - "parser.setInterpreter(profiler);"); - } - else { - outputFileST.add("profile", new ArrayList()); - } - outputFileST.add("createParser", createParserST); - outputFileST.add("parserName", parserName); - outputFileST.add("lexerName", lexerName); - outputFileST.add("parserStartRuleName", parserStartRuleName); - writeFile(tmpdir, "Test.java", outputFileST.render()); - } - - protected void writeLexerTestFile(String lexerName, boolean showDFA) { - ST outputFileST = new ST( - "import org.antlr.v4.runtime.*;\n" + - "\n" + - "public class Test {\n" + - " public static void main(String[] args) throws Exception {\n" + - " CharStream input = new ANTLRFileStream(args[0]);\n" + - " lex = new (input);\n" + - " CommonTokenStream tokens = new CommonTokenStream(lex);\n" + - " tokens.fill();\n" + - " for (Object t : tokens.getTokens()) System.out.println(t);\n" + - (showDFA?"System.out.print(lex.getInterpreter().getDFA(Lexer.DEFAULT_MODE).toLexerString());\n":"")+ - " }\n" + - "}" - ); - - outputFileST.add("lexerName", lexerName); - writeFile(tmpdir, "Test.java", outputFileST.render()); - } - - public void writeRecognizerAndCompile(String parserName, String lexerName, - String parserStartRuleName, - boolean debug, - boolean profile) { - if ( parserName==null ) { - writeLexerTestFile(lexerName, debug); - } - else { - writeTestFile(parserName, - lexerName, - parserStartRuleName, - debug, - profile); - } - - compile("Test.java"); - } - - - protected void eraseFiles(final String filesEndingWith) { - File tmpdirF = new File(tmpdir); - String[] files = tmpdirF.list(); - for(int i = 0; files!=null && i < files.length; i++) { - if ( files[i].endsWith(filesEndingWith) ) { - new File(tmpdir+"/"+files[i]).delete(); - } - } - } - - protected void eraseFiles() { - if (tmpdir == null) { - return; - } - - File tmpdirF = new File(tmpdir); - String[] files = tmpdirF.list(); - for(int i = 0; files!=null && i < files.length; i++) { - new File(tmpdir+"/"+files[i]).delete(); - } - } - - protected void eraseTempDir() { - File tmpdirF = new File(tmpdir); - if ( tmpdirF.exists() ) { - eraseFiles(); - tmpdirF.delete(); - } - } - - public String getFirstLineOfException() { - if ( this.stderrDuringParse ==null ) { - return null; - } - String[] lines = this.stderrDuringParse.split("\n"); - String prefix="Exception in thread \"main\" "; - return lines[0].substring(prefix.length(),lines[0].length()); - } - - /** - * When looking at a result set that consists of a Map/HashTable - * we cannot rely on the output order, as the hashing algorithm or other aspects - * of the implementation may be different on differnt JDKs or platforms. Hence - * we take the Map, convert the keys to a List, sort them and Stringify the Map, which is a - * bit of a hack, but guarantees that we get the same order on all systems. We assume that - * the keys are strings. - * - * @param m The Map that contains keys we wish to return in sorted order - * @return A string that represents all the keys in sorted order. 
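The rule in this comment lends itself to a tiny self-contained illustration; it assumes string keys, as the comment itself does, and the class and method names below are a sketch rather than the deleted implementation.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

// Sketch only: copy the map into a TreeMap so that toString() is independent
// of hash iteration order, which can differ across JDKs and platforms.
public final class SortedMapToStringSketch {
    static String sortMapToString(Map<String, ?> m) {
        if (m == null) {
            return null;
        }
        return new TreeMap<String, Object>(m).toString();
    }

    public static void main(String[] args) {
        Map<String, Integer> m = new LinkedHashMap<>();
        m.put("b", 2);
        m.put("a", 1);
        System.out.println(sortMapToString(m)); // prints {a=1, b=2}
    }
}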
- */ - public String sortMapToString(Map m) { - // Pass in crap, and get nothing back - // - if (m == null) { - return null; - } - - System.out.println("Map toString looks like: " + m.toString()); - - // Sort the keys in the Map - // - TreeMap nset = new TreeMap(m); - - System.out.println("Tree map looks like: " + nset.toString()); - return nset.toString(); - } - - public List realElements(List elements) { - return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size()); - } - - public void assertNotNullOrEmpty(String message, String text) { - assertNotNull(message, text); - assertFalse(message, text.isEmpty()); - } - - public void assertNotNullOrEmpty(String text) { - assertNotNull(text); - assertFalse(text.isEmpty()); - } - - public static class IntTokenStream implements TokenStream { - IntegerList types; - int p=0; - public IntTokenStream(IntegerList types) { this.types = types; } - - @Override - public void consume() { p++; } - - @Override - public int LA(int i) { return LT(i).getType(); } - - @Override - public int mark() { - return index(); - } - - @Override - public int index() { return p; } - - @Override - public void release(int marker) { - seek(marker); - } - - @Override - public void seek(int index) { - p = index; - } - - @Override - public int size() { - return types.size(); - } - - @Override - public String getSourceName() { - return UNKNOWN_SOURCE_NAME; - } - - @Override - public Token LT(int i) { - CommonToken t; - int rawIndex = p + i - 1; - if ( rawIndex>=types.size() ) t = new CommonToken(Token.EOF); - else t = new CommonToken(types.get(rawIndex)); - t.setTokenIndex(rawIndex); - return t; - } - - @Override - public Token get(int i) { - return new org.antlr.v4.runtime.CommonToken(types.get(i)); - } - - @Override - public TokenSource getTokenSource() { - return null; - } - - - @Override - public String getText() { - throw new UnsupportedOperationException("can't give strings"); - } - - - @Override - public String getText(Interval interval) { - throw new UnsupportedOperationException("can't give strings"); - } - - - @Override - public String getText(RuleContext ctx) { - throw new UnsupportedOperationException("can't give strings"); - } - - - @Override - public String getText(Token start, Token stop) { - throw new UnsupportedOperationException("can't give strings"); - } - } - - /** Sort a list */ - public > List sort(List data) { - List dup = new ArrayList(); - dup.addAll(data); - Collections.sort(dup); - return dup; - } - - /** Return map sorted by key */ - public ,V> LinkedHashMap sort(Map data) { - LinkedHashMap dup = new LinkedHashMap(); - List keys = new ArrayList(); - keys.addAll(data.keySet()); - Collections.sort(keys); - for (K k : keys) { - dup.put(k, data.get(k)); - } - return dup; - } -} diff --git a/tool/test/org/antlr/v4/test/rt/java/Java.test.stg b/tool/test/org/antlr/v4/test/rt/java/Java.test.stg deleted file mode 100644 index bb7917f25..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/Java.test.stg +++ /dev/null @@ -1,356 +0,0 @@ -TestFile(file) ::= << -package org.antlr.v4.test.rt.java; - -import org.junit.Test; -import static org.junit.Assert.*; - - -import org.antlr.v4.test.tool.ErrorQueue; - - -import org.antlr.v4.tool.Grammar; - - -public class Test extends BaseTest { - - }; separator="\n", wrap, anchor> - -} ->> - -LexerTestMethod(test) ::= << -/* this file and method are generated, any edit will be overwritten by the next generation */ -@Test -public void test() throws Exception { - = };separator="\\n\" +\n", wrap, anchor>"; - mkdir(tmpdir); - 
writeFile(tmpdir, ".g4", slave_); - };separator="\n", wrap, anchor> - StringBuilder sb = new StringBuilder(); - \\n");};separator="\n", wrap, anchor> - String grammar = sb.toString(); - - String found = execLexer(".g4", grammar, "Lexer", "", ); - assertEquals(\\n"};separator=" + \n", wrap, anchor>, found); - - assertEquals("", this.stderrDuringParse); - - assertNull(this.stderrDuringParse); - -} - ->> - -CompositeLexerTestMethod(test) ::= << - ->> - - -ParserTestMethod(test) ::= << -/* this file and method are generated, any edit will be overwritten by the next generation */ -@Test -public void test() throws Exception { - = };separator="\\n\" +\n", wrap, anchor>"; - - rawGenerateAndBuildRecognizer(".g4", slave_, null, ""); - - mkdir(tmpdir); - writeFile(tmpdir, ".g4", slave_); - - };separator="\n", wrap, anchor> - String grammar = };separator="\\n\" +\n", wrap, anchor>"; - - String found = execParser(".g4", grammar, "Parser", "Lexer", "", "", ); - assertEquals("", found); - - assertEquals("", this.stderrDuringParse); - - assertNull(this.stderrDuringParse); - -} - ->> - -CompositeParserTestMethod(test) ::= << - ->> - -AbstractParserTestMethod(test) ::= << -/* this file and method are generated, any edit will be overwritten by the next generation */ -String test(String input) throws Exception { - String grammar = };separator="\\n\" +\n", wrap, anchor>"; - return execParser(".g4", grammar, "Parser", "Lexer", "", input, ); -} - ->> - -ConcreteParserTestMethod(test) ::= << -/* this file and method are generated, any edit will be overwritten by the next generation */ -@Test -public void test() throws Exception { - String found = test(""); - assertEquals("", found); - - assertEquals("", this.stderrDuringParse); - - assertNull(this.stderrDuringParse); - -} - ->> - -writeln(s) ::= <);>> - -write(s) ::= <);>> - -False() ::= "false" - -True() ::= "true" - -Not(v) ::= "!" - -Assert(s) ::= <);>> - -Cast(t,v) ::= "(())" - -Append(a,b) ::= " + " - -Concat(a,b) ::= "" - -DeclareLocal(s,v) ::= "Object = ;" - -AssignLocal(s,v) ::= " = ;" - -InitIntMember(n,v) ::= <%int = ;%> - -InitBooleanMember(n,v) ::= <%boolean = ;%> - -GetMember(n) ::= <%this.%> - -SetMember(n,v) ::= <%this. = ;%> - -AddMember(n,v) ::= <%this. += ;%> - -PlusMember(v,n) ::= <% + this.%> - -MemberEquals(n,v) ::= <%this. == %> - -ModMemberEquals(n,m,v) ::= <%this. % == %> - -ModMemberNotEquals(n,m,v) ::= <%this. 
% != %> - -DumpDFA() ::= "this.dumpDFA();" - -Pass() ::= "" - -StringList() ::= "List\" - -BuildParseTrees() ::= "setBuildParseTree(true);" - -BailErrorStrategy() ::= <%setErrorHandler(new BailErrorStrategy());%> - -ToStringTree(s) ::= <%.toStringTree(this)%> - -Column() ::= "this.getCharPositionInLine()" - -Text() ::= "this.getText()" - -ValEquals(a,b) ::= <%==%> - -TextEquals(a) ::= <%this.getText().equals("")%> - -PlusText(a) ::= <%"" + this.getText()%> - -InputText() ::= "this._input.getText()" - -LTEquals(i, v) ::= <%this._input.LT().getText().equals()%> - -LANotEquals(i, v) ::= <%this._input.LA()!=%> - -TokenStartColumnEquals(i) ::= <%this._tokenStartCharPositionInLine==%> - -ImportListener(X) ::= "" - -GetExpectedTokenNames() ::= "this.getExpectedTokens().toString(this.tokenNames)" - -RuleInvocationStack() ::= "getRuleInvocationStack()" - -LL_EXACT_AMBIG_DETECTION() ::= <<_interp.setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);>> - -ParserPropertyMember() ::= << -@members { -boolean Property() { - return true; -} -} ->> - -PositionAdjustingLexer() ::= << - -@Override -public Token nextToken() { - if (!(_interp instanceof PositionAdjustingLexerATNSimulator)) { - _interp = new PositionAdjustingLexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache); - } - - return super.nextToken(); -} - -@Override -public Token emit() { - switch (_type) { - case TOKENS: - handleAcceptPositionForKeyword("tokens"); - break; - - case LABEL: - handleAcceptPositionForIdentifier(); - break; - - default: - break; - } - - return super.emit(); -} - -private boolean handleAcceptPositionForIdentifier() { - String tokenText = getText(); - int identifierLength = 0; - while (identifierLength \< tokenText.length() && isIdentifierChar(tokenText.charAt(identifierLength))) { - identifierLength++; - } - - if (getInputStream().index() > _tokenStartCharIndex + identifierLength) { - int offset = identifierLength - 1; - getInterpreter().resetAcceptPosition(getInputStream(), _tokenStartCharIndex + offset, _tokenStartLine, _tokenStartCharPositionInLine + offset); - return true; - } - - return false; -} - -private boolean handleAcceptPositionForKeyword(String keyword) { - if (getInputStream().index() > _tokenStartCharIndex + keyword.length()) { - int offset = keyword.length() - 1; - getInterpreter().resetAcceptPosition(getInputStream(), _tokenStartCharIndex + offset, _tokenStartLine, _tokenStartCharPositionInLine + offset); - return true; - } - - return false; -} - -@Override -public PositionAdjustingLexerATNSimulator getInterpreter() { - return (PositionAdjustingLexerATNSimulator)super.getInterpreter(); -} - -private static boolean isIdentifierChar(char c) { - return Character.isLetterOrDigit(c) || c == '_'; -} - -protected static class PositionAdjustingLexerATNSimulator extends LexerATNSimulator { - - public PositionAdjustingLexerATNSimulator(Lexer recog, ATN atn, - DFA[] decisionToDFA, - PredictionContextCache sharedContextCache) - { - super(recog, atn, decisionToDFA, sharedContextCache); - } - - protected void resetAcceptPosition(CharStream input, int index, int line, int charPositionInLine) { - input.seek(index); - this.line = line; - this.charPositionInLine = charPositionInLine; - consume(input); - } - -} - ->> - -BasicListener(X) ::= << -public static class LeafListener extends TBaseListener { - public void visitTerminal(TerminalNode node) { - System.out.println(node.getSymbol().getText()); - } -} ->> - -WalkListener(s) ::= << -ParseTreeWalker walker = new ParseTreeWalker(); -walker.walk(new 
LeafListener(), ); ->> - -TokenGetterListener(X) ::= << -public static class LeafListener extends TBaseListener { - public void exitA(TParser.AContext ctx) { - if (ctx.getChildCount()==2) - System.out.printf("%s %s %s",ctx.INT(0).getSymbol().getText(), - ctx.INT(1).getSymbol().getText(),ctx.INT()); - else - System.out.println(ctx.ID().getSymbol()); - } -} ->> - -RuleGetterListener(X) ::= << -public static class LeafListener extends TBaseListener { - public void exitA(TParser.AContext ctx) { - if (ctx.getChildCount()==2) { - System.out.printf("%s %s %s",ctx.b(0).start.getText(), - ctx.b(1).start.getText(),ctx.b().get(0).start.getText()); - } else - System.out.println(ctx.b(0).start.getText()); - } -} ->> - - -LRListener(X) ::= << -public static class LeafListener extends TBaseListener { - public void exitE(TParser.EContext ctx) { - if (ctx.getChildCount()==3) { - System.out.printf("%s %s %s\n",ctx.e(0).start.getText(), - ctx.e(1).start.getText(), ctx.e().get(0).start.getText()); - } else - System.out.println(ctx.INT().getSymbol().getText()); - } -} ->> - -LRWithLabelsListener(X) ::= << -public static class LeafListener extends TBaseListener { - public void exitCall(TParser.CallContext ctx) { - System.out.printf("%s %s",ctx.e().start.getText(),ctx.eList()); - } - public void exitInt(TParser.IntContext ctx) { - System.out.println(ctx.INT().getSymbol().getText()); - } -} ->> - -DeclareContextListGettersFunction() ::= << -void foo() { - SContext s = null; - List\ a = s.a(); - List\ b = s.b(); -} ->> - -Declare_foo() ::= <> - -Invoke_foo() ::= "this.foo();" - -Declare_pred() ::= < skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("M.g4", grammar, "M", "abc", false); - assertEquals("S.A\n" + - "[@0,0:0='a',<3>,1:0]\n" + - "[@1,1:1='b',<1>,1:1]\n" + - "[@2,2:2='c',<4>,1:2]\n" + - "[@3,3:2='',<-1>,1:3]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLexerDelegatorRuleOverridesDelegate() throws Exception { - String slave_S = "lexer grammar S;\n" + - "A : 'a' {System.out.println(\"S.A\");};\n" + - "B : 'b' {System.out.println(\"S.B\");};"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar M;\n"); - sb.append("import S;\n"); - sb.append("A : 'a' B {System.out.println(\"M.A\");};\n"); - sb.append("WS : (' '|'\\n') -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("M.g4", grammar, "M", "ab", false); - assertEquals("M.A\n" + - "[@0,0:1='ab',<1>,1:0]\n" + - "[@1,2:1='',<-1>,1:2]\n", found); - assertNull(this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestCompositeParsers.java b/tool/test/org/antlr/v4/test/rt/java/TestCompositeParsers.java deleted file mode 100644 index a0aa46019..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestCompositeParsers.java +++ /dev/null @@ -1,342 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.antlr.v4.test.tool.ErrorQueue; -import org.antlr.v4.tool.Grammar; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestCompositeParsers extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDelegatorInvokesDelegateRule() throws Exception { - String slave_S = "parser grammar S;\n" + - "a : B 
{System.out.println(\"S.a\");};"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "s : a ;\n" + - "B : 'b' ; // defines B from inherited token space\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "s", "b", false); - assertEquals("S.a\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testBringInLiteralsFromDelegate() throws Exception { - String slave_S = "parser grammar S;\n" + - "a : '=' 'a' {System.out.print(\"S.a\");};"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "s : a ;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "s", "=a", false); - assertEquals("S.a\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDelegatorInvokesDelegateRuleWithArgs() throws Exception { - String slave_S = "parser grammar S;\n" + - "a[int x] returns [int y] : B {System.out.print(\"S.a\");;$y=1000;};"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "s : label=a[3] {System.out.println($label.y);} ;\n" + - "B : 'b' ; // defines B from inherited token space\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "s", "b", false); - assertEquals("S.a1000\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDelegatorInvokesDelegateRuleWithReturnStruct() throws Exception { - String slave_S = "parser grammar S;\n" + - "a : B {System.out.print(\"S.a\");};"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "s : a {System.out.print($a.text);} ;\n" + - "B : 'b' ; // defines B from inherited token space\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "s", "b", false); - assertEquals("S.ab\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDelegatorAccessesDelegateMembers() throws Exception { - String slave_S = "parser grammar S;\n" + - "@members {\n" + - "public void foo() {System.out.println(\"foo\");}\n" + - "}\n" + - "a : B;"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M; // uses no rules from the import\n" + - "import S;\n" + - "s : 'b'{this.foo();}; // gS is import pointer\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "s", "b", false); - assertEquals("foo\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDelegatorInvokesFirstVersionOfDelegateRule() throws Exception { - String slave_S = "parser grammar S;\n" + - "a : B {System.out.println(\"S.a\");};\n" + - "b : B;"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String slave_T = "parser grammar T;\n" + - "a : B {System.out.println(\"T.a\");};"; - mkdir(tmpdir); - 
writeFile(tmpdir, "T.g4", slave_T); - - String grammar = "grammar M;\n" + - "import S,T;\n" + - "s : a ;\n" + - "B : 'b' ; // defines B from inherited token space\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "s", "b", false); - assertEquals("S.a\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDelegatesSeeSameTokenType() throws Exception { - String slave_S = "parser grammar S;\n" + - "tokens { A, B, C }\n" + - "x : A {System.out.println(\"S.x\");};"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String slave_T = "parser grammar S;\n" + - "tokens { C, B, A } // reverse order\n" + - "y : A {System.out.println(\"T.y\");};"; - mkdir(tmpdir); - writeFile(tmpdir, "T.g4", slave_T); - - String grammar = "// The lexer will create rules to match letters a, b, c.\n" + - "// The associated token types A, B, C must have the same value\n" + - "// and all import'd parsers. Since ANTLR regenerates all imports\n" + - "// for use with the delegator M, it can generate the same token type\n" + - "// mapping in each parser:\n" + - "// public static final int C=6;\n" + - "// public static final int EOF=-1;\n" + - "// public static final int B=5;\n" + - "// public static final int WS=7;\n" + - "// public static final int A=4;\n" + - "grammar M;\n" + - "import S,T;\n" + - "s : x y ; // matches AA, which should be 'aa'\n" + - "B : 'b' ; // another order: B, A, C\n" + - "A : 'a' ; \n" + - "C : 'c' ; \n" + - "WS : (' '|'\\n') -> skip ;"; - writeFile(tmpdir, "M.g4", grammar); - ErrorQueue equeue = new ErrorQueue(); - Grammar g = new Grammar(tmpdir+"/M.g4", grammar, equeue); - String expectedTokenIDToTypeMap = "{EOF=-1, B=1, A=2, C=3, WS=4}"; - String expectedStringLiteralToTypeMap = "{'a'=2, 'b'=1, 'c'=3}"; - String expectedTypeToTokenList = "[B, A, C, WS]"; - assertEquals(expectedTokenIDToTypeMap, g.tokenNameToTypeMap.toString()); - assertEquals(expectedStringLiteralToTypeMap, sort(g.stringLiteralToTypeMap).toString()); - assertEquals(expectedTypeToTokenList, realElements(g.typeToTokenList).toString()); - assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size()); - - String found = execParser("M.g4", grammar, "MParser", "MLexer", "s", "aa", false); - assertEquals("S.x\nT.y\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCombinedImportsCombined() throws Exception { - String slave_S = "parser grammar S;\n" + - "tokens { A, B, C }\n" + - "x : 'x' INT {System.out.println(\"S.x\");};\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "s : x INT;"; - writeFile(tmpdir, "M.g4", grammar); - ErrorQueue equeue = new ErrorQueue(); - new Grammar(tmpdir+"/M.g4", grammar, equeue); - assertEquals("unexpected errors: " + equeue, 0, equeue.errors.size()); - - String found = execParser("M.g4", grammar, "MParser", "MLexer", "s", "x 34 9", false); - assertEquals("S.x\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDelegatorRuleOverridesDelegate() throws Exception { - String slave_S = "parser grammar S;\n" + - "a : b {System.out.print(\"S.a\");};\n" + - 
"b : B ;"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "b : 'b'|'c';\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "a", "c", false); - assertEquals("S.a\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDelegatorRuleOverridesLookaheadInDelegate() throws Exception { - String slave_S = "parser grammar S;\n" + - "type_ : 'int' ;\n" + - "decl : type_ ID ';'\n" + - " | type_ ID init ';' {System.out.print(\"Decl: \" + $text);};\n" + - "init : '=' INT;"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "prog : decl ;\n" + - "type_ : 'int' | 'float' ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "prog", "float x = 3;", false); - assertEquals("Decl: floatx=3;\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDelegatorRuleOverridesDelegates() throws Exception { - String slave_S = "parser grammar S;\n" + - "a : b {System.out.println(\"S.a\");};\n" + - "b : 'b' ;\n" + - " "; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String slave_T = "parser grammar S;\n" + - "tokens { A }\n" + - "b : 'b' {System.out.println(\"T.b\");};"; - mkdir(tmpdir); - writeFile(tmpdir, "T.g4", slave_T); - - String grammar = "grammar M;\n" + - "import S, T;\n" + - "b : 'b'|'c' {System.out.println(\"M.b\");}|B|A;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "a", "c", false); - assertEquals("M.b\nS.a\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testKeywordVSIDOrder() throws Exception { - String slave_S = "lexer grammar S;\n" + - "ID : 'a'..'z'+;"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "a : A {System.out.println(\"M.a: \" + $A);};\n" + - "A : 'abc' {System.out.println(\"M.A\");};\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "a", "abc", false); - assertEquals("M.A\nM.a: [@0,0:2='abc',<1>,1:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testImportedRuleWithAction() throws Exception { - String slave_S = "parser grammar S;\n" + - "a @after {} : B;"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "s : a;\n" + - "B : 'b';\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "s", "b", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testImportedGrammarWithEmptyOptions() throws Exception { - String slave_S = "parser grammar S;\n" + - "options {}\n" + - "a : B;"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "s : 
a;\n" + - "B : 'b';\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "s", "b", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testImportLexerWithOnlyFragmentRules() throws Exception { - String slave_S = "lexer grammar S;\n" + - "fragment\n" + - "UNICODE_CLASS_Zs : '\\u0020' | '\\u00A0' | '\\u1680' | '\\u180E'\n" + - " | '\\u2000'..'\\u200A'\n" + - " | '\\u202F' | '\\u205F' | '\\u3000'\n" + - " ;"; - mkdir(tmpdir); - writeFile(tmpdir, "S.g4", slave_S); - - String grammar = "grammar M;\n" + - "import S;\n" + - "program : 'test' 'test';\n" + - "WS : (UNICODE_CLASS_Zs)+ -> skip;"; - String found = execParser("M.g4", grammar, "MParser", "MLexer", "program", "test test", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestFullContextParsing.java b/tool/test/org/antlr/v4/test/rt/java/TestFullContextParsing.java deleted file mode 100644 index 7f0a53739..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestFullContextParsing.java +++ /dev/null @@ -1,228 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestFullContextParsing extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAmbigYieldsCtxSensitiveDFA() throws Exception { - String grammar = "grammar T;\n" + - "s @after {this.dumpDFA();}\n" + - " : ID | ID {} ;\n" + - "ID : 'a'..'z'+;\n" + - "WS : (' '|'\\t'|'\\n')+ -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "abc", true); - assertEquals("Decision 0:\ns0-ID->:s1^=>1\n", found); - assertEquals("line 1:0 reportAttemptingFullContext d=0 (s), input='abc'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testCtxSensitiveDFA(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {this.dumpDFA();}\n" + - " : '$' a | '@' b ;\n" + - "a : e ID ;\n" + - "b : e INT ID ;\n" + - "e : INT | ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\t'|'\\n')+ -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, true); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCtxSensitiveDFA_1() throws Exception { - String found = testCtxSensitiveDFA("$ 34 abc"); - assertEquals("Decision 1:\ns0-INT->s1\ns1-ID->:s2^=>1\n", found); - assertEquals("line 1:5 reportAttemptingFullContext d=1 (e), input='34abc'\nline 1:2 reportContextSensitivity d=1 (e), input='34'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCtxSensitiveDFA_2() throws Exception { - String found = testCtxSensitiveDFA("@ 34 abc"); - assertEquals("Decision 1:\ns0-INT->s1\ns1-ID->:s2^=>1\n", found); - assertEquals("line 1:5 reportAttemptingFullContext d=1 (e), input='34abc'\nline 1:5 reportContextSensitivity d=1 (e), input='34abc'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void 
testCtxSensitiveDFATwoDiffInput() throws Exception { - String grammar = "grammar T;\n" + - "s @after {this.dumpDFA();}\n" + - " : ('$' a | '@' b)+ ;\n" + - "a : e ID ;\n" + - "b : e INT ID ;\n" + - "e : INT | ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\t'|'\\n')+ -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "$ 34 abc @ 34 abc", true); - assertEquals("Decision 2:\ns0-INT->s1\ns1-ID->:s2^=>1\n", found); - assertEquals("line 1:5 reportAttemptingFullContext d=2 (e), input='34abc'\nline 1:2 reportContextSensitivity d=2 (e), input='34'\nline 1:14 reportAttemptingFullContext d=2 (e), input='34abc'\nline 1:14 reportContextSensitivity d=2 (e), input='34abc'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSLLSeesEOFInLLGrammar() throws Exception { - String grammar = "grammar T;\n" + - "s @after {this.dumpDFA();}\n" + - " : a;\n" + - "a : e ID ;\n" + - "b : e INT ID ;\n" + - "e : INT | ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\t'|'\\n')+ -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "34 abc", true); - assertEquals("Decision 0:\ns0-INT->s1\ns1-ID->:s2^=>1\n", found); - assertEquals("line 1:3 reportAttemptingFullContext d=0 (e), input='34abc'\nline 1:0 reportContextSensitivity d=0 (e), input='34'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testFullContextIF_THEN_ELSEParse(String input) throws Exception { - String grammar = "grammar T;\n" + - "s \n" + - "@init {_interp.setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);}\n" + - "@after {this.dumpDFA();}\n" + - " : '{' stat* '}' ;\n" + - "stat: 'if' ID 'then' stat ('else' ID)?\n" + - " | 'return'\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\t'|'\\n')+ -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, true); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testFullContextIF_THEN_ELSEParse_1() throws Exception { - String found = testFullContextIF_THEN_ELSEParse("{ if x then return }"); - assertEquals("Decision 1:\ns0-'}'->:s1=>2\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testFullContextIF_THEN_ELSEParse_2() throws Exception { - String found = testFullContextIF_THEN_ELSEParse("{ if x then return else foo }"); - assertEquals("Decision 1:\ns0-'else'->:s1^=>1\n", found); - assertEquals("line 1:19 reportAttemptingFullContext d=1 (stat), input='else'\nline 1:19 reportContextSensitivity d=1 (stat), input='else'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testFullContextIF_THEN_ELSEParse_3() throws Exception { - String found = testFullContextIF_THEN_ELSEParse("{ if x then if y then return else foo }"); - assertEquals("Decision 1:\ns0-'}'->:s2=>2\ns0-'else'->:s1^=>1\n", found); - assertEquals("line 1:29 reportAttemptingFullContext d=1 (stat), input='else'\nline 1:38 reportAmbiguity d=1 (stat): ambigAlts={1, 2}, input='elsefoo}'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - 
public void testFullContextIF_THEN_ELSEParse_4() throws Exception { - String found = testFullContextIF_THEN_ELSEParse("{ if x then if y then return else foo else bar }"); - assertEquals("Decision 1:\ns0-'else'->:s1^=>1\n", found); - assertEquals("line 1:29 reportAttemptingFullContext d=1 (stat), input='else'\nline 1:38 reportContextSensitivity d=1 (stat), input='elsefooelse'\nline 1:38 reportAttemptingFullContext d=1 (stat), input='else'\nline 1:38 reportContextSensitivity d=1 (stat), input='else'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testFullContextIF_THEN_ELSEParse_5() throws Exception { - String found = testFullContextIF_THEN_ELSEParse("{ if x then return else foo\nif x then if y then return else foo }"); - assertEquals("Decision 1:\ns0-'}'->:s2=>2\ns0-'else'->:s1^=>1\n", found); - assertEquals("line 1:19 reportAttemptingFullContext d=1 (stat), input='else'\nline 1:19 reportContextSensitivity d=1 (stat), input='else'\nline 2:27 reportAttemptingFullContext d=1 (stat), input='else'\nline 2:36 reportAmbiguity d=1 (stat): ambigAlts={1, 2}, input='elsefoo}'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testFullContextIF_THEN_ELSEParse_6() throws Exception { - String found = testFullContextIF_THEN_ELSEParse("{ if x then return else foo\nif x then if y then return else foo }"); - assertEquals("Decision 1:\ns0-'}'->:s2=>2\ns0-'else'->:s1^=>1\n", found); - assertEquals("line 1:19 reportAttemptingFullContext d=1 (stat), input='else'\nline 1:19 reportContextSensitivity d=1 (stat), input='else'\nline 2:27 reportAttemptingFullContext d=1 (stat), input='else'\nline 2:36 reportAmbiguity d=1 (stat): ambigAlts={1, 2}, input='elsefoo}'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLoopsSimulateTailRecursion() throws Exception { - String grammar = "grammar T;\n" + - "prog\n" + - "@init {_interp.setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);}\n" + - " : expr_or_assign*;\n" + - "expr_or_assign\n" + - " : expr '++' {System.out.println(\"fail.\");}\n" + - " | expr {System.out.println(\"pass: \"+$expr.text);}\n" + - " ;\n" + - "expr: expr_primary ('<-' ID)?;\n" + - "expr_primary\n" + - " : '(' ID ')'\n" + - " | ID '(' ID ')'\n" + - " | ID\n" + - " ;\n" + - "ID : [a-z]+ ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "prog", "a(i)<-x", true); - assertEquals("pass: a(i)<-x\n", found); - assertEquals("line 1:3 reportAttemptingFullContext d=3 (expr_primary), input='a(i)'\nline 1:7 reportAmbiguity d=3 (expr_primary): ambigAlts={2, 3}, input='a(i)<-x'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAmbiguityNoLoop() throws Exception { - String grammar = "grammar T;\n" + - "prog\n" + - "@init {_interp.setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);}\n" + - " : expr expr {System.out.println(\"alt 1\");}\n" + - " | expr\n" + - " ;\n" + - "expr: '@'\n" + - " | ID '@'\n" + - " | ID\n" + - " ;\n" + - "ID : [a-z]+ ;\n" + - "WS : [ \\r\\n\\t]+ -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "prog", "a@", true); - assertEquals("alt 1\n", found); - assertEquals("line 1:2 reportAttemptingFullContext d=0 (prog), 
input='a@'\nline 1:2 reportAmbiguity d=0 (prog): ambigAlts={1, 2}, input='a@'\nline 1:2 reportAttemptingFullContext d=1 (expr), input='a@'\nline 1:2 reportContextSensitivity d=1 (expr), input='a@'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testExprAmbiguity(String input) throws Exception { - String grammar = "grammar T;\n" + - "s\n" + - "@init {_interp.setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);}\n" + - ": expr[0] {System.out.println($expr.ctx.toStringTree(this));};\n" + - " expr[int _p]\n" + - " : ID \n" + - " ( \n" + - " {5 >= $_p}? '*' expr[6]\n" + - " | {4 >= $_p}? '+' expr[5]\n" + - " )*\n" + - " ;\n" + - "ID : [a-zA-Z]+ ;\n" + - "WS : [ \\r\\n\\t]+ -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, true); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testExprAmbiguity_1() throws Exception { - String found = testExprAmbiguity("a+b"); - assertEquals("(expr a + (expr b))\n", found); - assertEquals("line 1:1 reportAttemptingFullContext d=1 (expr), input='+'\nline 1:2 reportContextSensitivity d=1 (expr), input='+b'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testExprAmbiguity_2() throws Exception { - String found = testExprAmbiguity("a+b*c"); - assertEquals("(expr a + (expr b * (expr c)))\n", found); - assertEquals("line 1:1 reportAttemptingFullContext d=1 (expr), input='+'\nline 1:2 reportContextSensitivity d=1 (expr), input='+b'\nline 1:3 reportAttemptingFullContext d=1 (expr), input='*'\nline 1:5 reportAmbiguity d=1 (expr): ambigAlts={1, 2}, input='*c'\n", this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestLeftRecursion.java b/tool/test/org/antlr/v4/test/rt/java/TestLeftRecursion.java deleted file mode 100644 index 82480f4fd..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestLeftRecursion.java +++ /dev/null @@ -1,1167 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestLeftRecursion extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testSimple(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : a ;\n" + - "a : a ID\n" + - " | ID\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSimple_1() throws Exception { - String found = testSimple("x"); - assertEquals("(s (a x))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSimple_2() throws Exception { - String found = testSimple("x y"); - assertEquals("(s (a (a x) y))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSimple_3() throws Exception { - String found = testSimple("x y z"); - assertEquals("(s (a (a (a x) y) 
z))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testDirectCallToLeftRecursiveRule(String input) throws Exception { - String grammar = "grammar T;\n" + - "a @after {System.out.println($ctx.toStringTree(this));} : a ID\n" + - " | ID\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "a", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDirectCallToLeftRecursiveRule_1() throws Exception { - String found = testDirectCallToLeftRecursiveRule("x"); - assertEquals("(a x)\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDirectCallToLeftRecursiveRule_2() throws Exception { - String found = testDirectCallToLeftRecursiveRule("x y"); - assertEquals("(a (a x) y)\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDirectCallToLeftRecursiveRule_3() throws Exception { - String found = testDirectCallToLeftRecursiveRule("x y z"); - assertEquals("(a (a (a x) y) z)\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSemPred() throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : a ;\n" + - "a : a {true}? ID\n" + - " | ID\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x y z", false); - assertEquals("(s (a (a (a x) y) z))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testTernaryExpr(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : e EOF ; // must indicate EOF can follow or 'a' won't match\n" + - "e : e '*' e\n" + - " | e '+' e\n" + - " | e '?' 
e ':' e\n" + - " | e '=' e\n" + - " | ID\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExpr_1() throws Exception { - String found = testTernaryExpr("a"); - assertEquals("(s (e a) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExpr_2() throws Exception { - String found = testTernaryExpr("a+b"); - assertEquals("(s (e (e a) + (e b)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExpr_3() throws Exception { - String found = testTernaryExpr("a*b"); - assertEquals("(s (e (e a) * (e b)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExpr_4() throws Exception { - String found = testTernaryExpr("a?b:c"); - assertEquals("(s (e (e a) ? (e b) : (e c)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExpr_5() throws Exception { - String found = testTernaryExpr("a=b=c"); - assertEquals("(s (e (e a) = (e (e b) = (e c))) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExpr_6() throws Exception { - String found = testTernaryExpr("a?b+c:d"); - assertEquals("(s (e (e a) ? (e (e b) + (e c)) : (e d)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExpr_7() throws Exception { - String found = testTernaryExpr("a?b=c:d"); - assertEquals("(s (e (e a) ? (e (e b) = (e c)) : (e d)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExpr_8() throws Exception { - String found = testTernaryExpr("a? b?c:d : e"); - assertEquals("(s (e (e a) ? (e (e b) ? (e c) : (e d)) : (e e)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExpr_9() throws Exception { - String found = testTernaryExpr("a?b: c?d:e"); - assertEquals("(s (e (e a) ? (e b) : (e (e c) ? (e d) : (e e))) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testExpressions(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : e EOF ; // must indicate EOF can follow\n" + - "e : e '.' ID\n" + - " | e '.' 
'this'\n" + - " | '-' e\n" + - " | e '*' e\n" + - " | e ('+'|'-') e\n" + - " | INT\n" + - " | ID\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testExpressions_1() throws Exception { - String found = testExpressions("a"); - assertEquals("(s (e a) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testExpressions_2() throws Exception { - String found = testExpressions("1"); - assertEquals("(s (e 1) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testExpressions_3() throws Exception { - String found = testExpressions("a-1"); - assertEquals("(s (e (e a) - (e 1)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testExpressions_4() throws Exception { - String found = testExpressions("a.b"); - assertEquals("(s (e (e a) . b) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testExpressions_5() throws Exception { - String found = testExpressions("a.this"); - assertEquals("(s (e (e a) . this) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testExpressions_6() throws Exception { - String found = testExpressions("-a"); - assertEquals("(s (e - (e a)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testExpressions_7() throws Exception { - String found = testExpressions("-a+b"); - assertEquals("(s (e (e - (e a)) + (e b)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testJavaExpressions(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : e EOF ; // must indicate EOF can follow\n" + - "expressionList\n" + - " : e (',' e)*\n" + - " ;\n" + - "e : '(' e ')'\n" + - " | 'this' \n" + - " | 'super'\n" + - " | INT\n" + - " | ID\n" + - " | type_ '.' 'class'\n" + - " | e '.' ID\n" + - " | e '.' 'this'\n" + - " | e '.' 'super' '(' expressionList? ')'\n" + - " | e '.' 'new' ID '(' expressionList? ')'\n" + - " | 'new' type_ ( '(' expressionList? ')' | ('[' e ']')+)\n" + - " | e '[' e ']'\n" + - " | '(' type_ ')' e\n" + - " | e ('++' | '--')\n" + - " | e '(' expressionList? ')'\n" + - " | ('+'|'-'|'++'|'--') e\n" + - " | ('~'|'!') e\n" + - " | e ('*'|'/'|'%') e\n" + - " | e ('+'|'-') e\n" + - " | e ('<<' | '>>>' | '>>') e\n" + - " | e ('<=' | '>=' | '>' | '<') e\n" + - " | e 'instanceof' e\n" + - " | e ('==' | '!=') e\n" + - " | e '&' e\n" + - " | e '^' e\n" + - " | e '|' e\n" + - " | e '&&' e\n" + - " | e '||' e\n" + - " | e '?' 
e ':' e\n" + - " |\n" + - " e ('='\n" + - " |'+='\n" + - " |'-='\n" + - " |'*='\n" + - " |'/='\n" + - " |'&='\n" + - " |'|='\n" + - " |'^='\n" + - " |'>>='\n" + - " |'>>>='\n" + - " |'<<='\n" + - " |'%=') e\n" + - " ;\n" + - "type_: ID \n" + - " | ID '[' ']'\n" + - " | 'int'\n" + - " | 'int' '[' ']' \n" + - " ;\n" + - "ID : ('a'..'z'|'A'..'Z'|'_'|'$')+;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_1() throws Exception { - String found = testJavaExpressions("a|b&c"); - assertEquals("(s (e (e a) | (e (e b) & (e c))) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_2() throws Exception { - String found = testJavaExpressions("(a|b)&c"); - assertEquals("(s (e (e ( (e (e a) | (e b)) )) & (e c)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_3() throws Exception { - String found = testJavaExpressions("a > b"); - assertEquals("(s (e (e a) > (e b)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_4() throws Exception { - String found = testJavaExpressions("a >> b"); - assertEquals("(s (e (e a) >> (e b)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_5() throws Exception { - String found = testJavaExpressions("a=b=c"); - assertEquals("(s (e (e a) = (e (e b) = (e c))) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_6() throws Exception { - String found = testJavaExpressions("a^b^c"); - assertEquals("(s (e (e a) ^ (e (e b) ^ (e c))) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_7() throws Exception { - String found = testJavaExpressions("(T)x"); - assertEquals("(s (e ( (type_ T) ) (e x)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_8() throws Exception { - String found = testJavaExpressions("new A().b"); - assertEquals("(s (e (e new (type_ A) ( )) . b) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_9() throws Exception { - String found = testJavaExpressions("(T)t.f()"); - assertEquals("(s (e (e ( (type_ T) ) (e (e t) . 
f)) ( )) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_10() throws Exception { - String found = testJavaExpressions("a.f(x)==T.c"); - assertEquals("(s (e (e (e (e a) . f) ( (expressionList (e x)) )) == (e (e T) . c)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_11() throws Exception { - String found = testJavaExpressions("a.f().g(x,1)"); - assertEquals("(s (e (e (e (e (e a) . f) ( )) . g) ( (expressionList (e x) , (e 1)) )) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testJavaExpressions_12() throws Exception { - String found = testJavaExpressions("new T[((n-1) * x) + 1]"); - assertEquals("(s (e new (type_ T) [ (e (e ( (e (e ( (e (e n) - (e 1)) )) * (e x)) )) + (e 1)) ]) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testDeclarations(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : declarator EOF ; // must indicate EOF can follow\n" + - "declarator\n" + - " : declarator '[' e ']'\n" + - " | declarator '[' ']'\n" + - " | declarator '(' ')'\n" + - " | '*' declarator // binds less tight than suffixes\n" + - " | '(' declarator ')'\n" + - " | ID\n" + - " ;\n" + - "e : INT ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDeclarations_1() throws Exception { - String found = testDeclarations("a"); - assertEquals("(s (declarator a) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDeclarations_2() throws Exception { - String found = testDeclarations("*a"); - assertEquals("(s (declarator * (declarator a)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDeclarations_3() throws Exception { - String found = testDeclarations("**a"); - assertEquals("(s (declarator * (declarator * (declarator a))) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDeclarations_4() throws Exception { - String found = testDeclarations("a[3]"); - assertEquals("(s (declarator (declarator a) [ (e 3) ]) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDeclarations_5() throws Exception { - String found = testDeclarations("b[]"); - assertEquals("(s (declarator (declarator b) [ ]) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void 
testDeclarations_6() throws Exception { - String found = testDeclarations("(a)"); - assertEquals("(s (declarator ( (declarator a) )) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDeclarations_7() throws Exception { - String found = testDeclarations("a[]()"); - assertEquals("(s (declarator (declarator (declarator a) [ ]) ( )) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDeclarations_8() throws Exception { - String found = testDeclarations("a[][]"); - assertEquals("(s (declarator (declarator (declarator a) [ ]) [ ]) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDeclarations_9() throws Exception { - String found = testDeclarations("*a[]"); - assertEquals("(s (declarator * (declarator (declarator a) [ ])) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDeclarations_10() throws Exception { - String found = testDeclarations("(*a)[]"); - assertEquals("(s (declarator (declarator ( (declarator * (declarator a)) )) [ ]) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testReturnValueAndActions(String input) throws Exception { - String grammar = "grammar T;\n" + - "s : e {System.out.println($e.v);}; \n" + - "e returns [int v, List ignored]\n" + - " : a=e '*' b=e {$v = $a.v * $b.v;}\n" + - " | a=e '+' b=e {$v = $a.v + $b.v;}\n" + - " | INT {$v = $INT.int;}\n" + - " | '(' x=e ')' {$v = $x.v;}\n" + - " ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActions_1() throws Exception { - String found = testReturnValueAndActions("4"); - assertEquals("4\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActions_2() throws Exception { - String found = testReturnValueAndActions("1+2"); - assertEquals("3\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActions_3() throws Exception { - String found = testReturnValueAndActions("1+2*3"); - assertEquals("7\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActions_4() throws Exception { - String found = testReturnValueAndActions("(1+2)*3"); - assertEquals("9\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testLabelsOnOpSubrule(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : e;\n" + - "e : 
a=e op=('*'|'/') b=e {}\n" + - " | INT {}\n" + - " | '(' x=e ')' {}\n" + - " ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLabelsOnOpSubrule_1() throws Exception { - String found = testLabelsOnOpSubrule("4"); - assertEquals("(s (e 4))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLabelsOnOpSubrule_2() throws Exception { - String found = testLabelsOnOpSubrule("1*2/3"); - assertEquals("(s (e (e (e 1) * (e 2)) / (e 3)))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLabelsOnOpSubrule_3() throws Exception { - String found = testLabelsOnOpSubrule("(1/2)*3"); - assertEquals("(s (e (e ( (e (e 1) / (e 2)) )) * (e 3)))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testReturnValueAndActionsAndLabels(String input) throws Exception { - String grammar = "grammar T;\n" + - "s : q=e {System.out.println($e.v);}; \n" + - "e returns [int v]\n" + - " : a=e op='*' b=e {$v = $a.v * $b.v;} # mult\n" + - " | a=e '+' b=e {$v = $a.v + $b.v;} # add\n" + - " | INT {$v = $INT.int;} # anInt\n" + - " | '(' x=e ')' {$v = $x.v;} # parens\n" + - " | x=e '++' {$v = $x.v+1;} # inc\n" + - " | e '--' # dec\n" + - " | ID {$v = 3;} # anID\n" + - " ; \n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsAndLabels_1() throws Exception { - String found = testReturnValueAndActionsAndLabels("4"); - assertEquals("4\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsAndLabels_2() throws Exception { - String found = testReturnValueAndActionsAndLabels("1+2"); - assertEquals("3\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsAndLabels_3() throws Exception { - String found = testReturnValueAndActionsAndLabels("1+2*3"); - assertEquals("7\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsAndLabels_4() throws Exception { - String found = testReturnValueAndActionsAndLabels("i++*3"); - assertEquals("12\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testMultipleAlternativesWithCommonLabel(String input) throws Exception { - String grammar = "grammar T;\n" + - "s : e {System.out.println($e.v);}; \n" + - "e returns [int v]\n" + - " : e '*' e {$v = ((BinaryContext)$ctx).e(0).v * ((BinaryContext)$ctx).e(1).v;} # binary\n" + - " | e '+' e {$v = 
((BinaryContext)$ctx).e(0).v + ((BinaryContext)$ctx).e(1).v;} # binary\n" + - " | INT {$v = $INT.int;} # anInt\n" + - " | '(' e ')' {$v = $e.v;} # parens\n" + - " | left=e INC {assert(((UnaryContext)$ctx).INC() != null);$v = $left.v + 1;} # unary\n" + - " | left=e DEC {assert(((UnaryContext)$ctx).DEC() != null);$v = $left.v - 1;} # unary\n" + - " | ID {$v = 3;} # anID\n" + - " ; \n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+ ;\n" + - "INC : '++' ;\n" + - "DEC : '--' ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleAlternativesWithCommonLabel_1() throws Exception { - String found = testMultipleAlternativesWithCommonLabel("4"); - assertEquals("4\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleAlternativesWithCommonLabel_2() throws Exception { - String found = testMultipleAlternativesWithCommonLabel("1+2"); - assertEquals("3\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleAlternativesWithCommonLabel_3() throws Exception { - String found = testMultipleAlternativesWithCommonLabel("1+2*3"); - assertEquals("7\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleAlternativesWithCommonLabel_4() throws Exception { - String found = testMultipleAlternativesWithCommonLabel("i++*3"); - assertEquals("12\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testPrefixOpWithActionAndLabel(String input) throws Exception { - String grammar = "grammar T;\n" + - "s : e {System.out.println($e.result);} ;\n" + - "e returns [String result]\n" + - " : ID '=' e1=e {$result = \"(\" + $ID.text + \"=\" + $e1.result + \")\";}\n" + - " | ID {$result = $ID.text;}\n" + - " | e1=e '+' e2=e {$result = \"(\" + $e1.result + \"+\" + $e2.result + \")\";}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPrefixOpWithActionAndLabel_1() throws Exception { - String found = testPrefixOpWithActionAndLabel("a"); - assertEquals("a\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPrefixOpWithActionAndLabel_2() throws Exception { - String found = testPrefixOpWithActionAndLabel("a+b"); - assertEquals("(a+b)\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPrefixOpWithActionAndLabel_3() throws Exception { - String found = testPrefixOpWithActionAndLabel("a=b+c"); - assertEquals("((a=b)+c)\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next 
generation */ - String testAmbigLR(String input) throws Exception { - String grammar = "grammar Expr;\n" + - "prog: stat ;\n" + - "stat: expr NEWLINE # printExpr\n" + - " | ID '=' expr NEWLINE # assign\n" + - " | NEWLINE # blank\n" + - " ;\n" + - "expr: expr ('*'|'/') expr # MulDiv\n" + - " | expr ('+'|'-') expr # AddSub\n" + - " | INT # int\n" + - " | ID # id\n" + - " | '(' expr ')' # parens\n" + - " ;\n" + - "\n" + - "MUL : '*' ; // assigns token name to '*' used above in grammar\n" + - "DIV : '/' ;\n" + - "ADD : '+' ;\n" + - "SUB : '-' ;\n" + - "ID : [a-zA-Z]+ ; // match identifiers\n" + - "INT : [0-9]+ ; // match integers\n" + - "NEWLINE:'\\r'? '\\n' ; // return newlines to parser (is end-statement signal)\n" + - "WS : [ \\t]+ -> skip ; // toss out whitespace"; - return execParser("Expr.g4", grammar, "ExprParser", "ExprLexer", "prog", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAmbigLR_1() throws Exception { - String found = testAmbigLR("1\n"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAmbigLR_2() throws Exception { - String found = testAmbigLR("a = 5\n"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAmbigLR_3() throws Exception { - String found = testAmbigLR("b = 6\n"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAmbigLR_4() throws Exception { - String found = testAmbigLR("a+b*2\n"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAmbigLR_5() throws Exception { - String found = testAmbigLR("(1+2)*3\n"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testWhitespaceInfluence(String input) throws Exception { - String grammar = "grammar Expr;\n" + - "prog : expression EOF;\n" + - "expression\n" + - " : ID '(' expression (',' expression)* ')' # doFunction\n" + - " | '(' expression ')' # doParenthesis\n" + - " | '!' 
expression # doNot\n" + - " | '-' expression # doNegate\n" + - " | '+' expression # doPositiv\n" + - " | expression '^' expression # doPower\n" + - " | expression '*' expression # doMultipy\n" + - " | expression '/' expression # doDivide\n" + - " | expression '%' expression # doModulo\n" + - " | expression '-' expression # doMinus\n" + - " | expression '+' expression # doPlus\n" + - " | expression '=' expression # doEqual\n" + - " | expression '!=' expression # doNotEqual\n" + - " | expression '>' expression # doGreather\n" + - " | expression '>=' expression # doGreatherEqual\n" + - " | expression '<' expression # doLesser\n" + - " | expression '<=' expression # doLesserEqual\n" + - " | expression K_IN '(' expression (',' expression)* ')' # doIn\n" + - " | expression ( '&' | K_AND) expression # doAnd\n" + - " | expression ( '|' | K_OR) expression # doOr\n" + - " | '[' expression (',' expression)* ']' # newArray\n" + - " | K_TRUE # newTrueBoolean\n" + - " | K_FALSE # newFalseBoolean\n" + - " | NUMBER # newNumber\n" + - " | DATE # newDateTime\n" + - " | ID # newIdentifier\n" + - " | SQ_STRING # newString\n" + - " | K_NULL # newNull\n" + - " ;\n" + - "\n" + - "// Fragments\n" + - "fragment DIGIT : '0' .. '9'; \n" + - "fragment UPPER : 'A' .. 'Z';\n" + - "fragment LOWER : 'a' .. 'z';\n" + - "fragment LETTER : LOWER | UPPER;\n" + - "fragment WORD : LETTER | '_' | '$' | '#' | '.';\n" + - "fragment ALPHANUM : WORD | DIGIT; \n" + - "\n" + - "// Tokens\n" + - "ID : LETTER ALPHANUM*;\n" + - "NUMBER : DIGIT+ ('.' DIGIT+)? (('e'|'E')('+'|'-')? DIGIT+)?;\n" + - "DATE : '\\'' DIGIT DIGIT DIGIT DIGIT '-' DIGIT DIGIT '-' DIGIT DIGIT (' ' DIGIT DIGIT ':' DIGIT DIGIT ':' DIGIT DIGIT ('.' DIGIT+)?)? '\\'';\n" + - "SQ_STRING : '\\'' ('\\'\\'' | ~'\\'')* '\\'';\n" + - "DQ_STRING : '\\\"' ('\\\\\"' | ~'\\\"')* '\\\"';\n" + - "WS : [ \\t\\n\\r]+ -> skip ;\n" + - "COMMENTS : ('/*' .*? 
'*/' | '//' ~'\\n'* '\\n' ) -> skip;"; - return execParser("Expr.g4", grammar, "ExprParser", "ExprLexer", "prog", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testWhitespaceInfluence_1() throws Exception { - String found = testWhitespaceInfluence("Test(1,3)"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testWhitespaceInfluence_2() throws Exception { - String found = testWhitespaceInfluence("Test(1, 3)"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPrecedenceFilterConsidersContext() throws Exception { - String grammar = "grammar T;\n" + - "prog \n" + - "@after {System.out.println($ctx.toStringTree(this));}\n" + - ": statement* EOF {};\n" + - "statement: letterA | statement letterA 'b' ;\n" + - "letterA: 'a';"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "prog", "aa", false); - assertEquals("(prog (statement (letterA a)) (statement (letterA a)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testMultipleActions(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : e ;\n" + - "e : a=e op=('*'|'/') b=e {}{}\n" + - " | INT {}{}\n" + - " | '(' x=e ')' {}{}\n" + - " ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleActions_1() throws Exception { - String found = testMultipleActions("4"); - assertEquals("(s (e 4))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleActions_2() throws Exception { - String found = testMultipleActions("1*2/3"); - assertEquals("(s (e (e (e 1) * (e 2)) / (e 3)))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleActions_3() throws Exception { - String found = testMultipleActions("(1/2)*3"); - assertEquals("(s (e (e ( (e (e 1) / (e 2)) )) * (e 3)))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testMultipleActionsPredicatesOptions(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : e ;\n" + - "e : a=e op=('*'|'/') b=e {}{true}?\n" + - " | a=e op=('+'|'-') b=e {}{true}?\n" + - " | INT {}{}\n" + - " | '(' x=e ')' {}{}\n" + - " ;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleActionsPredicatesOptions_1() throws Exception { - String found = testMultipleActionsPredicatesOptions("4"); 
- assertEquals("(s (e 4))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleActionsPredicatesOptions_2() throws Exception { - String found = testMultipleActionsPredicatesOptions("1*2/3"); - assertEquals("(s (e (e (e 1) * (e 2)) / (e 3)))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleActionsPredicatesOptions_3() throws Exception { - String found = testMultipleActionsPredicatesOptions("(1/2)*3"); - assertEquals("(s (e (e ( (e (e 1) / (e 2)) )) * (e 3)))\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSemPredFailOption() throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : a ;\n" + - "a : a ID {false}?\n" + - " | ID\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x y z", false); - assertEquals("(s (a (a x) y z))\n", found); - assertEquals("line 1:4 rule a custom message\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testTernaryExprExplicitAssociativity(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : e EOF; // must indicate EOF can follow or 'a' won't match\n" + - "e : e '*' e\n" + - " | e '+' e\n" + - " | e '?' e ':' e\n" + - " | e '=' e\n" + - " | ID\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExprExplicitAssociativity_1() throws Exception { - String found = testTernaryExprExplicitAssociativity("a"); - assertEquals("(s (e a) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExprExplicitAssociativity_2() throws Exception { - String found = testTernaryExprExplicitAssociativity("a+b"); - assertEquals("(s (e (e a) + (e b)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExprExplicitAssociativity_3() throws Exception { - String found = testTernaryExprExplicitAssociativity("a*b"); - assertEquals("(s (e (e a) * (e b)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExprExplicitAssociativity_4() throws Exception { - String found = testTernaryExprExplicitAssociativity("a?b:c"); - assertEquals("(s (e (e a) ? 
(e b) : (e c)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExprExplicitAssociativity_5() throws Exception { - String found = testTernaryExprExplicitAssociativity("a=b=c"); - assertEquals("(s (e (e a) = (e (e b) = (e c))) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExprExplicitAssociativity_6() throws Exception { - String found = testTernaryExprExplicitAssociativity("a?b+c:d"); - assertEquals("(s (e (e a) ? (e (e b) + (e c)) : (e d)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExprExplicitAssociativity_7() throws Exception { - String found = testTernaryExprExplicitAssociativity("a?b=c:d"); - assertEquals("(s (e (e a) ? (e (e b) = (e c)) : (e d)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExprExplicitAssociativity_8() throws Exception { - String found = testTernaryExprExplicitAssociativity("a? b?c:d : e"); - assertEquals("(s (e (e a) ? (e (e b) ? (e c) : (e d)) : (e e)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTernaryExprExplicitAssociativity_9() throws Exception { - String found = testTernaryExprExplicitAssociativity("a?b: c?d:e"); - assertEquals("(s (e (e a) ? (e b) : (e (e c) ? 
(e d) : (e e))) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testReturnValueAndActionsList1(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : expr EOF;\n" + - "expr:\n" + - " a=expr '*' a=expr #Factor\n" + - " | b+=expr (',' b+=expr)* '>>' c=expr #Send\n" + - " | ID #JustId //semantic check on modifiers\n" + - ";\n" + - "\n" + - "ID : ('a'..'z'|'A'..'Z'|'_')\n" + - " ('a'..'z'|'A'..'Z'|'0'..'9'|'_')*\n" + - ";\n" + - "\n" + - "WS : [ \\t\\n]+ -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsList1_1() throws Exception { - String found = testReturnValueAndActionsList1("a*b"); - assertEquals("(s (expr (expr a) * (expr b)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsList1_2() throws Exception { - String found = testReturnValueAndActionsList1("a,c>>x"); - assertEquals("(s (expr (expr a) , (expr c) >> (expr x)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsList1_3() throws Exception { - String found = testReturnValueAndActionsList1("x"); - assertEquals("(s (expr x) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsList1_4() throws Exception { - String found = testReturnValueAndActionsList1("a*b,c,x*y>>r"); - assertEquals("(s (expr (expr (expr a) * (expr b)) , (expr c) , (expr (expr x) * (expr y)) >> (expr r)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testReturnValueAndActionsList2(String input) throws Exception { - String grammar = "grammar T;\n" + - "s @after {System.out.println($ctx.toStringTree(this));} : expr EOF;\n" + - "expr:\n" + - " a=expr '*' a=expr #Factor\n" + - " | b+=expr ',' b+=expr #Comma\n" + - " | b+=expr '>>' c=expr #Send\n" + - " | ID #JustId //semantic check on modifiers\n" + - " ;\n" + - "ID : ('a'..'z'|'A'..'Z'|'_')\n" + - " ('a'..'z'|'A'..'Z'|'0'..'9'|'_')*\n" + - ";\n" + - "WS : [ \\t\\n]+ -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsList2_1() throws Exception { - String found = testReturnValueAndActionsList2("a*b"); - assertEquals("(s (expr (expr a) * (expr b)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsList2_2() throws Exception { - String found = testReturnValueAndActionsList2("a,c>>x"); - assertEquals("(s (expr (expr (expr a) , (expr c)) >> (expr x)) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be 
overwritten by the next generation */ - @Test - public void testReturnValueAndActionsList2_3() throws Exception { - String found = testReturnValueAndActionsList2("x"); - assertEquals("(s (expr x) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReturnValueAndActionsList2_4() throws Exception { - String found = testReturnValueAndActionsList2("a*b,c,x*y>>r"); - assertEquals("(s (expr (expr (expr (expr (expr a) * (expr b)) , (expr c)) , (expr (expr x) * (expr y))) >> (expr r)) )\n", found); - assertNull(this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestLexerErrors.java b/tool/test/org/antlr/v4/test/rt/java/TestLexerErrors.java deleted file mode 100644 index e268a4ec5..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestLexerErrors.java +++ /dev/null @@ -1,178 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestLexerErrors extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testInvalidCharAtStart() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : 'a' 'b' ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "x", false); - assertEquals("[@0,1:0='',<-1>,1:1]\n", found); - assertEquals("line 1:0 token recognition error at: 'x'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testStringsEmbeddedInActions_1() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("ACTION2 : '[' (STRING | ~'\"')*? ']';\n"); - sb.append("STRING : '\"' ('\\\"' | .)*? '\"';\n"); - sb.append("WS : [ \\t\\r\\n]+ -> skip;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "[\"foo\"]", false); - assertEquals("[@0,0:6='[\"foo\"]',<1>,1:0]\n" + - "[@1,7:6='',<-1>,1:7]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testStringsEmbeddedInActions_2() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("ACTION2 : '[' (STRING | ~'\"')*? ']';\n"); - sb.append("STRING : '\"' ('\\\"' | .)*? 
'\"';\n"); - sb.append("WS : [ \\t\\r\\n]+ -> skip;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "[\"foo]", false); - assertEquals("[@0,6:5='',<-1>,1:6]\n", found); - assertEquals("line 1:0 token recognition error at: '[\"foo]'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testEnforcedGreedyNestedBrances_1() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("ACTION : '{' (ACTION | ~[{}])* '}';\n"); - sb.append("WS : [ \\r\\n\\t]+ -> skip;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "{ { } }", false); - assertEquals("[@0,0:6='{ { } }',<1>,1:0]\n" + - "[@1,7:6='',<-1>,1:7]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testEnforcedGreedyNestedBrances_2() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("ACTION : '{' (ACTION | ~[{}])* '}';\n"); - sb.append("WS : [ \\r\\n\\t]+ -> skip;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "{ { }", false); - assertEquals("[@0,5:4='',<-1>,1:5]\n", found); - assertEquals("line 1:0 token recognition error at: '{ { }'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testInvalidCharAtStartAfterDFACache() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : 'a' 'b' ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "abx", false); - assertEquals("[@0,0:1='ab',<1>,1:0]\n" + - "[@1,3:2='',<-1>,1:3]\n", found); - assertEquals("line 1:2 token recognition error at: 'x'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testInvalidCharInToken() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : 'a' 'b' ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "ax", false); - assertEquals("[@0,2:1='',<-1>,1:2]\n", found); - assertEquals("line 1:0 token recognition error at: 'ax'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testInvalidCharInTokenAfterDFACache() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : 'a' 'b' ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "abax", false); - assertEquals("[@0,0:1='ab',<1>,1:0]\n" + - "[@1,4:3='',<-1>,1:4]\n", found); - assertEquals("line 1:2 token recognition error at: 'ax'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDFAToATNThatFailsBackToDFA() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : 'ab' ;\n"); - sb.append("B : 'abc' ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "ababx", false); - assertEquals("[@0,0:1='ab',<1>,1:0]\n" + - 
"[@1,2:3='ab',<1>,1:2]\n" + - "[@2,5:4='',<-1>,1:5]\n", found); - assertEquals("line 1:4 token recognition error at: 'x'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDFAToATNThatMatchesThenFailsInATN() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : 'ab' ;\n"); - sb.append("B : 'abc' ;\n"); - sb.append("C : 'abcd' ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "ababcx", false); - assertEquals("[@0,0:1='ab',<1>,1:0]\n" + - "[@1,2:4='abc',<2>,1:2]\n" + - "[@2,6:5='',<-1>,1:6]\n", found); - assertEquals("line 1:5 token recognition error at: 'x'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testErrorInMiddle() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : 'abc' ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "abx", false); - assertEquals("[@0,3:2='',<-1>,1:3]\n", found); - assertEquals("line 1:0 token recognition error at: 'abx'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLexerExecDFA() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("grammar L;\n"); - sb.append("start : ID ':' expr;\n"); - sb.append("expr : primary expr? {} | expr '->' ID;\n"); - sb.append("primary : ID;\n"); - sb.append("ID : [a-z]+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "LLexer", "x : x", false); - assertEquals("[@0,0:0='x',<3>,1:0]\n" + - "[@1,2:2=':',<1>,1:2]\n" + - "[@2,4:4='x',<3>,1:4]\n" + - "[@3,5:4='',<-1>,1:5]\n", found); - assertEquals("line 1:1 token recognition error at: ' '\nline 1:3 token recognition error at: ' '\n", this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestLexerExec.java b/tool/test/org/antlr/v4/test/rt/java/TestLexerExec.java deleted file mode 100644 index 23c7dac78..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestLexerExec.java +++ /dev/null @@ -1,4760 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestLexerExec extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testQuoteTranslation() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("QUOTE : '\"' ; // make sure this compiles\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "\"", false); - assertEquals("[@0,0:0='\"',<1>,1:0]\n" + - "[@1,1:0='',<-1>,1:1]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testRefToRuleDoesNotSetTokenNorEmitAnother() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : '-' I ;\n"); - sb.append("I : '0'..'9'+ ;\n"); - sb.append("WS : (' '|'\\n') -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "34 -21 3", false); - 
assertEquals("[@0,0:1='34',<2>,1:0]\n" + - "[@1,3:5='-21',<1>,1:3]\n" + - "[@2,7:7='3',<2>,1:7]\n" + - "[@3,8:7='',<-1>,1:8]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSlashes() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("Backslash : '\\\\';\n"); - sb.append("Slash : '/';\n"); - sb.append("Vee : '\\\\/';\n"); - sb.append("Wedge : '/\\\\';\n"); - sb.append("WS : [ \\t] -> skip;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "\\ / \\/ /\\", false); - assertEquals("[@0,0:0='\\',<1>,1:0]\n" + - "[@1,2:2='/',<2>,1:2]\n" + - "[@2,4:5='\\/',<3>,1:4]\n" + - "[@3,7:8='/\\',<4>,1:7]\n" + - "[@4,9:8='',<-1>,1:9]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testParentheses() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("START_BLOCK: '-.-.-';\n"); - sb.append("ID : (LETTER SEPARATOR) (LETTER SEPARATOR)+;\n"); - sb.append("fragment LETTER: L_A|L_K;\n"); - sb.append("fragment L_A: '.-';\n"); - sb.append("fragment L_K: '-.-';\n"); - sb.append("SEPARATOR: '!';\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "-.-.-!", false); - assertEquals("[@0,0:4='-.-.-',<1>,1:0]\n" + - "[@1,5:5='!',<3>,1:5]\n" + - "[@2,6:5='',<-1>,1:6]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNonGreedyTermination1() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("STRING : '\"' ('\"\"' | .)*? '\"';\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "\"hi\"\"mom\"", false); - assertEquals("[@0,0:3='\"hi\"',<1>,1:0]\n" + - "[@1,4:8='\"mom\"',<1>,1:4]\n" + - "[@2,9:8='',<-1>,1:9]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNonGreedyTermination2() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("STRING : '\"' ('\"\"' | .)+? '\"';\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "\"\"\"mom\"", false); - assertEquals("[@0,0:6='\"\"\"mom\"',<1>,1:0]\n" + - "[@1,7:6='',<-1>,1:7]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testGreedyOptional() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("CMT : '//' .*? 
'\\n' CMT?;\n"); - sb.append("WS : (' '|'\\t')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false); - assertEquals("[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" + - "[@1,14:13='',<-1>,3:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNonGreedyOptional() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("CMT : '//' .*? '\\n' CMT??;\n"); - sb.append("WS : (' '|'\\t')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false); - assertEquals("[@0,0:6='//blah\\n',<1>,1:0]\n" + - "[@1,7:13='//blah\\n',<1>,2:0]\n" + - "[@2,14:13='',<-1>,3:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testGreedyClosure() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("CMT : '//' .*? '\\n' CMT*;\n"); - sb.append("WS : (' '|'\\t')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false); - assertEquals("[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" + - "[@1,14:13='',<-1>,3:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNonGreedyClosure() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("CMT : '//' .*? '\\n' CMT*?;\n"); - sb.append("WS : (' '|'\\t')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false); - assertEquals("[@0,0:6='//blah\\n',<1>,1:0]\n" + - "[@1,7:13='//blah\\n',<1>,2:0]\n" + - "[@2,14:13='',<-1>,3:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testGreedyPositiveClosure() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("CMT : ('//' .*? '\\n')+;\n"); - sb.append("WS : (' '|'\\t')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false); - assertEquals("[@0,0:13='//blah\\n//blah\\n',<1>,1:0]\n" + - "[@1,14:13='',<-1>,3:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNonGreedyPositiveClosure() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("CMT : ('//' .*? 
'\\n')+?;\n"); - sb.append("WS : (' '|'\\t')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "//blah\n//blah\n", false); - assertEquals("[@0,0:6='//blah\\n',<1>,1:0]\n" + - "[@1,7:13='//blah\\n',<1>,2:0]\n" + - "[@2,14:13='',<-1>,3:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testRecursiveLexerRuleRefWithWildcardStar_1() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("CMT : '/*' (CMT | .)*? '*/' ;\n"); - sb.append("WS : (' '|'\\n')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "/* ick */\n/* /* */\n/* /*nested*/ */\n", false); - assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" + - "[@1,9:9='\\n',<2>,1:9]\n" + - "[@2,10:34='/* /* */\\n/* /*nested*/ */',<1>,2:0]\n" + - "[@3,35:35='\\n',<2>,3:16]\n" + - "[@4,36:35='',<-1>,4:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testRecursiveLexerRuleRefWithWildcardStar_2() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("CMT : '/*' (CMT | .)*? '*/' ;\n"); - sb.append("WS : (' '|'\\n')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "/* ick */x\n/* /* */x\n/* /*nested*/ */x\n", false); - assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" + - "[@1,10:10='\\n',<2>,1:10]\n" + - "[@2,11:36='/* /* */x\\n/* /*nested*/ */',<1>,2:0]\n" + - "[@3,38:38='\\n',<2>,3:17]\n" + - "[@4,39:38='',<-1>,4:0]\n", found); - assertEquals("line 1:9 token recognition error at: 'x'\nline 3:16 token recognition error at: 'x'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testRecursiveLexerRuleRefWithWildcardPlus_1() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("CMT : '/*' (CMT | .)+? '*/' ;\n"); - sb.append("WS : (' '|'\\n')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "/* ick */\n/* /* */\n/* /*nested*/ */\n", false); - assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" + - "[@1,9:9='\\n',<2>,1:9]\n" + - "[@2,10:34='/* /* */\\n/* /*nested*/ */',<1>,2:0]\n" + - "[@3,35:35='\\n',<2>,3:16]\n" + - "[@4,36:35='',<-1>,4:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testRecursiveLexerRuleRefWithWildcardPlus_2() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("CMT : '/*' (CMT | .)+? 
'*/' ;\n"); - sb.append("WS : (' '|'\\n')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "/* ick */x\n/* /* */x\n/* /*nested*/ */x\n", false); - assertEquals("[@0,0:8='/* ick */',<1>,1:0]\n" + - "[@1,10:10='\\n',<2>,1:10]\n" + - "[@2,11:36='/* /* */x\\n/* /*nested*/ */',<1>,2:0]\n" + - "[@3,38:38='\\n',<2>,3:17]\n" + - "[@4,39:38='',<-1>,4:0]\n", found); - assertEquals("line 1:9 token recognition error at: 'x'\nline 3:16 token recognition error at: 'x'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testActionPlacement() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("I : ({System.out.println(\"stuff fail: \" + this.getText());} 'a'\n"); - sb.append("| {System.out.println(\"stuff0: \" + this.getText());}\n"); - sb.append(" 'a' {System.out.println(\"stuff1: \" + this.getText());}\n"); - sb.append(" 'b' {System.out.println(\"stuff2: \" + this.getText());})\n"); - sb.append(" {System.out.println(this.getText());} ;\n"); - sb.append("WS : (' '|'\\n') -> skip ;\n"); - sb.append("J : .;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "ab", false); - assertEquals("stuff0: \n" + - "stuff1: a\n" + - "stuff2: ab\n" + - "ab\n" + - "[@0,0:1='ab',<1>,1:0]\n" + - "[@1,2:1='',<-1>,1:2]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testGreedyConfigs() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("I : ('a' | 'ab') {System.out.println(this.getText());} ;\n"); - sb.append("WS : (' '|'\\n') -> skip ;\n"); - sb.append("J : .;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "ab", false); - assertEquals("ab\n" + - "[@0,0:1='ab',<1>,1:0]\n" + - "[@1,2:1='',<-1>,1:2]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNonGreedyConfigs() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("I : .*? ('a' | 'ab') {System.out.println(this.getText());} ;\n"); - sb.append("WS : (' '|'\\n') -> skip ;\n"); - sb.append("J : . 
{System.out.println(this.getText());};\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "ab", false); - assertEquals("a\n" + - "b\n" + - "[@0,0:0='a',<1>,1:0]\n" + - "[@1,1:1='b',<3>,1:1]\n" + - "[@2,2:1='',<-1>,1:2]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testKeywordID() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("KEND : 'end' ; // has priority\n"); - sb.append("ID : 'a'..'z'+ ;\n"); - sb.append("WS : (' '|'\\n')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "end eend ending a", false); - assertEquals("[@0,0:2='end',<1>,1:0]\n" + - "[@1,3:3=' ',<3>,1:3]\n" + - "[@2,4:7='eend',<2>,1:4]\n" + - "[@3,8:8=' ',<3>,1:8]\n" + - "[@4,9:14='ending',<2>,1:9]\n" + - "[@5,15:15=' ',<3>,1:15]\n" + - "[@6,16:16='a',<2>,1:16]\n" + - "[@7,17:16='',<-1>,1:17]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testHexVsID() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("HexLiteral : '0' ('x'|'X') HexDigit+ ;\n"); - sb.append("DecimalLiteral : ('0' | '1'..'9' '0'..'9'*) ;\n"); - sb.append("FloatingPointLiteral : ('0x' | '0X') HexDigit* ('.' HexDigit*)? ;\n"); - sb.append("DOT : '.' ;\n"); - sb.append("ID : 'a'..'z'+ ;\n"); - sb.append("fragment HexDigit : ('0'..'9'|'a'..'f'|'A'..'F') ;\n"); - sb.append("WS : (' '|'\\n')+;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "x 0 1 a.b a.l", false); - assertEquals("[@0,0:0='x',<5>,1:0]\n" + - "[@1,1:1=' ',<6>,1:1]\n" + - "[@2,2:2='0',<2>,1:2]\n" + - "[@3,3:3=' ',<6>,1:3]\n" + - "[@4,4:4='1',<2>,1:4]\n" + - "[@5,5:5=' ',<6>,1:5]\n" + - "[@6,6:6='a',<5>,1:6]\n" + - "[@7,7:7='.',<4>,1:7]\n" + - "[@8,8:8='b',<5>,1:8]\n" + - "[@9,9:9=' ',<6>,1:9]\n" + - "[@10,10:10='a',<5>,1:10]\n" + - "[@11,11:11='.',<4>,1:11]\n" + - "[@12,12:12='l',<5>,1:12]\n" + - "[@13,13:12='',<-1>,1:13]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testEOFByItself() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("DONE : EOF ;\n"); - sb.append("A : 'a';\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "", false); - assertEquals("[@0,0:-1='',<1>,1:0]\n" + - "[@1,0:-1='',<-1>,1:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testEOFSuffixInFirstRule_1() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : 'a' EOF ;\n"); - sb.append("B : 'a';\n"); - sb.append("C : 'c';\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "", false); - assertEquals("[@0,0:-1='',<-1>,1:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testEOFSuffixInFirstRule_2() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar 
L;\n"); - sb.append("A : 'a' EOF ;\n"); - sb.append("B : 'a';\n"); - sb.append("C : 'c';\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "a", false); - assertEquals("[@0,0:0='a',<1>,1:0]\n" + - "[@1,1:0='',<-1>,1:1]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSet() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("I : '0'..'9'+ {System.out.println(\"I\");} ;\n"); - sb.append("WS : [ \\n\\u000D] -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "34\n 34", false); - assertEquals("I\n" + - "I\n" + - "[@0,0:1='34',<1>,1:0]\n" + - "[@1,4:5='34',<1>,2:1]\n" + - "[@2,6:5='',<-1>,2:3]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetPlus() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("I : '0'..'9'+ {System.out.println(\"I\");} ;\n"); - sb.append("WS : [ \\n\\u000D]+ -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "34\n 34", false); - assertEquals("I\n" + - "I\n" + - "[@0,0:1='34',<1>,1:0]\n" + - "[@1,4:5='34',<1>,2:1]\n" + - "[@2,6:5='',<-1>,2:3]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetNot() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("I : ~[ab \\n] ~[ \\ncd]* {System.out.println(\"I\");} ;\n"); - sb.append("WS : [ \\n\\u000D]+ -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "xaf", false); - assertEquals("I\n" + - "[@0,0:2='xaf',<1>,1:0]\n" + - "[@1,3:2='',<-1>,1:3]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetInSet() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("I : (~[ab \\n]|'a') {System.out.println(\"I\");} ;\n"); - sb.append("WS : [ \\n\\u000D]+ -> skip ;\n"); - sb.append(" \n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "a x", false); - assertEquals("I\n" + - "I\n" + - "[@0,0:0='a',<1>,1:0]\n" + - "[@1,2:2='x',<1>,1:2]\n" + - "[@2,3:2='',<-1>,1:3]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetRange() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("I : [0-9]+ {System.out.println(\"I\");} ;\n"); - sb.append("ID : [a-zA-Z] [a-zA-Z0-9]* {System.out.println(\"ID\");} ;\n"); - sb.append("WS : [ \\n\\u0009\\r]+ -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "34\n 34 a2 abc \n ", false); - assertEquals("I\n" + - "I\n" + - "ID\n" + - "ID\n" + - "[@0,0:1='34',<1>,1:0]\n" + - "[@1,4:5='34',<1>,2:1]\n" + - "[@2,7:8='a2',<2>,2:4]\n" + - "[@3,10:12='abc',<2>,2:7]\n" + - "[@4,18:17='',<-1>,3:3]\n", found); - assertNull(this.stderrDuringParse); 
- } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetWithMissingEndRange() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("I : [0-]+ {System.out.println(\"I\");} ;\n"); - sb.append("WS : [ \\n\\u000D]+ -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "00\n", false); - assertEquals("I\n" + - "[@0,0:1='00',<1>,1:0]\n" + - "[@1,3:2='',<-1>,2:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetWithMissingEscapeChar() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("I : [0-9]+ {System.out.println(\"I\");} ;\n"); - sb.append("WS : [ \\u]+ -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "34 ", false); - assertEquals("I\n" + - "[@0,0:1='34',<1>,1:0]\n" + - "[@1,3:2='',<-1>,1:3]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetWithEscapedChar() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("DASHBRACK : [\\-\\]]+ {System.out.println(\"DASHBRACK\");} ;\n"); - sb.append("WS : [ \\u]+ -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "- ] ", false); - assertEquals("DASHBRACK\n" + - "DASHBRACK\n" + - "[@0,0:0='-',<1>,1:0]\n" + - "[@1,2:2=']',<1>,1:2]\n" + - "[@2,4:3='',<-1>,1:4]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetWithReversedRange() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : [z-a9]+ {System.out.println(\"A\");} ;\n"); - sb.append("WS : [ \\u]+ -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "9", false); - assertEquals("A\n" + - "[@0,0:0='9',<1>,1:0]\n" + - "[@1,1:0='',<-1>,1:1]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetWithQuote1() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : [\"a-z]+ {System.out.println(\"A\");} ;\n"); - sb.append("WS : [ \\n\\t]+ -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "b\"a", false); - assertEquals("A\n" + - "[@0,0:2='b\"a',<1>,1:0]\n" + - "[@1,3:2='',<-1>,1:3]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetWithQuote2() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("A : [\"\\ab]+ {System.out.println(\"A\");} ;\n"); - sb.append("WS : [ \\n\\t]+ -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "b\"\\a", false); - assertEquals("A\n" + - "[@0,0:3='b\"\\a',<1>,1:0]\n" + - "[@1,4:3='',<-1>,1:4]\n", found); - assertNull(this.stderrDuringParse); - 
} - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPositionAdjustingLexer() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar PositionAdjustingLexer;\n"); - sb.append("\n"); - sb.append("@members {\n"); - sb.append("@Override\n"); - sb.append("public Token nextToken() {\n"); - sb.append(" if (!(_interp instanceof PositionAdjustingLexerATNSimulator)) {\n"); - sb.append(" _interp = new PositionAdjustingLexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);\n"); - sb.append(" }\n"); - sb.append("\n"); - sb.append(" return super.nextToken();\n"); - sb.append("}\n"); - sb.append("\n"); - sb.append("@Override\n"); - sb.append("public Token emit() {\n"); - sb.append(" switch (_type) {\n"); - sb.append(" case TOKENS:\n"); - sb.append(" handleAcceptPositionForKeyword(\"tokens\");\n"); - sb.append(" break;\n"); - sb.append("\n"); - sb.append(" case LABEL:\n"); - sb.append(" handleAcceptPositionForIdentifier();\n"); - sb.append(" break;\n"); - sb.append("\n"); - sb.append(" default:\n"); - sb.append(" break;\n"); - sb.append(" }\n"); - sb.append("\n"); - sb.append(" return super.emit();\n"); - sb.append("}\n"); - sb.append("\n"); - sb.append("private boolean handleAcceptPositionForIdentifier() {\n"); - sb.append(" String tokenText = getText();\n"); - sb.append(" int identifierLength = 0;\n"); - sb.append(" while (identifierLength < tokenText.length() && isIdentifierChar(tokenText.charAt(identifierLength))) {\n"); - sb.append(" identifierLength++;\n"); - sb.append(" }\n"); - sb.append("\n"); - sb.append(" if (getInputStream().index() > _tokenStartCharIndex + identifierLength) {\n"); - sb.append(" int offset = identifierLength - 1;\n"); - sb.append(" getInterpreter().resetAcceptPosition(getInputStream(), _tokenStartCharIndex + offset, _tokenStartLine, _tokenStartCharPositionInLine + offset);\n"); - sb.append(" return true;\n"); - sb.append(" }\n"); - sb.append("\n"); - sb.append(" return false;\n"); - sb.append("}\n"); - sb.append("\n"); - sb.append("private boolean handleAcceptPositionForKeyword(String keyword) {\n"); - sb.append(" if (getInputStream().index() > _tokenStartCharIndex + keyword.length()) {\n"); - sb.append(" int offset = keyword.length() - 1;\n"); - sb.append(" getInterpreter().resetAcceptPosition(getInputStream(), _tokenStartCharIndex + offset, _tokenStartLine, _tokenStartCharPositionInLine + offset);\n"); - sb.append(" return true;\n"); - sb.append(" }\n"); - sb.append("\n"); - sb.append(" return false;\n"); - sb.append("}\n"); - sb.append("\n"); - sb.append("@Override\n"); - sb.append("public PositionAdjustingLexerATNSimulator getInterpreter() {\n"); - sb.append(" return (PositionAdjustingLexerATNSimulator)super.getInterpreter();\n"); - sb.append("}\n"); - sb.append("\n"); - sb.append("private static boolean isIdentifierChar(char c) {\n"); - sb.append(" return Character.isLetterOrDigit(c) || c == '_';\n"); - sb.append("}\n"); - sb.append("\n"); - sb.append("protected static class PositionAdjustingLexerATNSimulator extends LexerATNSimulator {\n"); - sb.append("\n"); - sb.append(" public PositionAdjustingLexerATNSimulator(Lexer recog, ATN atn,\n"); - sb.append(" DFA[] decisionToDFA,\n"); - sb.append(" PredictionContextCache sharedContextCache)\n"); - sb.append(" {\n"); - sb.append(" super(recog, atn, decisionToDFA, sharedContextCache);\n"); - sb.append(" }\n"); - sb.append("\n"); - sb.append(" protected void resetAcceptPosition(CharStream input, int index, int 
line, int charPositionInLine) {\n"); - sb.append(" input.seek(index);\n"); - sb.append(" this.line = line;\n"); - sb.append(" this.charPositionInLine = charPositionInLine;\n"); - sb.append(" consume(input);\n"); - sb.append(" }\n"); - sb.append("\n"); - sb.append("}\n"); - sb.append("\n"); - sb.append("}\n"); - sb.append("\n"); - sb.append("ASSIGN : '=' ;\n"); - sb.append("PLUS_ASSIGN : '+=' ;\n"); - sb.append("LCURLY: '{';\n"); - sb.append("\n"); - sb.append("// 'tokens' followed by '{'\n"); - sb.append("TOKENS : 'tokens' IGNORED '{';\n"); - sb.append("\n"); - sb.append("// IDENTIFIER followed by '+=' or '='\n"); - sb.append("LABEL\n"); - sb.append(" : IDENTIFIER IGNORED '+'? '='\n"); - sb.append(" ;\n"); - sb.append("\n"); - sb.append("IDENTIFIER\n"); - sb.append(" : [a-zA-Z_] [a-zA-Z0-9_]*\n"); - sb.append(" ;\n"); - sb.append("\n"); - sb.append("fragment\n"); - sb.append("IGNORED\n"); - sb.append(" : [ \\t\\r\\n]*\n"); - sb.append(" ;\n"); - sb.append("\n"); - sb.append("NEWLINE\n"); - sb.append(" : [\\r\\n]+ -> skip\n"); - sb.append(" ;\n"); - sb.append("\n"); - sb.append("WS\n"); - sb.append(" : [ \\t]+ -> skip\n"); - sb.append(" ;\n"); - String grammar = sb.toString(); - String found = execLexer("PositionAdjustingLexer.g4", grammar, "PositionAdjustingLexer", "tokens\ntokens {\nnotLabel\nlabel1 =\nlabel2 +=\nnotLabel\n", false); - assertEquals("[@0,0:5='tokens',<6>,1:0]\n" + - "[@1,7:12='tokens',<4>,2:0]\n" + - "[@2,14:14='{',<3>,2:7]\n" + - "[@3,16:23='notLabel',<6>,3:0]\n" + - "[@4,25:30='label1',<5>,4:0]\n" + - "[@5,32:32='=',<1>,4:7]\n" + - "[@6,34:39='label2',<5>,5:0]\n" + - "[@7,41:42='+=',<2>,5:7]\n" + - "[@8,44:51='notLabel',<6>,6:0]\n" + - "[@9,53:52='',<-1>,7:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLargeLexer() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("WS : [ \\t\\r\\n]+ -> skip;\n"); - sb.append("KW0 : 'KW' '0';\n"); - sb.append("KW1 : 'KW' '1';\n"); - sb.append("KW2 : 'KW' '2';\n"); - sb.append("KW3 : 'KW' '3';\n"); - sb.append("KW4 : 'KW' '4';\n"); - sb.append("KW5 : 'KW' '5';\n"); - sb.append("KW6 : 'KW' '6';\n"); - sb.append("KW7 : 'KW' '7';\n"); - sb.append("KW8 : 'KW' '8';\n"); - sb.append("KW9 : 'KW' '9';\n"); - sb.append("KW10 : 'KW' '10';\n"); - sb.append("KW11 : 'KW' '11';\n"); - sb.append("KW12 : 'KW' '12';\n"); - sb.append("KW13 : 'KW' '13';\n"); - sb.append("KW14 : 'KW' '14';\n"); - sb.append("KW15 : 'KW' '15';\n"); - sb.append("KW16 : 'KW' '16';\n"); - sb.append("KW17 : 'KW' '17';\n"); - sb.append("KW18 : 'KW' '18';\n"); - sb.append("KW19 : 'KW' '19';\n"); - sb.append("KW20 : 'KW' '20';\n"); - sb.append("KW21 : 'KW' '21';\n"); - sb.append("KW22 : 'KW' '22';\n"); - sb.append("KW23 : 'KW' '23';\n"); - sb.append("KW24 : 'KW' '24';\n"); - sb.append("KW25 : 'KW' '25';\n"); - sb.append("KW26 : 'KW' '26';\n"); - sb.append("KW27 : 'KW' '27';\n"); - sb.append("KW28 : 'KW' '28';\n"); - sb.append("KW29 : 'KW' '29';\n"); - sb.append("KW30 : 'KW' '30';\n"); - sb.append("KW31 : 'KW' '31';\n"); - sb.append("KW32 : 'KW' '32';\n"); - sb.append("KW33 : 'KW' '33';\n"); - sb.append("KW34 : 'KW' '34';\n"); - sb.append("KW35 : 'KW' '35';\n"); - sb.append("KW36 : 'KW' '36';\n"); - sb.append("KW37 : 'KW' '37';\n"); - sb.append("KW38 : 'KW' '38';\n"); - sb.append("KW39 : 'KW' '39';\n"); - sb.append("KW40 : 'KW' '40';\n"); - sb.append("KW41 : 'KW' '41';\n"); - sb.append("KW42 : 
'KW' '42';\n"); - sb.append("KW43 : 'KW' '43';\n"); - sb.append("KW44 : 'KW' '44';\n"); - sb.append("KW45 : 'KW' '45';\n"); - sb.append("KW46 : 'KW' '46';\n"); - sb.append("KW47 : 'KW' '47';\n"); - sb.append("KW48 : 'KW' '48';\n"); - sb.append("KW49 : 'KW' '49';\n"); - sb.append("KW50 : 'KW' '50';\n"); - sb.append("KW51 : 'KW' '51';\n"); - sb.append("KW52 : 'KW' '52';\n"); - sb.append("KW53 : 'KW' '53';\n"); - sb.append("KW54 : 'KW' '54';\n"); - sb.append("KW55 : 'KW' '55';\n"); - sb.append("KW56 : 'KW' '56';\n"); - sb.append("KW57 : 'KW' '57';\n"); - sb.append("KW58 : 'KW' '58';\n"); - sb.append("KW59 : 'KW' '59';\n"); - sb.append("KW60 : 'KW' '60';\n"); - sb.append("KW61 : 'KW' '61';\n"); - sb.append("KW62 : 'KW' '62';\n"); - sb.append("KW63 : 'KW' '63';\n"); - sb.append("KW64 : 'KW' '64';\n"); - sb.append("KW65 : 'KW' '65';\n"); - sb.append("KW66 : 'KW' '66';\n"); - sb.append("KW67 : 'KW' '67';\n"); - sb.append("KW68 : 'KW' '68';\n"); - sb.append("KW69 : 'KW' '69';\n"); - sb.append("KW70 : 'KW' '70';\n"); - sb.append("KW71 : 'KW' '71';\n"); - sb.append("KW72 : 'KW' '72';\n"); - sb.append("KW73 : 'KW' '73';\n"); - sb.append("KW74 : 'KW' '74';\n"); - sb.append("KW75 : 'KW' '75';\n"); - sb.append("KW76 : 'KW' '76';\n"); - sb.append("KW77 : 'KW' '77';\n"); - sb.append("KW78 : 'KW' '78';\n"); - sb.append("KW79 : 'KW' '79';\n"); - sb.append("KW80 : 'KW' '80';\n"); - sb.append("KW81 : 'KW' '81';\n"); - sb.append("KW82 : 'KW' '82';\n"); - sb.append("KW83 : 'KW' '83';\n"); - sb.append("KW84 : 'KW' '84';\n"); - sb.append("KW85 : 'KW' '85';\n"); - sb.append("KW86 : 'KW' '86';\n"); - sb.append("KW87 : 'KW' '87';\n"); - sb.append("KW88 : 'KW' '88';\n"); - sb.append("KW89 : 'KW' '89';\n"); - sb.append("KW90 : 'KW' '90';\n"); - sb.append("KW91 : 'KW' '91';\n"); - sb.append("KW92 : 'KW' '92';\n"); - sb.append("KW93 : 'KW' '93';\n"); - sb.append("KW94 : 'KW' '94';\n"); - sb.append("KW95 : 'KW' '95';\n"); - sb.append("KW96 : 'KW' '96';\n"); - sb.append("KW97 : 'KW' '97';\n"); - sb.append("KW98 : 'KW' '98';\n"); - sb.append("KW99 : 'KW' '99';\n"); - sb.append("KW100 : 'KW' '100';\n"); - sb.append("KW101 : 'KW' '101';\n"); - sb.append("KW102 : 'KW' '102';\n"); - sb.append("KW103 : 'KW' '103';\n"); - sb.append("KW104 : 'KW' '104';\n"); - sb.append("KW105 : 'KW' '105';\n"); - sb.append("KW106 : 'KW' '106';\n"); - sb.append("KW107 : 'KW' '107';\n"); - sb.append("KW108 : 'KW' '108';\n"); - sb.append("KW109 : 'KW' '109';\n"); - sb.append("KW110 : 'KW' '110';\n"); - sb.append("KW111 : 'KW' '111';\n"); - sb.append("KW112 : 'KW' '112';\n"); - sb.append("KW113 : 'KW' '113';\n"); - sb.append("KW114 : 'KW' '114';\n"); - sb.append("KW115 : 'KW' '115';\n"); - sb.append("KW116 : 'KW' '116';\n"); - sb.append("KW117 : 'KW' '117';\n"); - sb.append("KW118 : 'KW' '118';\n"); - sb.append("KW119 : 'KW' '119';\n"); - sb.append("KW120 : 'KW' '120';\n"); - sb.append("KW121 : 'KW' '121';\n"); - sb.append("KW122 : 'KW' '122';\n"); - sb.append("KW123 : 'KW' '123';\n"); - sb.append("KW124 : 'KW' '124';\n"); - sb.append("KW125 : 'KW' '125';\n"); - sb.append("KW126 : 'KW' '126';\n"); - sb.append("KW127 : 'KW' '127';\n"); - sb.append("KW128 : 'KW' '128';\n"); - sb.append("KW129 : 'KW' '129';\n"); - sb.append("KW130 : 'KW' '130';\n"); - sb.append("KW131 : 'KW' '131';\n"); - sb.append("KW132 : 'KW' '132';\n"); - sb.append("KW133 : 'KW' '133';\n"); - sb.append("KW134 : 'KW' '134';\n"); - sb.append("KW135 : 'KW' '135';\n"); - sb.append("KW136 : 'KW' '136';\n"); - sb.append("KW137 : 'KW' '137';\n"); - sb.append("KW138 : 'KW' '138';\n"); - 
sb.append("KW139 : 'KW' '139';\n"); - sb.append("KW140 : 'KW' '140';\n"); - sb.append("KW141 : 'KW' '141';\n"); - sb.append("KW142 : 'KW' '142';\n"); - sb.append("KW143 : 'KW' '143';\n"); - sb.append("KW144 : 'KW' '144';\n"); - sb.append("KW145 : 'KW' '145';\n"); - sb.append("KW146 : 'KW' '146';\n"); - sb.append("KW147 : 'KW' '147';\n"); - sb.append("KW148 : 'KW' '148';\n"); - sb.append("KW149 : 'KW' '149';\n"); - sb.append("KW150 : 'KW' '150';\n"); - sb.append("KW151 : 'KW' '151';\n"); - sb.append("KW152 : 'KW' '152';\n"); - sb.append("KW153 : 'KW' '153';\n"); - sb.append("KW154 : 'KW' '154';\n"); - sb.append("KW155 : 'KW' '155';\n"); - sb.append("KW156 : 'KW' '156';\n"); - sb.append("KW157 : 'KW' '157';\n"); - sb.append("KW158 : 'KW' '158';\n"); - sb.append("KW159 : 'KW' '159';\n"); - sb.append("KW160 : 'KW' '160';\n"); - sb.append("KW161 : 'KW' '161';\n"); - sb.append("KW162 : 'KW' '162';\n"); - sb.append("KW163 : 'KW' '163';\n"); - sb.append("KW164 : 'KW' '164';\n"); - sb.append("KW165 : 'KW' '165';\n"); - sb.append("KW166 : 'KW' '166';\n"); - sb.append("KW167 : 'KW' '167';\n"); - sb.append("KW168 : 'KW' '168';\n"); - sb.append("KW169 : 'KW' '169';\n"); - sb.append("KW170 : 'KW' '170';\n"); - sb.append("KW171 : 'KW' '171';\n"); - sb.append("KW172 : 'KW' '172';\n"); - sb.append("KW173 : 'KW' '173';\n"); - sb.append("KW174 : 'KW' '174';\n"); - sb.append("KW175 : 'KW' '175';\n"); - sb.append("KW176 : 'KW' '176';\n"); - sb.append("KW177 : 'KW' '177';\n"); - sb.append("KW178 : 'KW' '178';\n"); - sb.append("KW179 : 'KW' '179';\n"); - sb.append("KW180 : 'KW' '180';\n"); - sb.append("KW181 : 'KW' '181';\n"); - sb.append("KW182 : 'KW' '182';\n"); - sb.append("KW183 : 'KW' '183';\n"); - sb.append("KW184 : 'KW' '184';\n"); - sb.append("KW185 : 'KW' '185';\n"); - sb.append("KW186 : 'KW' '186';\n"); - sb.append("KW187 : 'KW' '187';\n"); - sb.append("KW188 : 'KW' '188';\n"); - sb.append("KW189 : 'KW' '189';\n"); - sb.append("KW190 : 'KW' '190';\n"); - sb.append("KW191 : 'KW' '191';\n"); - sb.append("KW192 : 'KW' '192';\n"); - sb.append("KW193 : 'KW' '193';\n"); - sb.append("KW194 : 'KW' '194';\n"); - sb.append("KW195 : 'KW' '195';\n"); - sb.append("KW196 : 'KW' '196';\n"); - sb.append("KW197 : 'KW' '197';\n"); - sb.append("KW198 : 'KW' '198';\n"); - sb.append("KW199 : 'KW' '199';\n"); - sb.append("KW200 : 'KW' '200';\n"); - sb.append("KW201 : 'KW' '201';\n"); - sb.append("KW202 : 'KW' '202';\n"); - sb.append("KW203 : 'KW' '203';\n"); - sb.append("KW204 : 'KW' '204';\n"); - sb.append("KW205 : 'KW' '205';\n"); - sb.append("KW206 : 'KW' '206';\n"); - sb.append("KW207 : 'KW' '207';\n"); - sb.append("KW208 : 'KW' '208';\n"); - sb.append("KW209 : 'KW' '209';\n"); - sb.append("KW210 : 'KW' '210';\n"); - sb.append("KW211 : 'KW' '211';\n"); - sb.append("KW212 : 'KW' '212';\n"); - sb.append("KW213 : 'KW' '213';\n"); - sb.append("KW214 : 'KW' '214';\n"); - sb.append("KW215 : 'KW' '215';\n"); - sb.append("KW216 : 'KW' '216';\n"); - sb.append("KW217 : 'KW' '217';\n"); - sb.append("KW218 : 'KW' '218';\n"); - sb.append("KW219 : 'KW' '219';\n"); - sb.append("KW220 : 'KW' '220';\n"); - sb.append("KW221 : 'KW' '221';\n"); - sb.append("KW222 : 'KW' '222';\n"); - sb.append("KW223 : 'KW' '223';\n"); - sb.append("KW224 : 'KW' '224';\n"); - sb.append("KW225 : 'KW' '225';\n"); - sb.append("KW226 : 'KW' '226';\n"); - sb.append("KW227 : 'KW' '227';\n"); - sb.append("KW228 : 'KW' '228';\n"); - sb.append("KW229 : 'KW' '229';\n"); - sb.append("KW230 : 'KW' '230';\n"); - sb.append("KW231 : 'KW' '231';\n"); - sb.append("KW232 : 
'KW' '232';\n"); - sb.append("KW233 : 'KW' '233';\n"); - sb.append("KW234 : 'KW' '234';\n"); - sb.append("KW235 : 'KW' '235';\n"); - sb.append("KW236 : 'KW' '236';\n"); - sb.append("KW237 : 'KW' '237';\n"); - sb.append("KW238 : 'KW' '238';\n"); - sb.append("KW239 : 'KW' '239';\n"); - sb.append("KW240 : 'KW' '240';\n"); - sb.append("KW241 : 'KW' '241';\n"); - sb.append("KW242 : 'KW' '242';\n"); - sb.append("KW243 : 'KW' '243';\n"); - sb.append("KW244 : 'KW' '244';\n"); - sb.append("KW245 : 'KW' '245';\n"); - sb.append("KW246 : 'KW' '246';\n"); - sb.append("KW247 : 'KW' '247';\n"); - sb.append("KW248 : 'KW' '248';\n"); - sb.append("KW249 : 'KW' '249';\n"); - sb.append("KW250 : 'KW' '250';\n"); - sb.append("KW251 : 'KW' '251';\n"); - sb.append("KW252 : 'KW' '252';\n"); - sb.append("KW253 : 'KW' '253';\n"); - sb.append("KW254 : 'KW' '254';\n"); - sb.append("KW255 : 'KW' '255';\n"); - sb.append("KW256 : 'KW' '256';\n"); - sb.append("KW257 : 'KW' '257';\n"); - sb.append("KW258 : 'KW' '258';\n"); - sb.append("KW259 : 'KW' '259';\n"); - sb.append("KW260 : 'KW' '260';\n"); - sb.append("KW261 : 'KW' '261';\n"); - sb.append("KW262 : 'KW' '262';\n"); - sb.append("KW263 : 'KW' '263';\n"); - sb.append("KW264 : 'KW' '264';\n"); - sb.append("KW265 : 'KW' '265';\n"); - sb.append("KW266 : 'KW' '266';\n"); - sb.append("KW267 : 'KW' '267';\n"); - sb.append("KW268 : 'KW' '268';\n"); - sb.append("KW269 : 'KW' '269';\n"); - sb.append("KW270 : 'KW' '270';\n"); - sb.append("KW271 : 'KW' '271';\n"); - sb.append("KW272 : 'KW' '272';\n"); - sb.append("KW273 : 'KW' '273';\n"); - sb.append("KW274 : 'KW' '274';\n"); - sb.append("KW275 : 'KW' '275';\n"); - sb.append("KW276 : 'KW' '276';\n"); - sb.append("KW277 : 'KW' '277';\n"); - sb.append("KW278 : 'KW' '278';\n"); - sb.append("KW279 : 'KW' '279';\n"); - sb.append("KW280 : 'KW' '280';\n"); - sb.append("KW281 : 'KW' '281';\n"); - sb.append("KW282 : 'KW' '282';\n"); - sb.append("KW283 : 'KW' '283';\n"); - sb.append("KW284 : 'KW' '284';\n"); - sb.append("KW285 : 'KW' '285';\n"); - sb.append("KW286 : 'KW' '286';\n"); - sb.append("KW287 : 'KW' '287';\n"); - sb.append("KW288 : 'KW' '288';\n"); - sb.append("KW289 : 'KW' '289';\n"); - sb.append("KW290 : 'KW' '290';\n"); - sb.append("KW291 : 'KW' '291';\n"); - sb.append("KW292 : 'KW' '292';\n"); - sb.append("KW293 : 'KW' '293';\n"); - sb.append("KW294 : 'KW' '294';\n"); - sb.append("KW295 : 'KW' '295';\n"); - sb.append("KW296 : 'KW' '296';\n"); - sb.append("KW297 : 'KW' '297';\n"); - sb.append("KW298 : 'KW' '298';\n"); - sb.append("KW299 : 'KW' '299';\n"); - sb.append("KW300 : 'KW' '300';\n"); - sb.append("KW301 : 'KW' '301';\n"); - sb.append("KW302 : 'KW' '302';\n"); - sb.append("KW303 : 'KW' '303';\n"); - sb.append("KW304 : 'KW' '304';\n"); - sb.append("KW305 : 'KW' '305';\n"); - sb.append("KW306 : 'KW' '306';\n"); - sb.append("KW307 : 'KW' '307';\n"); - sb.append("KW308 : 'KW' '308';\n"); - sb.append("KW309 : 'KW' '309';\n"); - sb.append("KW310 : 'KW' '310';\n"); - sb.append("KW311 : 'KW' '311';\n"); - sb.append("KW312 : 'KW' '312';\n"); - sb.append("KW313 : 'KW' '313';\n"); - sb.append("KW314 : 'KW' '314';\n"); - sb.append("KW315 : 'KW' '315';\n"); - sb.append("KW316 : 'KW' '316';\n"); - sb.append("KW317 : 'KW' '317';\n"); - sb.append("KW318 : 'KW' '318';\n"); - sb.append("KW319 : 'KW' '319';\n"); - sb.append("KW320 : 'KW' '320';\n"); - sb.append("KW321 : 'KW' '321';\n"); - sb.append("KW322 : 'KW' '322';\n"); - sb.append("KW323 : 'KW' '323';\n"); - sb.append("KW324 : 'KW' '324';\n"); - sb.append("KW325 : 'KW' '325';\n"); - 
sb.append("KW326 : 'KW' '326';\n"); - sb.append("KW327 : 'KW' '327';\n"); - sb.append("KW328 : 'KW' '328';\n"); - sb.append("KW329 : 'KW' '329';\n"); - sb.append("KW330 : 'KW' '330';\n"); - sb.append("KW331 : 'KW' '331';\n"); - sb.append("KW332 : 'KW' '332';\n"); - sb.append("KW333 : 'KW' '333';\n"); - sb.append("KW334 : 'KW' '334';\n"); - sb.append("KW335 : 'KW' '335';\n"); - sb.append("KW336 : 'KW' '336';\n"); - sb.append("KW337 : 'KW' '337';\n"); - sb.append("KW338 : 'KW' '338';\n"); - sb.append("KW339 : 'KW' '339';\n"); - sb.append("KW340 : 'KW' '340';\n"); - sb.append("KW341 : 'KW' '341';\n"); - sb.append("KW342 : 'KW' '342';\n"); - sb.append("KW343 : 'KW' '343';\n"); - sb.append("KW344 : 'KW' '344';\n"); - sb.append("KW345 : 'KW' '345';\n"); - sb.append("KW346 : 'KW' '346';\n"); - sb.append("KW347 : 'KW' '347';\n"); - sb.append("KW348 : 'KW' '348';\n"); - sb.append("KW349 : 'KW' '349';\n"); - sb.append("KW350 : 'KW' '350';\n"); - sb.append("KW351 : 'KW' '351';\n"); - sb.append("KW352 : 'KW' '352';\n"); - sb.append("KW353 : 'KW' '353';\n"); - sb.append("KW354 : 'KW' '354';\n"); - sb.append("KW355 : 'KW' '355';\n"); - sb.append("KW356 : 'KW' '356';\n"); - sb.append("KW357 : 'KW' '357';\n"); - sb.append("KW358 : 'KW' '358';\n"); - sb.append("KW359 : 'KW' '359';\n"); - sb.append("KW360 : 'KW' '360';\n"); - sb.append("KW361 : 'KW' '361';\n"); - sb.append("KW362 : 'KW' '362';\n"); - sb.append("KW363 : 'KW' '363';\n"); - sb.append("KW364 : 'KW' '364';\n"); - sb.append("KW365 : 'KW' '365';\n"); - sb.append("KW366 : 'KW' '366';\n"); - sb.append("KW367 : 'KW' '367';\n"); - sb.append("KW368 : 'KW' '368';\n"); - sb.append("KW369 : 'KW' '369';\n"); - sb.append("KW370 : 'KW' '370';\n"); - sb.append("KW371 : 'KW' '371';\n"); - sb.append("KW372 : 'KW' '372';\n"); - sb.append("KW373 : 'KW' '373';\n"); - sb.append("KW374 : 'KW' '374';\n"); - sb.append("KW375 : 'KW' '375';\n"); - sb.append("KW376 : 'KW' '376';\n"); - sb.append("KW377 : 'KW' '377';\n"); - sb.append("KW378 : 'KW' '378';\n"); - sb.append("KW379 : 'KW' '379';\n"); - sb.append("KW380 : 'KW' '380';\n"); - sb.append("KW381 : 'KW' '381';\n"); - sb.append("KW382 : 'KW' '382';\n"); - sb.append("KW383 : 'KW' '383';\n"); - sb.append("KW384 : 'KW' '384';\n"); - sb.append("KW385 : 'KW' '385';\n"); - sb.append("KW386 : 'KW' '386';\n"); - sb.append("KW387 : 'KW' '387';\n"); - sb.append("KW388 : 'KW' '388';\n"); - sb.append("KW389 : 'KW' '389';\n"); - sb.append("KW390 : 'KW' '390';\n"); - sb.append("KW391 : 'KW' '391';\n"); - sb.append("KW392 : 'KW' '392';\n"); - sb.append("KW393 : 'KW' '393';\n"); - sb.append("KW394 : 'KW' '394';\n"); - sb.append("KW395 : 'KW' '395';\n"); - sb.append("KW396 : 'KW' '396';\n"); - sb.append("KW397 : 'KW' '397';\n"); - sb.append("KW398 : 'KW' '398';\n"); - sb.append("KW399 : 'KW' '399';\n"); - sb.append("KW400 : 'KW' '400';\n"); - sb.append("KW401 : 'KW' '401';\n"); - sb.append("KW402 : 'KW' '402';\n"); - sb.append("KW403 : 'KW' '403';\n"); - sb.append("KW404 : 'KW' '404';\n"); - sb.append("KW405 : 'KW' '405';\n"); - sb.append("KW406 : 'KW' '406';\n"); - sb.append("KW407 : 'KW' '407';\n"); - sb.append("KW408 : 'KW' '408';\n"); - sb.append("KW409 : 'KW' '409';\n"); - sb.append("KW410 : 'KW' '410';\n"); - sb.append("KW411 : 'KW' '411';\n"); - sb.append("KW412 : 'KW' '412';\n"); - sb.append("KW413 : 'KW' '413';\n"); - sb.append("KW414 : 'KW' '414';\n"); - sb.append("KW415 : 'KW' '415';\n"); - sb.append("KW416 : 'KW' '416';\n"); - sb.append("KW417 : 'KW' '417';\n"); - sb.append("KW418 : 'KW' '418';\n"); - sb.append("KW419 : 
'KW' '419';\n");
- sb.append("KW420 : 'KW' '420';\n");
- sb.append("KW421 : 'KW' '421';\n");
[… identical deleted sb.append lines for KW422 through KW2310 elided; each follows the same pattern sb.append("KWn : 'KW' 'n';\n") …]
- sb.append("KW2311 : 
'KW' '2311';\n"); - sb.append("KW2312 : 'KW' '2312';\n"); - sb.append("KW2313 : 'KW' '2313';\n"); - sb.append("KW2314 : 'KW' '2314';\n"); - sb.append("KW2315 : 'KW' '2315';\n"); - sb.append("KW2316 : 'KW' '2316';\n"); - sb.append("KW2317 : 'KW' '2317';\n"); - sb.append("KW2318 : 'KW' '2318';\n"); - sb.append("KW2319 : 'KW' '2319';\n"); - sb.append("KW2320 : 'KW' '2320';\n"); - sb.append("KW2321 : 'KW' '2321';\n"); - sb.append("KW2322 : 'KW' '2322';\n"); - sb.append("KW2323 : 'KW' '2323';\n"); - sb.append("KW2324 : 'KW' '2324';\n"); - sb.append("KW2325 : 'KW' '2325';\n"); - sb.append("KW2326 : 'KW' '2326';\n"); - sb.append("KW2327 : 'KW' '2327';\n"); - sb.append("KW2328 : 'KW' '2328';\n"); - sb.append("KW2329 : 'KW' '2329';\n"); - sb.append("KW2330 : 'KW' '2330';\n"); - sb.append("KW2331 : 'KW' '2331';\n"); - sb.append("KW2332 : 'KW' '2332';\n"); - sb.append("KW2333 : 'KW' '2333';\n"); - sb.append("KW2334 : 'KW' '2334';\n"); - sb.append("KW2335 : 'KW' '2335';\n"); - sb.append("KW2336 : 'KW' '2336';\n"); - sb.append("KW2337 : 'KW' '2337';\n"); - sb.append("KW2338 : 'KW' '2338';\n"); - sb.append("KW2339 : 'KW' '2339';\n"); - sb.append("KW2340 : 'KW' '2340';\n"); - sb.append("KW2341 : 'KW' '2341';\n"); - sb.append("KW2342 : 'KW' '2342';\n"); - sb.append("KW2343 : 'KW' '2343';\n"); - sb.append("KW2344 : 'KW' '2344';\n"); - sb.append("KW2345 : 'KW' '2345';\n"); - sb.append("KW2346 : 'KW' '2346';\n"); - sb.append("KW2347 : 'KW' '2347';\n"); - sb.append("KW2348 : 'KW' '2348';\n"); - sb.append("KW2349 : 'KW' '2349';\n"); - sb.append("KW2350 : 'KW' '2350';\n"); - sb.append("KW2351 : 'KW' '2351';\n"); - sb.append("KW2352 : 'KW' '2352';\n"); - sb.append("KW2353 : 'KW' '2353';\n"); - sb.append("KW2354 : 'KW' '2354';\n"); - sb.append("KW2355 : 'KW' '2355';\n"); - sb.append("KW2356 : 'KW' '2356';\n"); - sb.append("KW2357 : 'KW' '2357';\n"); - sb.append("KW2358 : 'KW' '2358';\n"); - sb.append("KW2359 : 'KW' '2359';\n"); - sb.append("KW2360 : 'KW' '2360';\n"); - sb.append("KW2361 : 'KW' '2361';\n"); - sb.append("KW2362 : 'KW' '2362';\n"); - sb.append("KW2363 : 'KW' '2363';\n"); - sb.append("KW2364 : 'KW' '2364';\n"); - sb.append("KW2365 : 'KW' '2365';\n"); - sb.append("KW2366 : 'KW' '2366';\n"); - sb.append("KW2367 : 'KW' '2367';\n"); - sb.append("KW2368 : 'KW' '2368';\n"); - sb.append("KW2369 : 'KW' '2369';\n"); - sb.append("KW2370 : 'KW' '2370';\n"); - sb.append("KW2371 : 'KW' '2371';\n"); - sb.append("KW2372 : 'KW' '2372';\n"); - sb.append("KW2373 : 'KW' '2373';\n"); - sb.append("KW2374 : 'KW' '2374';\n"); - sb.append("KW2375 : 'KW' '2375';\n"); - sb.append("KW2376 : 'KW' '2376';\n"); - sb.append("KW2377 : 'KW' '2377';\n"); - sb.append("KW2378 : 'KW' '2378';\n"); - sb.append("KW2379 : 'KW' '2379';\n"); - sb.append("KW2380 : 'KW' '2380';\n"); - sb.append("KW2381 : 'KW' '2381';\n"); - sb.append("KW2382 : 'KW' '2382';\n"); - sb.append("KW2383 : 'KW' '2383';\n"); - sb.append("KW2384 : 'KW' '2384';\n"); - sb.append("KW2385 : 'KW' '2385';\n"); - sb.append("KW2386 : 'KW' '2386';\n"); - sb.append("KW2387 : 'KW' '2387';\n"); - sb.append("KW2388 : 'KW' '2388';\n"); - sb.append("KW2389 : 'KW' '2389';\n"); - sb.append("KW2390 : 'KW' '2390';\n"); - sb.append("KW2391 : 'KW' '2391';\n"); - sb.append("KW2392 : 'KW' '2392';\n"); - sb.append("KW2393 : 'KW' '2393';\n"); - sb.append("KW2394 : 'KW' '2394';\n"); - sb.append("KW2395 : 'KW' '2395';\n"); - sb.append("KW2396 : 'KW' '2396';\n"); - sb.append("KW2397 : 'KW' '2397';\n"); - sb.append("KW2398 : 'KW' '2398';\n"); - sb.append("KW2399 : 'KW' '2399';\n"); - 
sb.append("KW2400 : 'KW' '2400';\n"); - sb.append("KW2401 : 'KW' '2401';\n"); - sb.append("KW2402 : 'KW' '2402';\n"); - sb.append("KW2403 : 'KW' '2403';\n"); - sb.append("KW2404 : 'KW' '2404';\n"); - sb.append("KW2405 : 'KW' '2405';\n"); - sb.append("KW2406 : 'KW' '2406';\n"); - sb.append("KW2407 : 'KW' '2407';\n"); - sb.append("KW2408 : 'KW' '2408';\n"); - sb.append("KW2409 : 'KW' '2409';\n"); - sb.append("KW2410 : 'KW' '2410';\n"); - sb.append("KW2411 : 'KW' '2411';\n"); - sb.append("KW2412 : 'KW' '2412';\n"); - sb.append("KW2413 : 'KW' '2413';\n"); - sb.append("KW2414 : 'KW' '2414';\n"); - sb.append("KW2415 : 'KW' '2415';\n"); - sb.append("KW2416 : 'KW' '2416';\n"); - sb.append("KW2417 : 'KW' '2417';\n"); - sb.append("KW2418 : 'KW' '2418';\n"); - sb.append("KW2419 : 'KW' '2419';\n"); - sb.append("KW2420 : 'KW' '2420';\n"); - sb.append("KW2421 : 'KW' '2421';\n"); - sb.append("KW2422 : 'KW' '2422';\n"); - sb.append("KW2423 : 'KW' '2423';\n"); - sb.append("KW2424 : 'KW' '2424';\n"); - sb.append("KW2425 : 'KW' '2425';\n"); - sb.append("KW2426 : 'KW' '2426';\n"); - sb.append("KW2427 : 'KW' '2427';\n"); - sb.append("KW2428 : 'KW' '2428';\n"); - sb.append("KW2429 : 'KW' '2429';\n"); - sb.append("KW2430 : 'KW' '2430';\n"); - sb.append("KW2431 : 'KW' '2431';\n"); - sb.append("KW2432 : 'KW' '2432';\n"); - sb.append("KW2433 : 'KW' '2433';\n"); - sb.append("KW2434 : 'KW' '2434';\n"); - sb.append("KW2435 : 'KW' '2435';\n"); - sb.append("KW2436 : 'KW' '2436';\n"); - sb.append("KW2437 : 'KW' '2437';\n"); - sb.append("KW2438 : 'KW' '2438';\n"); - sb.append("KW2439 : 'KW' '2439';\n"); - sb.append("KW2440 : 'KW' '2440';\n"); - sb.append("KW2441 : 'KW' '2441';\n"); - sb.append("KW2442 : 'KW' '2442';\n"); - sb.append("KW2443 : 'KW' '2443';\n"); - sb.append("KW2444 : 'KW' '2444';\n"); - sb.append("KW2445 : 'KW' '2445';\n"); - sb.append("KW2446 : 'KW' '2446';\n"); - sb.append("KW2447 : 'KW' '2447';\n"); - sb.append("KW2448 : 'KW' '2448';\n"); - sb.append("KW2449 : 'KW' '2449';\n"); - sb.append("KW2450 : 'KW' '2450';\n"); - sb.append("KW2451 : 'KW' '2451';\n"); - sb.append("KW2452 : 'KW' '2452';\n"); - sb.append("KW2453 : 'KW' '2453';\n"); - sb.append("KW2454 : 'KW' '2454';\n"); - sb.append("KW2455 : 'KW' '2455';\n"); - sb.append("KW2456 : 'KW' '2456';\n"); - sb.append("KW2457 : 'KW' '2457';\n"); - sb.append("KW2458 : 'KW' '2458';\n"); - sb.append("KW2459 : 'KW' '2459';\n"); - sb.append("KW2460 : 'KW' '2460';\n"); - sb.append("KW2461 : 'KW' '2461';\n"); - sb.append("KW2462 : 'KW' '2462';\n"); - sb.append("KW2463 : 'KW' '2463';\n"); - sb.append("KW2464 : 'KW' '2464';\n"); - sb.append("KW2465 : 'KW' '2465';\n"); - sb.append("KW2466 : 'KW' '2466';\n"); - sb.append("KW2467 : 'KW' '2467';\n"); - sb.append("KW2468 : 'KW' '2468';\n"); - sb.append("KW2469 : 'KW' '2469';\n"); - sb.append("KW2470 : 'KW' '2470';\n"); - sb.append("KW2471 : 'KW' '2471';\n"); - sb.append("KW2472 : 'KW' '2472';\n"); - sb.append("KW2473 : 'KW' '2473';\n"); - sb.append("KW2474 : 'KW' '2474';\n"); - sb.append("KW2475 : 'KW' '2475';\n"); - sb.append("KW2476 : 'KW' '2476';\n"); - sb.append("KW2477 : 'KW' '2477';\n"); - sb.append("KW2478 : 'KW' '2478';\n"); - sb.append("KW2479 : 'KW' '2479';\n"); - sb.append("KW2480 : 'KW' '2480';\n"); - sb.append("KW2481 : 'KW' '2481';\n"); - sb.append("KW2482 : 'KW' '2482';\n"); - sb.append("KW2483 : 'KW' '2483';\n"); - sb.append("KW2484 : 'KW' '2484';\n"); - sb.append("KW2485 : 'KW' '2485';\n"); - sb.append("KW2486 : 'KW' '2486';\n"); - sb.append("KW2487 : 'KW' '2487';\n"); - sb.append("KW2488 : 'KW' 
'2488';\n"); - sb.append("KW2489 : 'KW' '2489';\n"); - sb.append("KW2490 : 'KW' '2490';\n"); - sb.append("KW2491 : 'KW' '2491';\n"); - sb.append("KW2492 : 'KW' '2492';\n"); - sb.append("KW2493 : 'KW' '2493';\n"); - sb.append("KW2494 : 'KW' '2494';\n"); - sb.append("KW2495 : 'KW' '2495';\n"); - sb.append("KW2496 : 'KW' '2496';\n"); - sb.append("KW2497 : 'KW' '2497';\n"); - sb.append("KW2498 : 'KW' '2498';\n"); - sb.append("KW2499 : 'KW' '2499';\n"); - sb.append("KW2500 : 'KW' '2500';\n"); - sb.append("KW2501 : 'KW' '2501';\n"); - sb.append("KW2502 : 'KW' '2502';\n"); - sb.append("KW2503 : 'KW' '2503';\n"); - sb.append("KW2504 : 'KW' '2504';\n"); - sb.append("KW2505 : 'KW' '2505';\n"); - sb.append("KW2506 : 'KW' '2506';\n"); - sb.append("KW2507 : 'KW' '2507';\n"); - sb.append("KW2508 : 'KW' '2508';\n"); - sb.append("KW2509 : 'KW' '2509';\n"); - sb.append("KW2510 : 'KW' '2510';\n"); - sb.append("KW2511 : 'KW' '2511';\n"); - sb.append("KW2512 : 'KW' '2512';\n"); - sb.append("KW2513 : 'KW' '2513';\n"); - sb.append("KW2514 : 'KW' '2514';\n"); - sb.append("KW2515 : 'KW' '2515';\n"); - sb.append("KW2516 : 'KW' '2516';\n"); - sb.append("KW2517 : 'KW' '2517';\n"); - sb.append("KW2518 : 'KW' '2518';\n"); - sb.append("KW2519 : 'KW' '2519';\n"); - sb.append("KW2520 : 'KW' '2520';\n"); - sb.append("KW2521 : 'KW' '2521';\n"); - sb.append("KW2522 : 'KW' '2522';\n"); - sb.append("KW2523 : 'KW' '2523';\n"); - sb.append("KW2524 : 'KW' '2524';\n"); - sb.append("KW2525 : 'KW' '2525';\n"); - sb.append("KW2526 : 'KW' '2526';\n"); - sb.append("KW2527 : 'KW' '2527';\n"); - sb.append("KW2528 : 'KW' '2528';\n"); - sb.append("KW2529 : 'KW' '2529';\n"); - sb.append("KW2530 : 'KW' '2530';\n"); - sb.append("KW2531 : 'KW' '2531';\n"); - sb.append("KW2532 : 'KW' '2532';\n"); - sb.append("KW2533 : 'KW' '2533';\n"); - sb.append("KW2534 : 'KW' '2534';\n"); - sb.append("KW2535 : 'KW' '2535';\n"); - sb.append("KW2536 : 'KW' '2536';\n"); - sb.append("KW2537 : 'KW' '2537';\n"); - sb.append("KW2538 : 'KW' '2538';\n"); - sb.append("KW2539 : 'KW' '2539';\n"); - sb.append("KW2540 : 'KW' '2540';\n"); - sb.append("KW2541 : 'KW' '2541';\n"); - sb.append("KW2542 : 'KW' '2542';\n"); - sb.append("KW2543 : 'KW' '2543';\n"); - sb.append("KW2544 : 'KW' '2544';\n"); - sb.append("KW2545 : 'KW' '2545';\n"); - sb.append("KW2546 : 'KW' '2546';\n"); - sb.append("KW2547 : 'KW' '2547';\n"); - sb.append("KW2548 : 'KW' '2548';\n"); - sb.append("KW2549 : 'KW' '2549';\n"); - sb.append("KW2550 : 'KW' '2550';\n"); - sb.append("KW2551 : 'KW' '2551';\n"); - sb.append("KW2552 : 'KW' '2552';\n"); - sb.append("KW2553 : 'KW' '2553';\n"); - sb.append("KW2554 : 'KW' '2554';\n"); - sb.append("KW2555 : 'KW' '2555';\n"); - sb.append("KW2556 : 'KW' '2556';\n"); - sb.append("KW2557 : 'KW' '2557';\n"); - sb.append("KW2558 : 'KW' '2558';\n"); - sb.append("KW2559 : 'KW' '2559';\n"); - sb.append("KW2560 : 'KW' '2560';\n"); - sb.append("KW2561 : 'KW' '2561';\n"); - sb.append("KW2562 : 'KW' '2562';\n"); - sb.append("KW2563 : 'KW' '2563';\n"); - sb.append("KW2564 : 'KW' '2564';\n"); - sb.append("KW2565 : 'KW' '2565';\n"); - sb.append("KW2566 : 'KW' '2566';\n"); - sb.append("KW2567 : 'KW' '2567';\n"); - sb.append("KW2568 : 'KW' '2568';\n"); - sb.append("KW2569 : 'KW' '2569';\n"); - sb.append("KW2570 : 'KW' '2570';\n"); - sb.append("KW2571 : 'KW' '2571';\n"); - sb.append("KW2572 : 'KW' '2572';\n"); - sb.append("KW2573 : 'KW' '2573';\n"); - sb.append("KW2574 : 'KW' '2574';\n"); - sb.append("KW2575 : 'KW' '2575';\n"); - sb.append("KW2576 : 'KW' '2576';\n"); - sb.append("KW2577 : 
'KW' '2577';\n"); - sb.append("KW2578 : 'KW' '2578';\n"); - sb.append("KW2579 : 'KW' '2579';\n"); - sb.append("KW2580 : 'KW' '2580';\n"); - sb.append("KW2581 : 'KW' '2581';\n"); - sb.append("KW2582 : 'KW' '2582';\n"); - sb.append("KW2583 : 'KW' '2583';\n"); - sb.append("KW2584 : 'KW' '2584';\n"); - sb.append("KW2585 : 'KW' '2585';\n"); - sb.append("KW2586 : 'KW' '2586';\n"); - sb.append("KW2587 : 'KW' '2587';\n"); - sb.append("KW2588 : 'KW' '2588';\n"); - sb.append("KW2589 : 'KW' '2589';\n"); - sb.append("KW2590 : 'KW' '2590';\n"); - sb.append("KW2591 : 'KW' '2591';\n"); - sb.append("KW2592 : 'KW' '2592';\n"); - sb.append("KW2593 : 'KW' '2593';\n"); - sb.append("KW2594 : 'KW' '2594';\n"); - sb.append("KW2595 : 'KW' '2595';\n"); - sb.append("KW2596 : 'KW' '2596';\n"); - sb.append("KW2597 : 'KW' '2597';\n"); - sb.append("KW2598 : 'KW' '2598';\n"); - sb.append("KW2599 : 'KW' '2599';\n"); - sb.append("KW2600 : 'KW' '2600';\n"); - sb.append("KW2601 : 'KW' '2601';\n"); - sb.append("KW2602 : 'KW' '2602';\n"); - sb.append("KW2603 : 'KW' '2603';\n"); - sb.append("KW2604 : 'KW' '2604';\n"); - sb.append("KW2605 : 'KW' '2605';\n"); - sb.append("KW2606 : 'KW' '2606';\n"); - sb.append("KW2607 : 'KW' '2607';\n"); - sb.append("KW2608 : 'KW' '2608';\n"); - sb.append("KW2609 : 'KW' '2609';\n"); - sb.append("KW2610 : 'KW' '2610';\n"); - sb.append("KW2611 : 'KW' '2611';\n"); - sb.append("KW2612 : 'KW' '2612';\n"); - sb.append("KW2613 : 'KW' '2613';\n"); - sb.append("KW2614 : 'KW' '2614';\n"); - sb.append("KW2615 : 'KW' '2615';\n"); - sb.append("KW2616 : 'KW' '2616';\n"); - sb.append("KW2617 : 'KW' '2617';\n"); - sb.append("KW2618 : 'KW' '2618';\n"); - sb.append("KW2619 : 'KW' '2619';\n"); - sb.append("KW2620 : 'KW' '2620';\n"); - sb.append("KW2621 : 'KW' '2621';\n"); - sb.append("KW2622 : 'KW' '2622';\n"); - sb.append("KW2623 : 'KW' '2623';\n"); - sb.append("KW2624 : 'KW' '2624';\n"); - sb.append("KW2625 : 'KW' '2625';\n"); - sb.append("KW2626 : 'KW' '2626';\n"); - sb.append("KW2627 : 'KW' '2627';\n"); - sb.append("KW2628 : 'KW' '2628';\n"); - sb.append("KW2629 : 'KW' '2629';\n"); - sb.append("KW2630 : 'KW' '2630';\n"); - sb.append("KW2631 : 'KW' '2631';\n"); - sb.append("KW2632 : 'KW' '2632';\n"); - sb.append("KW2633 : 'KW' '2633';\n"); - sb.append("KW2634 : 'KW' '2634';\n"); - sb.append("KW2635 : 'KW' '2635';\n"); - sb.append("KW2636 : 'KW' '2636';\n"); - sb.append("KW2637 : 'KW' '2637';\n"); - sb.append("KW2638 : 'KW' '2638';\n"); - sb.append("KW2639 : 'KW' '2639';\n"); - sb.append("KW2640 : 'KW' '2640';\n"); - sb.append("KW2641 : 'KW' '2641';\n"); - sb.append("KW2642 : 'KW' '2642';\n"); - sb.append("KW2643 : 'KW' '2643';\n"); - sb.append("KW2644 : 'KW' '2644';\n"); - sb.append("KW2645 : 'KW' '2645';\n"); - sb.append("KW2646 : 'KW' '2646';\n"); - sb.append("KW2647 : 'KW' '2647';\n"); - sb.append("KW2648 : 'KW' '2648';\n"); - sb.append("KW2649 : 'KW' '2649';\n"); - sb.append("KW2650 : 'KW' '2650';\n"); - sb.append("KW2651 : 'KW' '2651';\n"); - sb.append("KW2652 : 'KW' '2652';\n"); - sb.append("KW2653 : 'KW' '2653';\n"); - sb.append("KW2654 : 'KW' '2654';\n"); - sb.append("KW2655 : 'KW' '2655';\n"); - sb.append("KW2656 : 'KW' '2656';\n"); - sb.append("KW2657 : 'KW' '2657';\n"); - sb.append("KW2658 : 'KW' '2658';\n"); - sb.append("KW2659 : 'KW' '2659';\n"); - sb.append("KW2660 : 'KW' '2660';\n"); - sb.append("KW2661 : 'KW' '2661';\n"); - sb.append("KW2662 : 'KW' '2662';\n"); - sb.append("KW2663 : 'KW' '2663';\n"); - sb.append("KW2664 : 'KW' '2664';\n"); - sb.append("KW2665 : 'KW' '2665';\n"); - 
sb.append("KW2666 : 'KW' '2666';\n"); - sb.append("KW2667 : 'KW' '2667';\n"); - sb.append("KW2668 : 'KW' '2668';\n"); - sb.append("KW2669 : 'KW' '2669';\n"); - sb.append("KW2670 : 'KW' '2670';\n"); - sb.append("KW2671 : 'KW' '2671';\n"); - sb.append("KW2672 : 'KW' '2672';\n"); - sb.append("KW2673 : 'KW' '2673';\n"); - sb.append("KW2674 : 'KW' '2674';\n"); - sb.append("KW2675 : 'KW' '2675';\n"); - sb.append("KW2676 : 'KW' '2676';\n"); - sb.append("KW2677 : 'KW' '2677';\n"); - sb.append("KW2678 : 'KW' '2678';\n"); - sb.append("KW2679 : 'KW' '2679';\n"); - sb.append("KW2680 : 'KW' '2680';\n"); - sb.append("KW2681 : 'KW' '2681';\n"); - sb.append("KW2682 : 'KW' '2682';\n"); - sb.append("KW2683 : 'KW' '2683';\n"); - sb.append("KW2684 : 'KW' '2684';\n"); - sb.append("KW2685 : 'KW' '2685';\n"); - sb.append("KW2686 : 'KW' '2686';\n"); - sb.append("KW2687 : 'KW' '2687';\n"); - sb.append("KW2688 : 'KW' '2688';\n"); - sb.append("KW2689 : 'KW' '2689';\n"); - sb.append("KW2690 : 'KW' '2690';\n"); - sb.append("KW2691 : 'KW' '2691';\n"); - sb.append("KW2692 : 'KW' '2692';\n"); - sb.append("KW2693 : 'KW' '2693';\n"); - sb.append("KW2694 : 'KW' '2694';\n"); - sb.append("KW2695 : 'KW' '2695';\n"); - sb.append("KW2696 : 'KW' '2696';\n"); - sb.append("KW2697 : 'KW' '2697';\n"); - sb.append("KW2698 : 'KW' '2698';\n"); - sb.append("KW2699 : 'KW' '2699';\n"); - sb.append("KW2700 : 'KW' '2700';\n"); - sb.append("KW2701 : 'KW' '2701';\n"); - sb.append("KW2702 : 'KW' '2702';\n"); - sb.append("KW2703 : 'KW' '2703';\n"); - sb.append("KW2704 : 'KW' '2704';\n"); - sb.append("KW2705 : 'KW' '2705';\n"); - sb.append("KW2706 : 'KW' '2706';\n"); - sb.append("KW2707 : 'KW' '2707';\n"); - sb.append("KW2708 : 'KW' '2708';\n"); - sb.append("KW2709 : 'KW' '2709';\n"); - sb.append("KW2710 : 'KW' '2710';\n"); - sb.append("KW2711 : 'KW' '2711';\n"); - sb.append("KW2712 : 'KW' '2712';\n"); - sb.append("KW2713 : 'KW' '2713';\n"); - sb.append("KW2714 : 'KW' '2714';\n"); - sb.append("KW2715 : 'KW' '2715';\n"); - sb.append("KW2716 : 'KW' '2716';\n"); - sb.append("KW2717 : 'KW' '2717';\n"); - sb.append("KW2718 : 'KW' '2718';\n"); - sb.append("KW2719 : 'KW' '2719';\n"); - sb.append("KW2720 : 'KW' '2720';\n"); - sb.append("KW2721 : 'KW' '2721';\n"); - sb.append("KW2722 : 'KW' '2722';\n"); - sb.append("KW2723 : 'KW' '2723';\n"); - sb.append("KW2724 : 'KW' '2724';\n"); - sb.append("KW2725 : 'KW' '2725';\n"); - sb.append("KW2726 : 'KW' '2726';\n"); - sb.append("KW2727 : 'KW' '2727';\n"); - sb.append("KW2728 : 'KW' '2728';\n"); - sb.append("KW2729 : 'KW' '2729';\n"); - sb.append("KW2730 : 'KW' '2730';\n"); - sb.append("KW2731 : 'KW' '2731';\n"); - sb.append("KW2732 : 'KW' '2732';\n"); - sb.append("KW2733 : 'KW' '2733';\n"); - sb.append("KW2734 : 'KW' '2734';\n"); - sb.append("KW2735 : 'KW' '2735';\n"); - sb.append("KW2736 : 'KW' '2736';\n"); - sb.append("KW2737 : 'KW' '2737';\n"); - sb.append("KW2738 : 'KW' '2738';\n"); - sb.append("KW2739 : 'KW' '2739';\n"); - sb.append("KW2740 : 'KW' '2740';\n"); - sb.append("KW2741 : 'KW' '2741';\n"); - sb.append("KW2742 : 'KW' '2742';\n"); - sb.append("KW2743 : 'KW' '2743';\n"); - sb.append("KW2744 : 'KW' '2744';\n"); - sb.append("KW2745 : 'KW' '2745';\n"); - sb.append("KW2746 : 'KW' '2746';\n"); - sb.append("KW2747 : 'KW' '2747';\n"); - sb.append("KW2748 : 'KW' '2748';\n"); - sb.append("KW2749 : 'KW' '2749';\n"); - sb.append("KW2750 : 'KW' '2750';\n"); - sb.append("KW2751 : 'KW' '2751';\n"); - sb.append("KW2752 : 'KW' '2752';\n"); - sb.append("KW2753 : 'KW' '2753';\n"); - sb.append("KW2754 : 'KW' 
'2754';\n"); - sb.append("KW2755 : 'KW' '2755';\n"); - sb.append("KW2756 : 'KW' '2756';\n"); - sb.append("KW2757 : 'KW' '2757';\n"); - sb.append("KW2758 : 'KW' '2758';\n"); - sb.append("KW2759 : 'KW' '2759';\n"); - sb.append("KW2760 : 'KW' '2760';\n"); - sb.append("KW2761 : 'KW' '2761';\n"); - sb.append("KW2762 : 'KW' '2762';\n"); - sb.append("KW2763 : 'KW' '2763';\n"); - sb.append("KW2764 : 'KW' '2764';\n"); - sb.append("KW2765 : 'KW' '2765';\n"); - sb.append("KW2766 : 'KW' '2766';\n"); - sb.append("KW2767 : 'KW' '2767';\n"); - sb.append("KW2768 : 'KW' '2768';\n"); - sb.append("KW2769 : 'KW' '2769';\n"); - sb.append("KW2770 : 'KW' '2770';\n"); - sb.append("KW2771 : 'KW' '2771';\n"); - sb.append("KW2772 : 'KW' '2772';\n"); - sb.append("KW2773 : 'KW' '2773';\n"); - sb.append("KW2774 : 'KW' '2774';\n"); - sb.append("KW2775 : 'KW' '2775';\n"); - sb.append("KW2776 : 'KW' '2776';\n"); - sb.append("KW2777 : 'KW' '2777';\n"); - sb.append("KW2778 : 'KW' '2778';\n"); - sb.append("KW2779 : 'KW' '2779';\n"); - sb.append("KW2780 : 'KW' '2780';\n"); - sb.append("KW2781 : 'KW' '2781';\n"); - sb.append("KW2782 : 'KW' '2782';\n"); - sb.append("KW2783 : 'KW' '2783';\n"); - sb.append("KW2784 : 'KW' '2784';\n"); - sb.append("KW2785 : 'KW' '2785';\n"); - sb.append("KW2786 : 'KW' '2786';\n"); - sb.append("KW2787 : 'KW' '2787';\n"); - sb.append("KW2788 : 'KW' '2788';\n"); - sb.append("KW2789 : 'KW' '2789';\n"); - sb.append("KW2790 : 'KW' '2790';\n"); - sb.append("KW2791 : 'KW' '2791';\n"); - sb.append("KW2792 : 'KW' '2792';\n"); - sb.append("KW2793 : 'KW' '2793';\n"); - sb.append("KW2794 : 'KW' '2794';\n"); - sb.append("KW2795 : 'KW' '2795';\n"); - sb.append("KW2796 : 'KW' '2796';\n"); - sb.append("KW2797 : 'KW' '2797';\n"); - sb.append("KW2798 : 'KW' '2798';\n"); - sb.append("KW2799 : 'KW' '2799';\n"); - sb.append("KW2800 : 'KW' '2800';\n"); - sb.append("KW2801 : 'KW' '2801';\n"); - sb.append("KW2802 : 'KW' '2802';\n"); - sb.append("KW2803 : 'KW' '2803';\n"); - sb.append("KW2804 : 'KW' '2804';\n"); - sb.append("KW2805 : 'KW' '2805';\n"); - sb.append("KW2806 : 'KW' '2806';\n"); - sb.append("KW2807 : 'KW' '2807';\n"); - sb.append("KW2808 : 'KW' '2808';\n"); - sb.append("KW2809 : 'KW' '2809';\n"); - sb.append("KW2810 : 'KW' '2810';\n"); - sb.append("KW2811 : 'KW' '2811';\n"); - sb.append("KW2812 : 'KW' '2812';\n"); - sb.append("KW2813 : 'KW' '2813';\n"); - sb.append("KW2814 : 'KW' '2814';\n"); - sb.append("KW2815 : 'KW' '2815';\n"); - sb.append("KW2816 : 'KW' '2816';\n"); - sb.append("KW2817 : 'KW' '2817';\n"); - sb.append("KW2818 : 'KW' '2818';\n"); - sb.append("KW2819 : 'KW' '2819';\n"); - sb.append("KW2820 : 'KW' '2820';\n"); - sb.append("KW2821 : 'KW' '2821';\n"); - sb.append("KW2822 : 'KW' '2822';\n"); - sb.append("KW2823 : 'KW' '2823';\n"); - sb.append("KW2824 : 'KW' '2824';\n"); - sb.append("KW2825 : 'KW' '2825';\n"); - sb.append("KW2826 : 'KW' '2826';\n"); - sb.append("KW2827 : 'KW' '2827';\n"); - sb.append("KW2828 : 'KW' '2828';\n"); - sb.append("KW2829 : 'KW' '2829';\n"); - sb.append("KW2830 : 'KW' '2830';\n"); - sb.append("KW2831 : 'KW' '2831';\n"); - sb.append("KW2832 : 'KW' '2832';\n"); - sb.append("KW2833 : 'KW' '2833';\n"); - sb.append("KW2834 : 'KW' '2834';\n"); - sb.append("KW2835 : 'KW' '2835';\n"); - sb.append("KW2836 : 'KW' '2836';\n"); - sb.append("KW2837 : 'KW' '2837';\n"); - sb.append("KW2838 : 'KW' '2838';\n"); - sb.append("KW2839 : 'KW' '2839';\n"); - sb.append("KW2840 : 'KW' '2840';\n"); - sb.append("KW2841 : 'KW' '2841';\n"); - sb.append("KW2842 : 'KW' '2842';\n"); - sb.append("KW2843 : 
'KW' '2843';\n"); - sb.append("KW2844 : 'KW' '2844';\n"); - sb.append("KW2845 : 'KW' '2845';\n"); - sb.append("KW2846 : 'KW' '2846';\n"); - sb.append("KW2847 : 'KW' '2847';\n"); - sb.append("KW2848 : 'KW' '2848';\n"); - sb.append("KW2849 : 'KW' '2849';\n"); - sb.append("KW2850 : 'KW' '2850';\n"); - sb.append("KW2851 : 'KW' '2851';\n"); - sb.append("KW2852 : 'KW' '2852';\n"); - sb.append("KW2853 : 'KW' '2853';\n"); - sb.append("KW2854 : 'KW' '2854';\n"); - sb.append("KW2855 : 'KW' '2855';\n"); - sb.append("KW2856 : 'KW' '2856';\n"); - sb.append("KW2857 : 'KW' '2857';\n"); - sb.append("KW2858 : 'KW' '2858';\n"); - sb.append("KW2859 : 'KW' '2859';\n"); - sb.append("KW2860 : 'KW' '2860';\n"); - sb.append("KW2861 : 'KW' '2861';\n"); - sb.append("KW2862 : 'KW' '2862';\n"); - sb.append("KW2863 : 'KW' '2863';\n"); - sb.append("KW2864 : 'KW' '2864';\n"); - sb.append("KW2865 : 'KW' '2865';\n"); - sb.append("KW2866 : 'KW' '2866';\n"); - sb.append("KW2867 : 'KW' '2867';\n"); - sb.append("KW2868 : 'KW' '2868';\n"); - sb.append("KW2869 : 'KW' '2869';\n"); - sb.append("KW2870 : 'KW' '2870';\n"); - sb.append("KW2871 : 'KW' '2871';\n"); - sb.append("KW2872 : 'KW' '2872';\n"); - sb.append("KW2873 : 'KW' '2873';\n"); - sb.append("KW2874 : 'KW' '2874';\n"); - sb.append("KW2875 : 'KW' '2875';\n"); - sb.append("KW2876 : 'KW' '2876';\n"); - sb.append("KW2877 : 'KW' '2877';\n"); - sb.append("KW2878 : 'KW' '2878';\n"); - sb.append("KW2879 : 'KW' '2879';\n"); - sb.append("KW2880 : 'KW' '2880';\n"); - sb.append("KW2881 : 'KW' '2881';\n"); - sb.append("KW2882 : 'KW' '2882';\n"); - sb.append("KW2883 : 'KW' '2883';\n"); - sb.append("KW2884 : 'KW' '2884';\n"); - sb.append("KW2885 : 'KW' '2885';\n"); - sb.append("KW2886 : 'KW' '2886';\n"); - sb.append("KW2887 : 'KW' '2887';\n"); - sb.append("KW2888 : 'KW' '2888';\n"); - sb.append("KW2889 : 'KW' '2889';\n"); - sb.append("KW2890 : 'KW' '2890';\n"); - sb.append("KW2891 : 'KW' '2891';\n"); - sb.append("KW2892 : 'KW' '2892';\n"); - sb.append("KW2893 : 'KW' '2893';\n"); - sb.append("KW2894 : 'KW' '2894';\n"); - sb.append("KW2895 : 'KW' '2895';\n"); - sb.append("KW2896 : 'KW' '2896';\n"); - sb.append("KW2897 : 'KW' '2897';\n"); - sb.append("KW2898 : 'KW' '2898';\n"); - sb.append("KW2899 : 'KW' '2899';\n"); - sb.append("KW2900 : 'KW' '2900';\n"); - sb.append("KW2901 : 'KW' '2901';\n"); - sb.append("KW2902 : 'KW' '2902';\n"); - sb.append("KW2903 : 'KW' '2903';\n"); - sb.append("KW2904 : 'KW' '2904';\n"); - sb.append("KW2905 : 'KW' '2905';\n"); - sb.append("KW2906 : 'KW' '2906';\n"); - sb.append("KW2907 : 'KW' '2907';\n"); - sb.append("KW2908 : 'KW' '2908';\n"); - sb.append("KW2909 : 'KW' '2909';\n"); - sb.append("KW2910 : 'KW' '2910';\n"); - sb.append("KW2911 : 'KW' '2911';\n"); - sb.append("KW2912 : 'KW' '2912';\n"); - sb.append("KW2913 : 'KW' '2913';\n"); - sb.append("KW2914 : 'KW' '2914';\n"); - sb.append("KW2915 : 'KW' '2915';\n"); - sb.append("KW2916 : 'KW' '2916';\n"); - sb.append("KW2917 : 'KW' '2917';\n"); - sb.append("KW2918 : 'KW' '2918';\n"); - sb.append("KW2919 : 'KW' '2919';\n"); - sb.append("KW2920 : 'KW' '2920';\n"); - sb.append("KW2921 : 'KW' '2921';\n"); - sb.append("KW2922 : 'KW' '2922';\n"); - sb.append("KW2923 : 'KW' '2923';\n"); - sb.append("KW2924 : 'KW' '2924';\n"); - sb.append("KW2925 : 'KW' '2925';\n"); - sb.append("KW2926 : 'KW' '2926';\n"); - sb.append("KW2927 : 'KW' '2927';\n"); - sb.append("KW2928 : 'KW' '2928';\n"); - sb.append("KW2929 : 'KW' '2929';\n"); - sb.append("KW2930 : 'KW' '2930';\n"); - sb.append("KW2931 : 'KW' '2931';\n"); - 
sb.append("KW2932 : 'KW' '2932';\n"); - sb.append("KW2933 : 'KW' '2933';\n"); - sb.append("KW2934 : 'KW' '2934';\n"); - sb.append("KW2935 : 'KW' '2935';\n"); - sb.append("KW2936 : 'KW' '2936';\n"); - sb.append("KW2937 : 'KW' '2937';\n"); - sb.append("KW2938 : 'KW' '2938';\n"); - sb.append("KW2939 : 'KW' '2939';\n"); - sb.append("KW2940 : 'KW' '2940';\n"); - sb.append("KW2941 : 'KW' '2941';\n"); - sb.append("KW2942 : 'KW' '2942';\n"); - sb.append("KW2943 : 'KW' '2943';\n"); - sb.append("KW2944 : 'KW' '2944';\n"); - sb.append("KW2945 : 'KW' '2945';\n"); - sb.append("KW2946 : 'KW' '2946';\n"); - sb.append("KW2947 : 'KW' '2947';\n"); - sb.append("KW2948 : 'KW' '2948';\n"); - sb.append("KW2949 : 'KW' '2949';\n"); - sb.append("KW2950 : 'KW' '2950';\n"); - sb.append("KW2951 : 'KW' '2951';\n"); - sb.append("KW2952 : 'KW' '2952';\n"); - sb.append("KW2953 : 'KW' '2953';\n"); - sb.append("KW2954 : 'KW' '2954';\n"); - sb.append("KW2955 : 'KW' '2955';\n"); - sb.append("KW2956 : 'KW' '2956';\n"); - sb.append("KW2957 : 'KW' '2957';\n"); - sb.append("KW2958 : 'KW' '2958';\n"); - sb.append("KW2959 : 'KW' '2959';\n"); - sb.append("KW2960 : 'KW' '2960';\n"); - sb.append("KW2961 : 'KW' '2961';\n"); - sb.append("KW2962 : 'KW' '2962';\n"); - sb.append("KW2963 : 'KW' '2963';\n"); - sb.append("KW2964 : 'KW' '2964';\n"); - sb.append("KW2965 : 'KW' '2965';\n"); - sb.append("KW2966 : 'KW' '2966';\n"); - sb.append("KW2967 : 'KW' '2967';\n"); - sb.append("KW2968 : 'KW' '2968';\n"); - sb.append("KW2969 : 'KW' '2969';\n"); - sb.append("KW2970 : 'KW' '2970';\n"); - sb.append("KW2971 : 'KW' '2971';\n"); - sb.append("KW2972 : 'KW' '2972';\n"); - sb.append("KW2973 : 'KW' '2973';\n"); - sb.append("KW2974 : 'KW' '2974';\n"); - sb.append("KW2975 : 'KW' '2975';\n"); - sb.append("KW2976 : 'KW' '2976';\n"); - sb.append("KW2977 : 'KW' '2977';\n"); - sb.append("KW2978 : 'KW' '2978';\n"); - sb.append("KW2979 : 'KW' '2979';\n"); - sb.append("KW2980 : 'KW' '2980';\n"); - sb.append("KW2981 : 'KW' '2981';\n"); - sb.append("KW2982 : 'KW' '2982';\n"); - sb.append("KW2983 : 'KW' '2983';\n"); - sb.append("KW2984 : 'KW' '2984';\n"); - sb.append("KW2985 : 'KW' '2985';\n"); - sb.append("KW2986 : 'KW' '2986';\n"); - sb.append("KW2987 : 'KW' '2987';\n"); - sb.append("KW2988 : 'KW' '2988';\n"); - sb.append("KW2989 : 'KW' '2989';\n"); - sb.append("KW2990 : 'KW' '2990';\n"); - sb.append("KW2991 : 'KW' '2991';\n"); - sb.append("KW2992 : 'KW' '2992';\n"); - sb.append("KW2993 : 'KW' '2993';\n"); - sb.append("KW2994 : 'KW' '2994';\n"); - sb.append("KW2995 : 'KW' '2995';\n"); - sb.append("KW2996 : 'KW' '2996';\n"); - sb.append("KW2997 : 'KW' '2997';\n"); - sb.append("KW2998 : 'KW' '2998';\n"); - sb.append("KW2999 : 'KW' '2999';\n"); - sb.append("KW3000 : 'KW' '3000';\n"); - sb.append("KW3001 : 'KW' '3001';\n"); - sb.append("KW3002 : 'KW' '3002';\n"); - sb.append("KW3003 : 'KW' '3003';\n"); - sb.append("KW3004 : 'KW' '3004';\n"); - sb.append("KW3005 : 'KW' '3005';\n"); - sb.append("KW3006 : 'KW' '3006';\n"); - sb.append("KW3007 : 'KW' '3007';\n"); - sb.append("KW3008 : 'KW' '3008';\n"); - sb.append("KW3009 : 'KW' '3009';\n"); - sb.append("KW3010 : 'KW' '3010';\n"); - sb.append("KW3011 : 'KW' '3011';\n"); - sb.append("KW3012 : 'KW' '3012';\n"); - sb.append("KW3013 : 'KW' '3013';\n"); - sb.append("KW3014 : 'KW' '3014';\n"); - sb.append("KW3015 : 'KW' '3015';\n"); - sb.append("KW3016 : 'KW' '3016';\n"); - sb.append("KW3017 : 'KW' '3017';\n"); - sb.append("KW3018 : 'KW' '3018';\n"); - sb.append("KW3019 : 'KW' '3019';\n"); - sb.append("KW3020 : 'KW' 
'3020';\n"); - sb.append("KW3021 : 'KW' '3021';\n"); - sb.append("KW3022 : 'KW' '3022';\n"); - sb.append("KW3023 : 'KW' '3023';\n"); - sb.append("KW3024 : 'KW' '3024';\n"); - sb.append("KW3025 : 'KW' '3025';\n"); - sb.append("KW3026 : 'KW' '3026';\n"); - sb.append("KW3027 : 'KW' '3027';\n"); - sb.append("KW3028 : 'KW' '3028';\n"); - sb.append("KW3029 : 'KW' '3029';\n"); - sb.append("KW3030 : 'KW' '3030';\n"); - sb.append("KW3031 : 'KW' '3031';\n"); - sb.append("KW3032 : 'KW' '3032';\n"); - sb.append("KW3033 : 'KW' '3033';\n"); - sb.append("KW3034 : 'KW' '3034';\n"); - sb.append("KW3035 : 'KW' '3035';\n"); - sb.append("KW3036 : 'KW' '3036';\n"); - sb.append("KW3037 : 'KW' '3037';\n"); - sb.append("KW3038 : 'KW' '3038';\n"); - sb.append("KW3039 : 'KW' '3039';\n"); - sb.append("KW3040 : 'KW' '3040';\n"); - sb.append("KW3041 : 'KW' '3041';\n"); - sb.append("KW3042 : 'KW' '3042';\n"); - sb.append("KW3043 : 'KW' '3043';\n"); - sb.append("KW3044 : 'KW' '3044';\n"); - sb.append("KW3045 : 'KW' '3045';\n"); - sb.append("KW3046 : 'KW' '3046';\n"); - sb.append("KW3047 : 'KW' '3047';\n"); - sb.append("KW3048 : 'KW' '3048';\n"); - sb.append("KW3049 : 'KW' '3049';\n"); - sb.append("KW3050 : 'KW' '3050';\n"); - sb.append("KW3051 : 'KW' '3051';\n"); - sb.append("KW3052 : 'KW' '3052';\n"); - sb.append("KW3053 : 'KW' '3053';\n"); - sb.append("KW3054 : 'KW' '3054';\n"); - sb.append("KW3055 : 'KW' '3055';\n"); - sb.append("KW3056 : 'KW' '3056';\n"); - sb.append("KW3057 : 'KW' '3057';\n"); - sb.append("KW3058 : 'KW' '3058';\n"); - sb.append("KW3059 : 'KW' '3059';\n"); - sb.append("KW3060 : 'KW' '3060';\n"); - sb.append("KW3061 : 'KW' '3061';\n"); - sb.append("KW3062 : 'KW' '3062';\n"); - sb.append("KW3063 : 'KW' '3063';\n"); - sb.append("KW3064 : 'KW' '3064';\n"); - sb.append("KW3065 : 'KW' '3065';\n"); - sb.append("KW3066 : 'KW' '3066';\n"); - sb.append("KW3067 : 'KW' '3067';\n"); - sb.append("KW3068 : 'KW' '3068';\n"); - sb.append("KW3069 : 'KW' '3069';\n"); - sb.append("KW3070 : 'KW' '3070';\n"); - sb.append("KW3071 : 'KW' '3071';\n"); - sb.append("KW3072 : 'KW' '3072';\n"); - sb.append("KW3073 : 'KW' '3073';\n"); - sb.append("KW3074 : 'KW' '3074';\n"); - sb.append("KW3075 : 'KW' '3075';\n"); - sb.append("KW3076 : 'KW' '3076';\n"); - sb.append("KW3077 : 'KW' '3077';\n"); - sb.append("KW3078 : 'KW' '3078';\n"); - sb.append("KW3079 : 'KW' '3079';\n"); - sb.append("KW3080 : 'KW' '3080';\n"); - sb.append("KW3081 : 'KW' '3081';\n"); - sb.append("KW3082 : 'KW' '3082';\n"); - sb.append("KW3083 : 'KW' '3083';\n"); - sb.append("KW3084 : 'KW' '3084';\n"); - sb.append("KW3085 : 'KW' '3085';\n"); - sb.append("KW3086 : 'KW' '3086';\n"); - sb.append("KW3087 : 'KW' '3087';\n"); - sb.append("KW3088 : 'KW' '3088';\n"); - sb.append("KW3089 : 'KW' '3089';\n"); - sb.append("KW3090 : 'KW' '3090';\n"); - sb.append("KW3091 : 'KW' '3091';\n"); - sb.append("KW3092 : 'KW' '3092';\n"); - sb.append("KW3093 : 'KW' '3093';\n"); - sb.append("KW3094 : 'KW' '3094';\n"); - sb.append("KW3095 : 'KW' '3095';\n"); - sb.append("KW3096 : 'KW' '3096';\n"); - sb.append("KW3097 : 'KW' '3097';\n"); - sb.append("KW3098 : 'KW' '3098';\n"); - sb.append("KW3099 : 'KW' '3099';\n"); - sb.append("KW3100 : 'KW' '3100';\n"); - sb.append("KW3101 : 'KW' '3101';\n"); - sb.append("KW3102 : 'KW' '3102';\n"); - sb.append("KW3103 : 'KW' '3103';\n"); - sb.append("KW3104 : 'KW' '3104';\n"); - sb.append("KW3105 : 'KW' '3105';\n"); - sb.append("KW3106 : 'KW' '3106';\n"); - sb.append("KW3107 : 'KW' '3107';\n"); - sb.append("KW3108 : 'KW' '3108';\n"); - sb.append("KW3109 : 
'KW' '3109';\n"); - sb.append("KW3110 : 'KW' '3110';\n"); - sb.append("KW3111 : 'KW' '3111';\n"); - sb.append("KW3112 : 'KW' '3112';\n"); - sb.append("KW3113 : 'KW' '3113';\n"); - sb.append("KW3114 : 'KW' '3114';\n"); - sb.append("KW3115 : 'KW' '3115';\n"); - sb.append("KW3116 : 'KW' '3116';\n"); - sb.append("KW3117 : 'KW' '3117';\n"); - sb.append("KW3118 : 'KW' '3118';\n"); - sb.append("KW3119 : 'KW' '3119';\n"); - sb.append("KW3120 : 'KW' '3120';\n"); - sb.append("KW3121 : 'KW' '3121';\n"); - sb.append("KW3122 : 'KW' '3122';\n"); - sb.append("KW3123 : 'KW' '3123';\n"); - sb.append("KW3124 : 'KW' '3124';\n"); - sb.append("KW3125 : 'KW' '3125';\n"); - sb.append("KW3126 : 'KW' '3126';\n"); - sb.append("KW3127 : 'KW' '3127';\n"); - sb.append("KW3128 : 'KW' '3128';\n"); - sb.append("KW3129 : 'KW' '3129';\n"); - sb.append("KW3130 : 'KW' '3130';\n"); - sb.append("KW3131 : 'KW' '3131';\n"); - sb.append("KW3132 : 'KW' '3132';\n"); - sb.append("KW3133 : 'KW' '3133';\n"); - sb.append("KW3134 : 'KW' '3134';\n"); - sb.append("KW3135 : 'KW' '3135';\n"); - sb.append("KW3136 : 'KW' '3136';\n"); - sb.append("KW3137 : 'KW' '3137';\n"); - sb.append("KW3138 : 'KW' '3138';\n"); - sb.append("KW3139 : 'KW' '3139';\n"); - sb.append("KW3140 : 'KW' '3140';\n"); - sb.append("KW3141 : 'KW' '3141';\n"); - sb.append("KW3142 : 'KW' '3142';\n"); - sb.append("KW3143 : 'KW' '3143';\n"); - sb.append("KW3144 : 'KW' '3144';\n"); - sb.append("KW3145 : 'KW' '3145';\n"); - sb.append("KW3146 : 'KW' '3146';\n"); - sb.append("KW3147 : 'KW' '3147';\n"); - sb.append("KW3148 : 'KW' '3148';\n"); - sb.append("KW3149 : 'KW' '3149';\n"); - sb.append("KW3150 : 'KW' '3150';\n"); - sb.append("KW3151 : 'KW' '3151';\n"); - sb.append("KW3152 : 'KW' '3152';\n"); - sb.append("KW3153 : 'KW' '3153';\n"); - sb.append("KW3154 : 'KW' '3154';\n"); - sb.append("KW3155 : 'KW' '3155';\n"); - sb.append("KW3156 : 'KW' '3156';\n"); - sb.append("KW3157 : 'KW' '3157';\n"); - sb.append("KW3158 : 'KW' '3158';\n"); - sb.append("KW3159 : 'KW' '3159';\n"); - sb.append("KW3160 : 'KW' '3160';\n"); - sb.append("KW3161 : 'KW' '3161';\n"); - sb.append("KW3162 : 'KW' '3162';\n"); - sb.append("KW3163 : 'KW' '3163';\n"); - sb.append("KW3164 : 'KW' '3164';\n"); - sb.append("KW3165 : 'KW' '3165';\n"); - sb.append("KW3166 : 'KW' '3166';\n"); - sb.append("KW3167 : 'KW' '3167';\n"); - sb.append("KW3168 : 'KW' '3168';\n"); - sb.append("KW3169 : 'KW' '3169';\n"); - sb.append("KW3170 : 'KW' '3170';\n"); - sb.append("KW3171 : 'KW' '3171';\n"); - sb.append("KW3172 : 'KW' '3172';\n"); - sb.append("KW3173 : 'KW' '3173';\n"); - sb.append("KW3174 : 'KW' '3174';\n"); - sb.append("KW3175 : 'KW' '3175';\n"); - sb.append("KW3176 : 'KW' '3176';\n"); - sb.append("KW3177 : 'KW' '3177';\n"); - sb.append("KW3178 : 'KW' '3178';\n"); - sb.append("KW3179 : 'KW' '3179';\n"); - sb.append("KW3180 : 'KW' '3180';\n"); - sb.append("KW3181 : 'KW' '3181';\n"); - sb.append("KW3182 : 'KW' '3182';\n"); - sb.append("KW3183 : 'KW' '3183';\n"); - sb.append("KW3184 : 'KW' '3184';\n"); - sb.append("KW3185 : 'KW' '3185';\n"); - sb.append("KW3186 : 'KW' '3186';\n"); - sb.append("KW3187 : 'KW' '3187';\n"); - sb.append("KW3188 : 'KW' '3188';\n"); - sb.append("KW3189 : 'KW' '3189';\n"); - sb.append("KW3190 : 'KW' '3190';\n"); - sb.append("KW3191 : 'KW' '3191';\n"); - sb.append("KW3192 : 'KW' '3192';\n"); - sb.append("KW3193 : 'KW' '3193';\n"); - sb.append("KW3194 : 'KW' '3194';\n"); - sb.append("KW3195 : 'KW' '3195';\n"); - sb.append("KW3196 : 'KW' '3196';\n"); - sb.append("KW3197 : 'KW' '3197';\n"); - 
sb.append("KW3198 : 'KW' '3198';\n"); - sb.append("KW3199 : 'KW' '3199';\n"); - sb.append("KW3200 : 'KW' '3200';\n"); - sb.append("KW3201 : 'KW' '3201';\n"); - sb.append("KW3202 : 'KW' '3202';\n"); - sb.append("KW3203 : 'KW' '3203';\n"); - sb.append("KW3204 : 'KW' '3204';\n"); - sb.append("KW3205 : 'KW' '3205';\n"); - sb.append("KW3206 : 'KW' '3206';\n"); - sb.append("KW3207 : 'KW' '3207';\n"); - sb.append("KW3208 : 'KW' '3208';\n"); - sb.append("KW3209 : 'KW' '3209';\n"); - sb.append("KW3210 : 'KW' '3210';\n"); - sb.append("KW3211 : 'KW' '3211';\n"); - sb.append("KW3212 : 'KW' '3212';\n"); - sb.append("KW3213 : 'KW' '3213';\n"); - sb.append("KW3214 : 'KW' '3214';\n"); - sb.append("KW3215 : 'KW' '3215';\n"); - sb.append("KW3216 : 'KW' '3216';\n"); - sb.append("KW3217 : 'KW' '3217';\n"); - sb.append("KW3218 : 'KW' '3218';\n"); - sb.append("KW3219 : 'KW' '3219';\n"); - sb.append("KW3220 : 'KW' '3220';\n"); - sb.append("KW3221 : 'KW' '3221';\n"); - sb.append("KW3222 : 'KW' '3222';\n"); - sb.append("KW3223 : 'KW' '3223';\n"); - sb.append("KW3224 : 'KW' '3224';\n"); - sb.append("KW3225 : 'KW' '3225';\n"); - sb.append("KW3226 : 'KW' '3226';\n"); - sb.append("KW3227 : 'KW' '3227';\n"); - sb.append("KW3228 : 'KW' '3228';\n"); - sb.append("KW3229 : 'KW' '3229';\n"); - sb.append("KW3230 : 'KW' '3230';\n"); - sb.append("KW3231 : 'KW' '3231';\n"); - sb.append("KW3232 : 'KW' '3232';\n"); - sb.append("KW3233 : 'KW' '3233';\n"); - sb.append("KW3234 : 'KW' '3234';\n"); - sb.append("KW3235 : 'KW' '3235';\n"); - sb.append("KW3236 : 'KW' '3236';\n"); - sb.append("KW3237 : 'KW' '3237';\n"); - sb.append("KW3238 : 'KW' '3238';\n"); - sb.append("KW3239 : 'KW' '3239';\n"); - sb.append("KW3240 : 'KW' '3240';\n"); - sb.append("KW3241 : 'KW' '3241';\n"); - sb.append("KW3242 : 'KW' '3242';\n"); - sb.append("KW3243 : 'KW' '3243';\n"); - sb.append("KW3244 : 'KW' '3244';\n"); - sb.append("KW3245 : 'KW' '3245';\n"); - sb.append("KW3246 : 'KW' '3246';\n"); - sb.append("KW3247 : 'KW' '3247';\n"); - sb.append("KW3248 : 'KW' '3248';\n"); - sb.append("KW3249 : 'KW' '3249';\n"); - sb.append("KW3250 : 'KW' '3250';\n"); - sb.append("KW3251 : 'KW' '3251';\n"); - sb.append("KW3252 : 'KW' '3252';\n"); - sb.append("KW3253 : 'KW' '3253';\n"); - sb.append("KW3254 : 'KW' '3254';\n"); - sb.append("KW3255 : 'KW' '3255';\n"); - sb.append("KW3256 : 'KW' '3256';\n"); - sb.append("KW3257 : 'KW' '3257';\n"); - sb.append("KW3258 : 'KW' '3258';\n"); - sb.append("KW3259 : 'KW' '3259';\n"); - sb.append("KW3260 : 'KW' '3260';\n"); - sb.append("KW3261 : 'KW' '3261';\n"); - sb.append("KW3262 : 'KW' '3262';\n"); - sb.append("KW3263 : 'KW' '3263';\n"); - sb.append("KW3264 : 'KW' '3264';\n"); - sb.append("KW3265 : 'KW' '3265';\n"); - sb.append("KW3266 : 'KW' '3266';\n"); - sb.append("KW3267 : 'KW' '3267';\n"); - sb.append("KW3268 : 'KW' '3268';\n"); - sb.append("KW3269 : 'KW' '3269';\n"); - sb.append("KW3270 : 'KW' '3270';\n"); - sb.append("KW3271 : 'KW' '3271';\n"); - sb.append("KW3272 : 'KW' '3272';\n"); - sb.append("KW3273 : 'KW' '3273';\n"); - sb.append("KW3274 : 'KW' '3274';\n"); - sb.append("KW3275 : 'KW' '3275';\n"); - sb.append("KW3276 : 'KW' '3276';\n"); - sb.append("KW3277 : 'KW' '3277';\n"); - sb.append("KW3278 : 'KW' '3278';\n"); - sb.append("KW3279 : 'KW' '3279';\n"); - sb.append("KW3280 : 'KW' '3280';\n"); - sb.append("KW3281 : 'KW' '3281';\n"); - sb.append("KW3282 : 'KW' '3282';\n"); - sb.append("KW3283 : 'KW' '3283';\n"); - sb.append("KW3284 : 'KW' '3284';\n"); - sb.append("KW3285 : 'KW' '3285';\n"); - sb.append("KW3286 : 'KW' 
'3286';\n"); - sb.append("KW3287 : 'KW' '3287';\n"); - sb.append("KW3288 : 'KW' '3288';\n"); - sb.append("KW3289 : 'KW' '3289';\n"); - sb.append("KW3290 : 'KW' '3290';\n"); - sb.append("KW3291 : 'KW' '3291';\n"); - sb.append("KW3292 : 'KW' '3292';\n"); - sb.append("KW3293 : 'KW' '3293';\n"); - sb.append("KW3294 : 'KW' '3294';\n"); - sb.append("KW3295 : 'KW' '3295';\n"); - sb.append("KW3296 : 'KW' '3296';\n"); - sb.append("KW3297 : 'KW' '3297';\n"); - sb.append("KW3298 : 'KW' '3298';\n"); - sb.append("KW3299 : 'KW' '3299';\n"); - sb.append("KW3300 : 'KW' '3300';\n"); - sb.append("KW3301 : 'KW' '3301';\n"); - sb.append("KW3302 : 'KW' '3302';\n"); - sb.append("KW3303 : 'KW' '3303';\n"); - sb.append("KW3304 : 'KW' '3304';\n"); - sb.append("KW3305 : 'KW' '3305';\n"); - sb.append("KW3306 : 'KW' '3306';\n"); - sb.append("KW3307 : 'KW' '3307';\n"); - sb.append("KW3308 : 'KW' '3308';\n"); - sb.append("KW3309 : 'KW' '3309';\n"); - sb.append("KW3310 : 'KW' '3310';\n"); - sb.append("KW3311 : 'KW' '3311';\n"); - sb.append("KW3312 : 'KW' '3312';\n"); - sb.append("KW3313 : 'KW' '3313';\n"); - sb.append("KW3314 : 'KW' '3314';\n"); - sb.append("KW3315 : 'KW' '3315';\n"); - sb.append("KW3316 : 'KW' '3316';\n"); - sb.append("KW3317 : 'KW' '3317';\n"); - sb.append("KW3318 : 'KW' '3318';\n"); - sb.append("KW3319 : 'KW' '3319';\n"); - sb.append("KW3320 : 'KW' '3320';\n"); - sb.append("KW3321 : 'KW' '3321';\n"); - sb.append("KW3322 : 'KW' '3322';\n"); - sb.append("KW3323 : 'KW' '3323';\n"); - sb.append("KW3324 : 'KW' '3324';\n"); - sb.append("KW3325 : 'KW' '3325';\n"); - sb.append("KW3326 : 'KW' '3326';\n"); - sb.append("KW3327 : 'KW' '3327';\n"); - sb.append("KW3328 : 'KW' '3328';\n"); - sb.append("KW3329 : 'KW' '3329';\n"); - sb.append("KW3330 : 'KW' '3330';\n"); - sb.append("KW3331 : 'KW' '3331';\n"); - sb.append("KW3332 : 'KW' '3332';\n"); - sb.append("KW3333 : 'KW' '3333';\n"); - sb.append("KW3334 : 'KW' '3334';\n"); - sb.append("KW3335 : 'KW' '3335';\n"); - sb.append("KW3336 : 'KW' '3336';\n"); - sb.append("KW3337 : 'KW' '3337';\n"); - sb.append("KW3338 : 'KW' '3338';\n"); - sb.append("KW3339 : 'KW' '3339';\n"); - sb.append("KW3340 : 'KW' '3340';\n"); - sb.append("KW3341 : 'KW' '3341';\n"); - sb.append("KW3342 : 'KW' '3342';\n"); - sb.append("KW3343 : 'KW' '3343';\n"); - sb.append("KW3344 : 'KW' '3344';\n"); - sb.append("KW3345 : 'KW' '3345';\n"); - sb.append("KW3346 : 'KW' '3346';\n"); - sb.append("KW3347 : 'KW' '3347';\n"); - sb.append("KW3348 : 'KW' '3348';\n"); - sb.append("KW3349 : 'KW' '3349';\n"); - sb.append("KW3350 : 'KW' '3350';\n"); - sb.append("KW3351 : 'KW' '3351';\n"); - sb.append("KW3352 : 'KW' '3352';\n"); - sb.append("KW3353 : 'KW' '3353';\n"); - sb.append("KW3354 : 'KW' '3354';\n"); - sb.append("KW3355 : 'KW' '3355';\n"); - sb.append("KW3356 : 'KW' '3356';\n"); - sb.append("KW3357 : 'KW' '3357';\n"); - sb.append("KW3358 : 'KW' '3358';\n"); - sb.append("KW3359 : 'KW' '3359';\n"); - sb.append("KW3360 : 'KW' '3360';\n"); - sb.append("KW3361 : 'KW' '3361';\n"); - sb.append("KW3362 : 'KW' '3362';\n"); - sb.append("KW3363 : 'KW' '3363';\n"); - sb.append("KW3364 : 'KW' '3364';\n"); - sb.append("KW3365 : 'KW' '3365';\n"); - sb.append("KW3366 : 'KW' '3366';\n"); - sb.append("KW3367 : 'KW' '3367';\n"); - sb.append("KW3368 : 'KW' '3368';\n"); - sb.append("KW3369 : 'KW' '3369';\n"); - sb.append("KW3370 : 'KW' '3370';\n"); - sb.append("KW3371 : 'KW' '3371';\n"); - sb.append("KW3372 : 'KW' '3372';\n"); - sb.append("KW3373 : 'KW' '3373';\n"); - sb.append("KW3374 : 'KW' '3374';\n"); - sb.append("KW3375 : 
'KW' '3375';\n"); - sb.append("KW3376 : 'KW' '3376';\n"); - sb.append("KW3377 : 'KW' '3377';\n"); - sb.append("KW3378 : 'KW' '3378';\n"); - sb.append("KW3379 : 'KW' '3379';\n"); - sb.append("KW3380 : 'KW' '3380';\n"); - sb.append("KW3381 : 'KW' '3381';\n"); - sb.append("KW3382 : 'KW' '3382';\n"); - sb.append("KW3383 : 'KW' '3383';\n"); - sb.append("KW3384 : 'KW' '3384';\n"); - sb.append("KW3385 : 'KW' '3385';\n"); - sb.append("KW3386 : 'KW' '3386';\n"); - sb.append("KW3387 : 'KW' '3387';\n"); - sb.append("KW3388 : 'KW' '3388';\n"); - sb.append("KW3389 : 'KW' '3389';\n"); - sb.append("KW3390 : 'KW' '3390';\n"); - sb.append("KW3391 : 'KW' '3391';\n"); - sb.append("KW3392 : 'KW' '3392';\n"); - sb.append("KW3393 : 'KW' '3393';\n"); - sb.append("KW3394 : 'KW' '3394';\n"); - sb.append("KW3395 : 'KW' '3395';\n"); - sb.append("KW3396 : 'KW' '3396';\n"); - sb.append("KW3397 : 'KW' '3397';\n"); - sb.append("KW3398 : 'KW' '3398';\n"); - sb.append("KW3399 : 'KW' '3399';\n"); - sb.append("KW3400 : 'KW' '3400';\n"); - sb.append("KW3401 : 'KW' '3401';\n"); - sb.append("KW3402 : 'KW' '3402';\n"); - sb.append("KW3403 : 'KW' '3403';\n"); - sb.append("KW3404 : 'KW' '3404';\n"); - sb.append("KW3405 : 'KW' '3405';\n"); - sb.append("KW3406 : 'KW' '3406';\n"); - sb.append("KW3407 : 'KW' '3407';\n"); - sb.append("KW3408 : 'KW' '3408';\n"); - sb.append("KW3409 : 'KW' '3409';\n"); - sb.append("KW3410 : 'KW' '3410';\n"); - sb.append("KW3411 : 'KW' '3411';\n"); - sb.append("KW3412 : 'KW' '3412';\n"); - sb.append("KW3413 : 'KW' '3413';\n"); - sb.append("KW3414 : 'KW' '3414';\n"); - sb.append("KW3415 : 'KW' '3415';\n"); - sb.append("KW3416 : 'KW' '3416';\n"); - sb.append("KW3417 : 'KW' '3417';\n"); - sb.append("KW3418 : 'KW' '3418';\n"); - sb.append("KW3419 : 'KW' '3419';\n"); - sb.append("KW3420 : 'KW' '3420';\n"); - sb.append("KW3421 : 'KW' '3421';\n"); - sb.append("KW3422 : 'KW' '3422';\n"); - sb.append("KW3423 : 'KW' '3423';\n"); - sb.append("KW3424 : 'KW' '3424';\n"); - sb.append("KW3425 : 'KW' '3425';\n"); - sb.append("KW3426 : 'KW' '3426';\n"); - sb.append("KW3427 : 'KW' '3427';\n"); - sb.append("KW3428 : 'KW' '3428';\n"); - sb.append("KW3429 : 'KW' '3429';\n"); - sb.append("KW3430 : 'KW' '3430';\n"); - sb.append("KW3431 : 'KW' '3431';\n"); - sb.append("KW3432 : 'KW' '3432';\n"); - sb.append("KW3433 : 'KW' '3433';\n"); - sb.append("KW3434 : 'KW' '3434';\n"); - sb.append("KW3435 : 'KW' '3435';\n"); - sb.append("KW3436 : 'KW' '3436';\n"); - sb.append("KW3437 : 'KW' '3437';\n"); - sb.append("KW3438 : 'KW' '3438';\n"); - sb.append("KW3439 : 'KW' '3439';\n"); - sb.append("KW3440 : 'KW' '3440';\n"); - sb.append("KW3441 : 'KW' '3441';\n"); - sb.append("KW3442 : 'KW' '3442';\n"); - sb.append("KW3443 : 'KW' '3443';\n"); - sb.append("KW3444 : 'KW' '3444';\n"); - sb.append("KW3445 : 'KW' '3445';\n"); - sb.append("KW3446 : 'KW' '3446';\n"); - sb.append("KW3447 : 'KW' '3447';\n"); - sb.append("KW3448 : 'KW' '3448';\n"); - sb.append("KW3449 : 'KW' '3449';\n"); - sb.append("KW3450 : 'KW' '3450';\n"); - sb.append("KW3451 : 'KW' '3451';\n"); - sb.append("KW3452 : 'KW' '3452';\n"); - sb.append("KW3453 : 'KW' '3453';\n"); - sb.append("KW3454 : 'KW' '3454';\n"); - sb.append("KW3455 : 'KW' '3455';\n"); - sb.append("KW3456 : 'KW' '3456';\n"); - sb.append("KW3457 : 'KW' '3457';\n"); - sb.append("KW3458 : 'KW' '3458';\n"); - sb.append("KW3459 : 'KW' '3459';\n"); - sb.append("KW3460 : 'KW' '3460';\n"); - sb.append("KW3461 : 'KW' '3461';\n"); - sb.append("KW3462 : 'KW' '3462';\n"); - sb.append("KW3463 : 'KW' '3463';\n"); - 
sb.append("KW3464 : 'KW' '3464';\n"); - sb.append("KW3465 : 'KW' '3465';\n"); - sb.append("KW3466 : 'KW' '3466';\n"); - sb.append("KW3467 : 'KW' '3467';\n"); - sb.append("KW3468 : 'KW' '3468';\n"); - sb.append("KW3469 : 'KW' '3469';\n"); - sb.append("KW3470 : 'KW' '3470';\n"); - sb.append("KW3471 : 'KW' '3471';\n"); - sb.append("KW3472 : 'KW' '3472';\n"); - sb.append("KW3473 : 'KW' '3473';\n"); - sb.append("KW3474 : 'KW' '3474';\n"); - sb.append("KW3475 : 'KW' '3475';\n"); - sb.append("KW3476 : 'KW' '3476';\n"); - sb.append("KW3477 : 'KW' '3477';\n"); - sb.append("KW3478 : 'KW' '3478';\n"); - sb.append("KW3479 : 'KW' '3479';\n"); - sb.append("KW3480 : 'KW' '3480';\n"); - sb.append("KW3481 : 'KW' '3481';\n"); - sb.append("KW3482 : 'KW' '3482';\n"); - sb.append("KW3483 : 'KW' '3483';\n"); - sb.append("KW3484 : 'KW' '3484';\n"); - sb.append("KW3485 : 'KW' '3485';\n"); - sb.append("KW3486 : 'KW' '3486';\n"); - sb.append("KW3487 : 'KW' '3487';\n"); - sb.append("KW3488 : 'KW' '3488';\n"); - sb.append("KW3489 : 'KW' '3489';\n"); - sb.append("KW3490 : 'KW' '3490';\n"); - sb.append("KW3491 : 'KW' '3491';\n"); - sb.append("KW3492 : 'KW' '3492';\n"); - sb.append("KW3493 : 'KW' '3493';\n"); - sb.append("KW3494 : 'KW' '3494';\n"); - sb.append("KW3495 : 'KW' '3495';\n"); - sb.append("KW3496 : 'KW' '3496';\n"); - sb.append("KW3497 : 'KW' '3497';\n"); - sb.append("KW3498 : 'KW' '3498';\n"); - sb.append("KW3499 : 'KW' '3499';\n"); - sb.append("KW3500 : 'KW' '3500';\n"); - sb.append("KW3501 : 'KW' '3501';\n"); - sb.append("KW3502 : 'KW' '3502';\n"); - sb.append("KW3503 : 'KW' '3503';\n"); - sb.append("KW3504 : 'KW' '3504';\n"); - sb.append("KW3505 : 'KW' '3505';\n"); - sb.append("KW3506 : 'KW' '3506';\n"); - sb.append("KW3507 : 'KW' '3507';\n"); - sb.append("KW3508 : 'KW' '3508';\n"); - sb.append("KW3509 : 'KW' '3509';\n"); - sb.append("KW3510 : 'KW' '3510';\n"); - sb.append("KW3511 : 'KW' '3511';\n"); - sb.append("KW3512 : 'KW' '3512';\n"); - sb.append("KW3513 : 'KW' '3513';\n"); - sb.append("KW3514 : 'KW' '3514';\n"); - sb.append("KW3515 : 'KW' '3515';\n"); - sb.append("KW3516 : 'KW' '3516';\n"); - sb.append("KW3517 : 'KW' '3517';\n"); - sb.append("KW3518 : 'KW' '3518';\n"); - sb.append("KW3519 : 'KW' '3519';\n"); - sb.append("KW3520 : 'KW' '3520';\n"); - sb.append("KW3521 : 'KW' '3521';\n"); - sb.append("KW3522 : 'KW' '3522';\n"); - sb.append("KW3523 : 'KW' '3523';\n"); - sb.append("KW3524 : 'KW' '3524';\n"); - sb.append("KW3525 : 'KW' '3525';\n"); - sb.append("KW3526 : 'KW' '3526';\n"); - sb.append("KW3527 : 'KW' '3527';\n"); - sb.append("KW3528 : 'KW' '3528';\n"); - sb.append("KW3529 : 'KW' '3529';\n"); - sb.append("KW3530 : 'KW' '3530';\n"); - sb.append("KW3531 : 'KW' '3531';\n"); - sb.append("KW3532 : 'KW' '3532';\n"); - sb.append("KW3533 : 'KW' '3533';\n"); - sb.append("KW3534 : 'KW' '3534';\n"); - sb.append("KW3535 : 'KW' '3535';\n"); - sb.append("KW3536 : 'KW' '3536';\n"); - sb.append("KW3537 : 'KW' '3537';\n"); - sb.append("KW3538 : 'KW' '3538';\n"); - sb.append("KW3539 : 'KW' '3539';\n"); - sb.append("KW3540 : 'KW' '3540';\n"); - sb.append("KW3541 : 'KW' '3541';\n"); - sb.append("KW3542 : 'KW' '3542';\n"); - sb.append("KW3543 : 'KW' '3543';\n"); - sb.append("KW3544 : 'KW' '3544';\n"); - sb.append("KW3545 : 'KW' '3545';\n"); - sb.append("KW3546 : 'KW' '3546';\n"); - sb.append("KW3547 : 'KW' '3547';\n"); - sb.append("KW3548 : 'KW' '3548';\n"); - sb.append("KW3549 : 'KW' '3549';\n"); - sb.append("KW3550 : 'KW' '3550';\n"); - sb.append("KW3551 : 'KW' '3551';\n"); - sb.append("KW3552 : 'KW' 
'3552';\n"); - sb.append("KW3553 : 'KW' '3553';\n"); - sb.append("KW3554 : 'KW' '3554';\n"); - sb.append("KW3555 : 'KW' '3555';\n"); - sb.append("KW3556 : 'KW' '3556';\n"); - sb.append("KW3557 : 'KW' '3557';\n"); - sb.append("KW3558 : 'KW' '3558';\n"); - sb.append("KW3559 : 'KW' '3559';\n"); - sb.append("KW3560 : 'KW' '3560';\n"); - sb.append("KW3561 : 'KW' '3561';\n"); - sb.append("KW3562 : 'KW' '3562';\n"); - sb.append("KW3563 : 'KW' '3563';\n"); - sb.append("KW3564 : 'KW' '3564';\n"); - sb.append("KW3565 : 'KW' '3565';\n"); - sb.append("KW3566 : 'KW' '3566';\n"); - sb.append("KW3567 : 'KW' '3567';\n"); - sb.append("KW3568 : 'KW' '3568';\n"); - sb.append("KW3569 : 'KW' '3569';\n"); - sb.append("KW3570 : 'KW' '3570';\n"); - sb.append("KW3571 : 'KW' '3571';\n"); - sb.append("KW3572 : 'KW' '3572';\n"); - sb.append("KW3573 : 'KW' '3573';\n"); - sb.append("KW3574 : 'KW' '3574';\n"); - sb.append("KW3575 : 'KW' '3575';\n"); - sb.append("KW3576 : 'KW' '3576';\n"); - sb.append("KW3577 : 'KW' '3577';\n"); - sb.append("KW3578 : 'KW' '3578';\n"); - sb.append("KW3579 : 'KW' '3579';\n"); - sb.append("KW3580 : 'KW' '3580';\n"); - sb.append("KW3581 : 'KW' '3581';\n"); - sb.append("KW3582 : 'KW' '3582';\n"); - sb.append("KW3583 : 'KW' '3583';\n"); - sb.append("KW3584 : 'KW' '3584';\n"); - sb.append("KW3585 : 'KW' '3585';\n"); - sb.append("KW3586 : 'KW' '3586';\n"); - sb.append("KW3587 : 'KW' '3587';\n"); - sb.append("KW3588 : 'KW' '3588';\n"); - sb.append("KW3589 : 'KW' '3589';\n"); - sb.append("KW3590 : 'KW' '3590';\n"); - sb.append("KW3591 : 'KW' '3591';\n"); - sb.append("KW3592 : 'KW' '3592';\n"); - sb.append("KW3593 : 'KW' '3593';\n"); - sb.append("KW3594 : 'KW' '3594';\n"); - sb.append("KW3595 : 'KW' '3595';\n"); - sb.append("KW3596 : 'KW' '3596';\n"); - sb.append("KW3597 : 'KW' '3597';\n"); - sb.append("KW3598 : 'KW' '3598';\n"); - sb.append("KW3599 : 'KW' '3599';\n"); - sb.append("KW3600 : 'KW' '3600';\n"); - sb.append("KW3601 : 'KW' '3601';\n"); - sb.append("KW3602 : 'KW' '3602';\n"); - sb.append("KW3603 : 'KW' '3603';\n"); - sb.append("KW3604 : 'KW' '3604';\n"); - sb.append("KW3605 : 'KW' '3605';\n"); - sb.append("KW3606 : 'KW' '3606';\n"); - sb.append("KW3607 : 'KW' '3607';\n"); - sb.append("KW3608 : 'KW' '3608';\n"); - sb.append("KW3609 : 'KW' '3609';\n"); - sb.append("KW3610 : 'KW' '3610';\n"); - sb.append("KW3611 : 'KW' '3611';\n"); - sb.append("KW3612 : 'KW' '3612';\n"); - sb.append("KW3613 : 'KW' '3613';\n"); - sb.append("KW3614 : 'KW' '3614';\n"); - sb.append("KW3615 : 'KW' '3615';\n"); - sb.append("KW3616 : 'KW' '3616';\n"); - sb.append("KW3617 : 'KW' '3617';\n"); - sb.append("KW3618 : 'KW' '3618';\n"); - sb.append("KW3619 : 'KW' '3619';\n"); - sb.append("KW3620 : 'KW' '3620';\n"); - sb.append("KW3621 : 'KW' '3621';\n"); - sb.append("KW3622 : 'KW' '3622';\n"); - sb.append("KW3623 : 'KW' '3623';\n"); - sb.append("KW3624 : 'KW' '3624';\n"); - sb.append("KW3625 : 'KW' '3625';\n"); - sb.append("KW3626 : 'KW' '3626';\n"); - sb.append("KW3627 : 'KW' '3627';\n"); - sb.append("KW3628 : 'KW' '3628';\n"); - sb.append("KW3629 : 'KW' '3629';\n"); - sb.append("KW3630 : 'KW' '3630';\n"); - sb.append("KW3631 : 'KW' '3631';\n"); - sb.append("KW3632 : 'KW' '3632';\n"); - sb.append("KW3633 : 'KW' '3633';\n"); - sb.append("KW3634 : 'KW' '3634';\n"); - sb.append("KW3635 : 'KW' '3635';\n"); - sb.append("KW3636 : 'KW' '3636';\n"); - sb.append("KW3637 : 'KW' '3637';\n"); - sb.append("KW3638 : 'KW' '3638';\n"); - sb.append("KW3639 : 'KW' '3639';\n"); - sb.append("KW3640 : 'KW' '3640';\n"); - sb.append("KW3641 : 
'KW' '3641';\n"); - sb.append("KW3642 : 'KW' '3642';\n"); - sb.append("KW3643 : 'KW' '3643';\n"); - sb.append("KW3644 : 'KW' '3644';\n"); - sb.append("KW3645 : 'KW' '3645';\n"); - sb.append("KW3646 : 'KW' '3646';\n"); - sb.append("KW3647 : 'KW' '3647';\n"); - sb.append("KW3648 : 'KW' '3648';\n"); - sb.append("KW3649 : 'KW' '3649';\n"); - sb.append("KW3650 : 'KW' '3650';\n"); - sb.append("KW3651 : 'KW' '3651';\n"); - sb.append("KW3652 : 'KW' '3652';\n"); - sb.append("KW3653 : 'KW' '3653';\n"); - sb.append("KW3654 : 'KW' '3654';\n"); - sb.append("KW3655 : 'KW' '3655';\n"); - sb.append("KW3656 : 'KW' '3656';\n"); - sb.append("KW3657 : 'KW' '3657';\n"); - sb.append("KW3658 : 'KW' '3658';\n"); - sb.append("KW3659 : 'KW' '3659';\n"); - sb.append("KW3660 : 'KW' '3660';\n"); - sb.append("KW3661 : 'KW' '3661';\n"); - sb.append("KW3662 : 'KW' '3662';\n"); - sb.append("KW3663 : 'KW' '3663';\n"); - sb.append("KW3664 : 'KW' '3664';\n"); - sb.append("KW3665 : 'KW' '3665';\n"); - sb.append("KW3666 : 'KW' '3666';\n"); - sb.append("KW3667 : 'KW' '3667';\n"); - sb.append("KW3668 : 'KW' '3668';\n"); - sb.append("KW3669 : 'KW' '3669';\n"); - sb.append("KW3670 : 'KW' '3670';\n"); - sb.append("KW3671 : 'KW' '3671';\n"); - sb.append("KW3672 : 'KW' '3672';\n"); - sb.append("KW3673 : 'KW' '3673';\n"); - sb.append("KW3674 : 'KW' '3674';\n"); - sb.append("KW3675 : 'KW' '3675';\n"); - sb.append("KW3676 : 'KW' '3676';\n"); - sb.append("KW3677 : 'KW' '3677';\n"); - sb.append("KW3678 : 'KW' '3678';\n"); - sb.append("KW3679 : 'KW' '3679';\n"); - sb.append("KW3680 : 'KW' '3680';\n"); - sb.append("KW3681 : 'KW' '3681';\n"); - sb.append("KW3682 : 'KW' '3682';\n"); - sb.append("KW3683 : 'KW' '3683';\n"); - sb.append("KW3684 : 'KW' '3684';\n"); - sb.append("KW3685 : 'KW' '3685';\n"); - sb.append("KW3686 : 'KW' '3686';\n"); - sb.append("KW3687 : 'KW' '3687';\n"); - sb.append("KW3688 : 'KW' '3688';\n"); - sb.append("KW3689 : 'KW' '3689';\n"); - sb.append("KW3690 : 'KW' '3690';\n"); - sb.append("KW3691 : 'KW' '3691';\n"); - sb.append("KW3692 : 'KW' '3692';\n"); - sb.append("KW3693 : 'KW' '3693';\n"); - sb.append("KW3694 : 'KW' '3694';\n"); - sb.append("KW3695 : 'KW' '3695';\n"); - sb.append("KW3696 : 'KW' '3696';\n"); - sb.append("KW3697 : 'KW' '3697';\n"); - sb.append("KW3698 : 'KW' '3698';\n"); - sb.append("KW3699 : 'KW' '3699';\n"); - sb.append("KW3700 : 'KW' '3700';\n"); - sb.append("KW3701 : 'KW' '3701';\n"); - sb.append("KW3702 : 'KW' '3702';\n"); - sb.append("KW3703 : 'KW' '3703';\n"); - sb.append("KW3704 : 'KW' '3704';\n"); - sb.append("KW3705 : 'KW' '3705';\n"); - sb.append("KW3706 : 'KW' '3706';\n"); - sb.append("KW3707 : 'KW' '3707';\n"); - sb.append("KW3708 : 'KW' '3708';\n"); - sb.append("KW3709 : 'KW' '3709';\n"); - sb.append("KW3710 : 'KW' '3710';\n"); - sb.append("KW3711 : 'KW' '3711';\n"); - sb.append("KW3712 : 'KW' '3712';\n"); - sb.append("KW3713 : 'KW' '3713';\n"); - sb.append("KW3714 : 'KW' '3714';\n"); - sb.append("KW3715 : 'KW' '3715';\n"); - sb.append("KW3716 : 'KW' '3716';\n"); - sb.append("KW3717 : 'KW' '3717';\n"); - sb.append("KW3718 : 'KW' '3718';\n"); - sb.append("KW3719 : 'KW' '3719';\n"); - sb.append("KW3720 : 'KW' '3720';\n"); - sb.append("KW3721 : 'KW' '3721';\n"); - sb.append("KW3722 : 'KW' '3722';\n"); - sb.append("KW3723 : 'KW' '3723';\n"); - sb.append("KW3724 : 'KW' '3724';\n"); - sb.append("KW3725 : 'KW' '3725';\n"); - sb.append("KW3726 : 'KW' '3726';\n"); - sb.append("KW3727 : 'KW' '3727';\n"); - sb.append("KW3728 : 'KW' '3728';\n"); - sb.append("KW3729 : 'KW' '3729';\n"); - 
sb.append("KW3730 : 'KW' '3730';\n"); - sb.append("KW3731 : 'KW' '3731';\n"); - sb.append("KW3732 : 'KW' '3732';\n"); - sb.append("KW3733 : 'KW' '3733';\n"); - sb.append("KW3734 : 'KW' '3734';\n"); - sb.append("KW3735 : 'KW' '3735';\n"); - sb.append("KW3736 : 'KW' '3736';\n"); - sb.append("KW3737 : 'KW' '3737';\n"); - sb.append("KW3738 : 'KW' '3738';\n"); - sb.append("KW3739 : 'KW' '3739';\n"); - sb.append("KW3740 : 'KW' '3740';\n"); - sb.append("KW3741 : 'KW' '3741';\n"); - sb.append("KW3742 : 'KW' '3742';\n"); - sb.append("KW3743 : 'KW' '3743';\n"); - sb.append("KW3744 : 'KW' '3744';\n"); - sb.append("KW3745 : 'KW' '3745';\n"); - sb.append("KW3746 : 'KW' '3746';\n"); - sb.append("KW3747 : 'KW' '3747';\n"); - sb.append("KW3748 : 'KW' '3748';\n"); - sb.append("KW3749 : 'KW' '3749';\n"); - sb.append("KW3750 : 'KW' '3750';\n"); - sb.append("KW3751 : 'KW' '3751';\n"); - sb.append("KW3752 : 'KW' '3752';\n"); - sb.append("KW3753 : 'KW' '3753';\n"); - sb.append("KW3754 : 'KW' '3754';\n"); - sb.append("KW3755 : 'KW' '3755';\n"); - sb.append("KW3756 : 'KW' '3756';\n"); - sb.append("KW3757 : 'KW' '3757';\n"); - sb.append("KW3758 : 'KW' '3758';\n"); - sb.append("KW3759 : 'KW' '3759';\n"); - sb.append("KW3760 : 'KW' '3760';\n"); - sb.append("KW3761 : 'KW' '3761';\n"); - sb.append("KW3762 : 'KW' '3762';\n"); - sb.append("KW3763 : 'KW' '3763';\n"); - sb.append("KW3764 : 'KW' '3764';\n"); - sb.append("KW3765 : 'KW' '3765';\n"); - sb.append("KW3766 : 'KW' '3766';\n"); - sb.append("KW3767 : 'KW' '3767';\n"); - sb.append("KW3768 : 'KW' '3768';\n"); - sb.append("KW3769 : 'KW' '3769';\n"); - sb.append("KW3770 : 'KW' '3770';\n"); - sb.append("KW3771 : 'KW' '3771';\n"); - sb.append("KW3772 : 'KW' '3772';\n"); - sb.append("KW3773 : 'KW' '3773';\n"); - sb.append("KW3774 : 'KW' '3774';\n"); - sb.append("KW3775 : 'KW' '3775';\n"); - sb.append("KW3776 : 'KW' '3776';\n"); - sb.append("KW3777 : 'KW' '3777';\n"); - sb.append("KW3778 : 'KW' '3778';\n"); - sb.append("KW3779 : 'KW' '3779';\n"); - sb.append("KW3780 : 'KW' '3780';\n"); - sb.append("KW3781 : 'KW' '3781';\n"); - sb.append("KW3782 : 'KW' '3782';\n"); - sb.append("KW3783 : 'KW' '3783';\n"); - sb.append("KW3784 : 'KW' '3784';\n"); - sb.append("KW3785 : 'KW' '3785';\n"); - sb.append("KW3786 : 'KW' '3786';\n"); - sb.append("KW3787 : 'KW' '3787';\n"); - sb.append("KW3788 : 'KW' '3788';\n"); - sb.append("KW3789 : 'KW' '3789';\n"); - sb.append("KW3790 : 'KW' '3790';\n"); - sb.append("KW3791 : 'KW' '3791';\n"); - sb.append("KW3792 : 'KW' '3792';\n"); - sb.append("KW3793 : 'KW' '3793';\n"); - sb.append("KW3794 : 'KW' '3794';\n"); - sb.append("KW3795 : 'KW' '3795';\n"); - sb.append("KW3796 : 'KW' '3796';\n"); - sb.append("KW3797 : 'KW' '3797';\n"); - sb.append("KW3798 : 'KW' '3798';\n"); - sb.append("KW3799 : 'KW' '3799';\n"); - sb.append("KW3800 : 'KW' '3800';\n"); - sb.append("KW3801 : 'KW' '3801';\n"); - sb.append("KW3802 : 'KW' '3802';\n"); - sb.append("KW3803 : 'KW' '3803';\n"); - sb.append("KW3804 : 'KW' '3804';\n"); - sb.append("KW3805 : 'KW' '3805';\n"); - sb.append("KW3806 : 'KW' '3806';\n"); - sb.append("KW3807 : 'KW' '3807';\n"); - sb.append("KW3808 : 'KW' '3808';\n"); - sb.append("KW3809 : 'KW' '3809';\n"); - sb.append("KW3810 : 'KW' '3810';\n"); - sb.append("KW3811 : 'KW' '3811';\n"); - sb.append("KW3812 : 'KW' '3812';\n"); - sb.append("KW3813 : 'KW' '3813';\n"); - sb.append("KW3814 : 'KW' '3814';\n"); - sb.append("KW3815 : 'KW' '3815';\n"); - sb.append("KW3816 : 'KW' '3816';\n"); - sb.append("KW3817 : 'KW' '3817';\n"); - sb.append("KW3818 : 'KW' 
'3818';\n"); - sb.append("KW3819 : 'KW' '3819';\n"); - sb.append("KW3820 : 'KW' '3820';\n"); - sb.append("KW3821 : 'KW' '3821';\n"); - sb.append("KW3822 : 'KW' '3822';\n"); - sb.append("KW3823 : 'KW' '3823';\n"); - sb.append("KW3824 : 'KW' '3824';\n"); - sb.append("KW3825 : 'KW' '3825';\n"); - sb.append("KW3826 : 'KW' '3826';\n"); - sb.append("KW3827 : 'KW' '3827';\n"); - sb.append("KW3828 : 'KW' '3828';\n"); - sb.append("KW3829 : 'KW' '3829';\n"); - sb.append("KW3830 : 'KW' '3830';\n"); - sb.append("KW3831 : 'KW' '3831';\n"); - sb.append("KW3832 : 'KW' '3832';\n"); - sb.append("KW3833 : 'KW' '3833';\n"); - sb.append("KW3834 : 'KW' '3834';\n"); - sb.append("KW3835 : 'KW' '3835';\n"); - sb.append("KW3836 : 'KW' '3836';\n"); - sb.append("KW3837 : 'KW' '3837';\n"); - sb.append("KW3838 : 'KW' '3838';\n"); - sb.append("KW3839 : 'KW' '3839';\n"); - sb.append("KW3840 : 'KW' '3840';\n"); - sb.append("KW3841 : 'KW' '3841';\n"); - sb.append("KW3842 : 'KW' '3842';\n"); - sb.append("KW3843 : 'KW' '3843';\n"); - sb.append("KW3844 : 'KW' '3844';\n"); - sb.append("KW3845 : 'KW' '3845';\n"); - sb.append("KW3846 : 'KW' '3846';\n"); - sb.append("KW3847 : 'KW' '3847';\n"); - sb.append("KW3848 : 'KW' '3848';\n"); - sb.append("KW3849 : 'KW' '3849';\n"); - sb.append("KW3850 : 'KW' '3850';\n"); - sb.append("KW3851 : 'KW' '3851';\n"); - sb.append("KW3852 : 'KW' '3852';\n"); - sb.append("KW3853 : 'KW' '3853';\n"); - sb.append("KW3854 : 'KW' '3854';\n"); - sb.append("KW3855 : 'KW' '3855';\n"); - sb.append("KW3856 : 'KW' '3856';\n"); - sb.append("KW3857 : 'KW' '3857';\n"); - sb.append("KW3858 : 'KW' '3858';\n"); - sb.append("KW3859 : 'KW' '3859';\n"); - sb.append("KW3860 : 'KW' '3860';\n"); - sb.append("KW3861 : 'KW' '3861';\n"); - sb.append("KW3862 : 'KW' '3862';\n"); - sb.append("KW3863 : 'KW' '3863';\n"); - sb.append("KW3864 : 'KW' '3864';\n"); - sb.append("KW3865 : 'KW' '3865';\n"); - sb.append("KW3866 : 'KW' '3866';\n"); - sb.append("KW3867 : 'KW' '3867';\n"); - sb.append("KW3868 : 'KW' '3868';\n"); - sb.append("KW3869 : 'KW' '3869';\n"); - sb.append("KW3870 : 'KW' '3870';\n"); - sb.append("KW3871 : 'KW' '3871';\n"); - sb.append("KW3872 : 'KW' '3872';\n"); - sb.append("KW3873 : 'KW' '3873';\n"); - sb.append("KW3874 : 'KW' '3874';\n"); - sb.append("KW3875 : 'KW' '3875';\n"); - sb.append("KW3876 : 'KW' '3876';\n"); - sb.append("KW3877 : 'KW' '3877';\n"); - sb.append("KW3878 : 'KW' '3878';\n"); - sb.append("KW3879 : 'KW' '3879';\n"); - sb.append("KW3880 : 'KW' '3880';\n"); - sb.append("KW3881 : 'KW' '3881';\n"); - sb.append("KW3882 : 'KW' '3882';\n"); - sb.append("KW3883 : 'KW' '3883';\n"); - sb.append("KW3884 : 'KW' '3884';\n"); - sb.append("KW3885 : 'KW' '3885';\n"); - sb.append("KW3886 : 'KW' '3886';\n"); - sb.append("KW3887 : 'KW' '3887';\n"); - sb.append("KW3888 : 'KW' '3888';\n"); - sb.append("KW3889 : 'KW' '3889';\n"); - sb.append("KW3890 : 'KW' '3890';\n"); - sb.append("KW3891 : 'KW' '3891';\n"); - sb.append("KW3892 : 'KW' '3892';\n"); - sb.append("KW3893 : 'KW' '3893';\n"); - sb.append("KW3894 : 'KW' '3894';\n"); - sb.append("KW3895 : 'KW' '3895';\n"); - sb.append("KW3896 : 'KW' '3896';\n"); - sb.append("KW3897 : 'KW' '3897';\n"); - sb.append("KW3898 : 'KW' '3898';\n"); - sb.append("KW3899 : 'KW' '3899';\n"); - sb.append("KW3900 : 'KW' '3900';\n"); - sb.append("KW3901 : 'KW' '3901';\n"); - sb.append("KW3902 : 'KW' '3902';\n"); - sb.append("KW3903 : 'KW' '3903';\n"); - sb.append("KW3904 : 'KW' '3904';\n"); - sb.append("KW3905 : 'KW' '3905';\n"); - sb.append("KW3906 : 'KW' '3906';\n"); - sb.append("KW3907 : 
'KW' '3907';\n"); - sb.append("KW3908 : 'KW' '3908';\n"); - sb.append("KW3909 : 'KW' '3909';\n"); - sb.append("KW3910 : 'KW' '3910';\n"); - sb.append("KW3911 : 'KW' '3911';\n"); - sb.append("KW3912 : 'KW' '3912';\n"); - sb.append("KW3913 : 'KW' '3913';\n"); - sb.append("KW3914 : 'KW' '3914';\n"); - sb.append("KW3915 : 'KW' '3915';\n"); - sb.append("KW3916 : 'KW' '3916';\n"); - sb.append("KW3917 : 'KW' '3917';\n"); - sb.append("KW3918 : 'KW' '3918';\n"); - sb.append("KW3919 : 'KW' '3919';\n"); - sb.append("KW3920 : 'KW' '3920';\n"); - sb.append("KW3921 : 'KW' '3921';\n"); - sb.append("KW3922 : 'KW' '3922';\n"); - sb.append("KW3923 : 'KW' '3923';\n"); - sb.append("KW3924 : 'KW' '3924';\n"); - sb.append("KW3925 : 'KW' '3925';\n"); - sb.append("KW3926 : 'KW' '3926';\n"); - sb.append("KW3927 : 'KW' '3927';\n"); - sb.append("KW3928 : 'KW' '3928';\n"); - sb.append("KW3929 : 'KW' '3929';\n"); - sb.append("KW3930 : 'KW' '3930';\n"); - sb.append("KW3931 : 'KW' '3931';\n"); - sb.append("KW3932 : 'KW' '3932';\n"); - sb.append("KW3933 : 'KW' '3933';\n"); - sb.append("KW3934 : 'KW' '3934';\n"); - sb.append("KW3935 : 'KW' '3935';\n"); - sb.append("KW3936 : 'KW' '3936';\n"); - sb.append("KW3937 : 'KW' '3937';\n"); - sb.append("KW3938 : 'KW' '3938';\n"); - sb.append("KW3939 : 'KW' '3939';\n"); - sb.append("KW3940 : 'KW' '3940';\n"); - sb.append("KW3941 : 'KW' '3941';\n"); - sb.append("KW3942 : 'KW' '3942';\n"); - sb.append("KW3943 : 'KW' '3943';\n"); - sb.append("KW3944 : 'KW' '3944';\n"); - sb.append("KW3945 : 'KW' '3945';\n"); - sb.append("KW3946 : 'KW' '3946';\n"); - sb.append("KW3947 : 'KW' '3947';\n"); - sb.append("KW3948 : 'KW' '3948';\n"); - sb.append("KW3949 : 'KW' '3949';\n"); - sb.append("KW3950 : 'KW' '3950';\n"); - sb.append("KW3951 : 'KW' '3951';\n"); - sb.append("KW3952 : 'KW' '3952';\n"); - sb.append("KW3953 : 'KW' '3953';\n"); - sb.append("KW3954 : 'KW' '3954';\n"); - sb.append("KW3955 : 'KW' '3955';\n"); - sb.append("KW3956 : 'KW' '3956';\n"); - sb.append("KW3957 : 'KW' '3957';\n"); - sb.append("KW3958 : 'KW' '3958';\n"); - sb.append("KW3959 : 'KW' '3959';\n"); - sb.append("KW3960 : 'KW' '3960';\n"); - sb.append("KW3961 : 'KW' '3961';\n"); - sb.append("KW3962 : 'KW' '3962';\n"); - sb.append("KW3963 : 'KW' '3963';\n"); - sb.append("KW3964 : 'KW' '3964';\n"); - sb.append("KW3965 : 'KW' '3965';\n"); - sb.append("KW3966 : 'KW' '3966';\n"); - sb.append("KW3967 : 'KW' '3967';\n"); - sb.append("KW3968 : 'KW' '3968';\n"); - sb.append("KW3969 : 'KW' '3969';\n"); - sb.append("KW3970 : 'KW' '3970';\n"); - sb.append("KW3971 : 'KW' '3971';\n"); - sb.append("KW3972 : 'KW' '3972';\n"); - sb.append("KW3973 : 'KW' '3973';\n"); - sb.append("KW3974 : 'KW' '3974';\n"); - sb.append("KW3975 : 'KW' '3975';\n"); - sb.append("KW3976 : 'KW' '3976';\n"); - sb.append("KW3977 : 'KW' '3977';\n"); - sb.append("KW3978 : 'KW' '3978';\n"); - sb.append("KW3979 : 'KW' '3979';\n"); - sb.append("KW3980 : 'KW' '3980';\n"); - sb.append("KW3981 : 'KW' '3981';\n"); - sb.append("KW3982 : 'KW' '3982';\n"); - sb.append("KW3983 : 'KW' '3983';\n"); - sb.append("KW3984 : 'KW' '3984';\n"); - sb.append("KW3985 : 'KW' '3985';\n"); - sb.append("KW3986 : 'KW' '3986';\n"); - sb.append("KW3987 : 'KW' '3987';\n"); - sb.append("KW3988 : 'KW' '3988';\n"); - sb.append("KW3989 : 'KW' '3989';\n"); - sb.append("KW3990 : 'KW' '3990';\n"); - sb.append("KW3991 : 'KW' '3991';\n"); - sb.append("KW3992 : 'KW' '3992';\n"); - sb.append("KW3993 : 'KW' '3993';\n"); - sb.append("KW3994 : 'KW' '3994';\n"); - sb.append("KW3995 : 'KW' '3995';\n"); - 
sb.append("KW3996 : 'KW' '3996';\n"); - sb.append("KW3997 : 'KW' '3997';\n"); - sb.append("KW3998 : 'KW' '3998';\n"); - sb.append("KW3999 : 'KW' '3999';\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "KW400", false); - assertEquals("[@0,0:4='KW400',<402>,1:0]\n" + - "[@1,5:4='',<-1>,1:5]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testZeroLengthToken() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("BeginString\n"); - sb.append(" : '\\'' -> more, pushMode(StringMode)\n"); - sb.append(" ;\n"); - sb.append("mode StringMode;\n"); - sb.append(" StringMode_X : 'x' -> more;\n"); - sb.append(" StringMode_Done : -> more, mode(EndStringMode);\n"); - sb.append("mode EndStringMode; \n"); - sb.append(" EndString : '\\'' -> popMode;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "'xxx'", false); - assertEquals("[@0,0:4=''xxx'',<1>,1:0]\n" + - "[@1,5:4='',<-1>,1:5]\n", found); - assertNull(this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestListeners.java b/tool/test/org/antlr/v4/test/rt/java/TestListeners.java deleted file mode 100644 index 8b9ce2188..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestListeners.java +++ /dev/null @@ -1,230 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestListeners extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testBasic() throws Exception { - String grammar = "grammar T;\n" + - "@parser::header {\n" + - "}\n" + - "\n" + - "@parser::members {\n" + - "public static class LeafListener extends TBaseListener {\n" + - " public void visitTerminal(TerminalNode node) {\n" + - " System.out.println(node.getSymbol().getText());\n" + - " }\n" + - "}\n" + - "}\n" + - "\n" + - "s\n" + - "@after {\n" + - "System.out.println($ctx.r.toStringTree(this));\n" + - "ParseTreeWalker walker = new ParseTreeWalker();\n" + - "walker.walk(new LeafListener(), $ctx.r);\n" + - "}\n" + - " : r=a ;\n" + - "a : INT INT\n" + - " | ID\n" + - " ;\n" + - "MULT: '*' ;\n" + - "ADD : '+' ;\n" + - "INT : [0-9]+ ;\n" + - "ID : [a-z]+ ;\n" + - "WS : [ \\t\\n]+ -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "1 2", false); - assertEquals("(a 1 2)\n1\n2\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testTokenGetters(String input) throws Exception { - String grammar = "grammar T;\n" + - "@parser::header {\n" + - "}\n" + - "\n" + - "@parser::members {\n" + - "public static class LeafListener extends TBaseListener {\n" + - " public void exitA(TParser.AContext ctx) {\n" + - " if (ctx.getChildCount()==2) \n" + - " System.out.printf(\"%s %s %s\",ctx.INT(0).getSymbol().getText(),\n" + - " ctx.INT(1).getSymbol().getText(),ctx.INT());\n" + - " else\n" + - " System.out.println(ctx.ID().getSymbol());\n" + - " }\n" + - "}\n" + - "}\n" + - "\n" + - "s\n" + - "@after {\n" + - "System.out.println($ctx.r.toStringTree(this));\n" + - "ParseTreeWalker walker = new ParseTreeWalker();\n" + - "walker.walk(new LeafListener(), $ctx.r);\n" + - "}\n" + - " : r=a 
;\n" + - "a : INT INT\n" + - " | ID\n" + - " ;\n" + - "MULT: '*' ;\n" + - "ADD : '+' ;\n" + - "INT : [0-9]+ ;\n" + - "ID : [a-z]+ ;\n" + - "WS : [ \\t\\n]+ -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTokenGetters_1() throws Exception { - String found = testTokenGetters("1 2"); - assertEquals("(a 1 2)\n1 2 [1, 2]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTokenGetters_2() throws Exception { - String found = testTokenGetters("abc"); - assertEquals("(a abc)\n[@0,0:2='abc',<4>,1:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testRuleGetters(String input) throws Exception { - String grammar = "grammar T;\n" + - "@parser::header {\n" + - "}\n" + - "\n" + - "@parser::members {\n" + - "public static class LeafListener extends TBaseListener {\n" + - " public void exitA(TParser.AContext ctx) {\n" + - " if (ctx.getChildCount()==2) {\n" + - " System.out.printf(\"%s %s %s\",ctx.b(0).start.getText(),\n" + - " ctx.b(1).start.getText(),ctx.b().get(0).start.getText());\n" + - " } else \n" + - " System.out.println(ctx.b(0).start.getText());\n" + - " }\n" + - "}\n" + - "}\n" + - "\n" + - "s\n" + - "@after {\n" + - "System.out.println($ctx.r.toStringTree(this));\n" + - "ParseTreeWalker walker = new ParseTreeWalker();\n" + - "walker.walk(new LeafListener(), $ctx.r);\n" + - "}\n" + - " : r=a ;\n" + - "a : b b // forces list\n" + - " | b // a list still\n" + - " ;\n" + - "b : ID | INT;\n" + - "MULT: '*' ;\n" + - "ADD : '+' ;\n" + - "INT : [0-9]+ ;\n" + - "ID : [a-z]+ ;\n" + - "WS : [ \\t\\n]+ -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "s", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testRuleGetters_1() throws Exception { - String found = testRuleGetters("1 2"); - assertEquals("(a (b 1) (b 2))\n1 2 1\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testRuleGetters_2() throws Exception { - String found = testRuleGetters("abc"); - assertEquals("(a (b abc))\nabc\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLR() throws Exception { - String grammar = "grammar T;\n" + - "@parser::header {\n" + - "}\n" + - "\n" + - "@parser::members {\n" + - "public static class LeafListener extends TBaseListener {\n" + - " public void exitE(TParser.EContext ctx) {\n" + - " if (ctx.getChildCount()==3) {\n" + - " System.out.printf(\"%s %s %s\\n\",ctx.e(0).start.getText(),\n" + - " ctx.e(1).start.getText(), ctx.e().get(0).start.getText());\n" + - " } else \n" + - " System.out.println(ctx.INT().getSymbol().getText());\n" + - " }\n" + - "}\n" + - "}\n" + - "\n" + - "s\n" + - "@after {\n" + - "System.out.println($ctx.r.toStringTree(this));\n" + - "ParseTreeWalker walker = new ParseTreeWalker();\n" + - "walker.walk(new LeafListener(), $ctx.r);\n" + - "}\n" + - " : r=e ;\n" + - "e : e op='*' e\n" + - " | e op='+' e\n" + - " | INT\n" + - " ;\n" + - 
"MULT: '*' ;\n" + - "ADD : '+' ;\n" + - "INT : [0-9]+ ;\n" + - "ID : [a-z]+ ;\n" + - "WS : [ \\t\\n]+ -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "1+2*3", false); - assertEquals("(e (e 1) + (e (e 2) * (e 3)))\n1\n2\n3\n2 3 2\n1 2 1\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLRWithLabels() throws Exception { - String grammar = "grammar T;\n" + - "@parser::header {\n" + - "}\n" + - "\n" + - "@parser::members {\n" + - "public static class LeafListener extends TBaseListener {\n" + - " public void exitCall(TParser.CallContext ctx) {\n" + - " System.out.printf(\"%s %s\",ctx.e().start.getText(),ctx.eList());\n" + - " }\n" + - " public void exitInt(TParser.IntContext ctx) {\n" + - " System.out.println(ctx.INT().getSymbol().getText());\n" + - " }\n" + - "}\n" + - "}\n" + - "\n" + - "s\n" + - "@after {\n" + - "System.out.println($ctx.r.toStringTree(this));\n" + - "ParseTreeWalker walker = new ParseTreeWalker();\n" + - "walker.walk(new LeafListener(), $ctx.r);\n" + - "}\n" + - " : r=e ;\n" + - "e : e '(' eList ')' # Call\n" + - " | INT # Int\n" + - " ;\n" + - "eList : e (',' e)* ;\n" + - "MULT: '*' ;\n" + - "ADD : '+' ;\n" + - "INT : [0-9]+ ;\n" + - "ID : [a-z]+ ;\n" + - "WS : [ \\t\\n]+ -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "1(2,3)", false); - assertEquals("(e (e 1) ( (eList (e 2) , (e 3)) ))\n1\n2\n3\n1 [13 6]\n", found); - assertNull(this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestParseTrees.java b/tool/test/org/antlr/v4/test/rt/java/TestParseTrees.java deleted file mode 100644 index 77581717e..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestParseTrees.java +++ /dev/null @@ -1,173 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestParseTrees extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTokenAndRuleContextString() throws Exception { - String grammar = "grammar T;\n" + - "s\n" + - "@init {\n" + - "setBuildParseTree(true);\n" + - "}\n" + - "@after {\n" + - "System.out.println($r.ctx.toStringTree(this));\n" + - "}\n" + - " : r=a ;\n" + - "a : 'x' { \n" + - "System.out.println(getRuleInvocationStack());\n" + - "} ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x", false); - assertEquals("[a, s]\n(a x)\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testToken2() throws Exception { - String grammar = "grammar T;\n" + - "s\n" + - "@init {\n" + - "setBuildParseTree(true);\n" + - "}\n" + - "@after {\n" + - "System.out.println($r.ctx.toStringTree(this));\n" + - "}\n" + - " : r=a ;\n" + - "a : 'x' 'y'\n" + - " ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "xy", false); - assertEquals("(a x y)\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void test2Alts() throws Exception { - String grammar = "grammar T;\n" + - "s\n" + - "@init {\n" + - "setBuildParseTree(true);\n" + - "}\n" + - "@after {\n" + - 
"System.out.println($r.ctx.toStringTree(this));\n" + - "}\n" + - " : r=a ;\n" + - "a : 'x' | 'y'\n" + - " ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "y", false); - assertEquals("(a y)\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void test2AltLoop() throws Exception { - String grammar = "grammar T;\n" + - "s\n" + - "@init {\n" + - "setBuildParseTree(true);\n" + - "}\n" + - "@after {\n" + - "System.out.println($r.ctx.toStringTree(this));\n" + - "}\n" + - " : r=a ;\n" + - "a : ('x' | 'y')* 'z'\n" + - " ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "xyyxyxz", false); - assertEquals("(a x y y x y x z)\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testRuleRef() throws Exception { - String grammar = "grammar T;\n" + - "s\n" + - "@init {\n" + - "setBuildParseTree(true);\n" + - "}\n" + - "@after {\n" + - "System.out.println($r.ctx.toStringTree(this));\n" + - "}\n" + - " : r=a ;\n" + - "a : b 'x'\n" + - " ;\n" + - "b : 'y' \n" + - " ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "yx", false); - assertEquals("(a (b y) x)\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testExtraToken() throws Exception { - String grammar = "grammar T;\n" + - "s\n" + - "@init {\n" + - "setBuildParseTree(true);\n" + - "}\n" + - "@after {\n" + - "System.out.println($r.ctx.toStringTree(this));\n" + - "}\n" + - " : r=a ;\n" + - "a : 'x' 'y'\n" + - " ;\n" + - "Z : 'z' \n" + - " ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "xzy", false); - assertEquals("(a x z y)\n", found); - assertEquals("line 1:1 extraneous input 'z' expecting 'y'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNoViableAlt() throws Exception { - String grammar = "grammar T;\n" + - "s\n" + - "@init {\n" + - "setBuildParseTree(true);\n" + - "}\n" + - "@after {\n" + - "System.out.println($r.ctx.toStringTree(this));\n" + - "}\n" + - " : r=a ;\n" + - "a : 'x' | 'y'\n" + - " ;\n" + - "Z : 'z' \n" + - " ;\n" + - " "; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "z", false); - assertEquals("(a z)\n", found); - assertEquals("line 1:0 mismatched input 'z' expecting {'x', 'y'}\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSync() throws Exception { - String grammar = "grammar T;\n" + - "s\n" + - "@init {\n" + - "setBuildParseTree(true);\n" + - "}\n" + - "@after {\n" + - "System.out.println($r.ctx.toStringTree(this));\n" + - "}\n" + - " : r=a ;\n" + - "a : 'x' 'y'* '!'\n" + - " ;\n" + - "Z : 'z' \n" + - " ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "xzyy!", false); - assertEquals("(a x z y y !)\n", found); - assertEquals("line 1:1 extraneous input 'z' expecting {'y', '!'}\n", this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestParserErrors.java b/tool/test/org/antlr/v4/test/rt/java/TestParserErrors.java deleted file mode 100644 index db0c5d986..000000000 --- 
a/tool/test/org/antlr/v4/test/rt/java/TestParserErrors.java +++ /dev/null @@ -1,353 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestParserErrors extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTokenMismatch() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' 'b' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "aa", false); - assertEquals("", found); - assertEquals("line 1:1 mismatched input 'a' expecting 'b'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testTokenMismatch2() throws Exception { - String grammar = "grammar T;\n" + - "\n" + - "stat: ( '(' expr? ')' )? EOF ;\n" + - "expr: ID '=' STR ;\n" + - "\n" + - "ERR : '~FORCE_ERROR~' ;\n" + - "ID : [a-zA-Z]+ ;\n" + - "STR : '\"' ~[\"]* '\"' ;\n" + - "WS : [ \\t\\r\\n]+ -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "stat", "( ~FORCE_ERROR~ ", false); - assertEquals("", found); - assertEquals("line 1:2 mismatched input '~FORCE_ERROR~' expecting ')'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSingleTokenDeletion() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' 'b' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "aab", false); - assertEquals("", found); - assertEquals("line 1:1 extraneous input 'a' expecting 'b'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSingleTokenDeletionExpectingSet() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' ('b'|'c') ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "aab", false); - assertEquals("", found); - assertEquals("line 1:1 extraneous input 'a' expecting {'b', 'c'}\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSingleTokenDeletionConsumption() throws Exception { - String grammar = "grammar T;\n" + - "myset: ('b'|'c') ;\n" + - "a: 'a' myset 'd' {System.out.println(\"\" + $myset.stop);} ; "; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "aabd", false); - assertEquals("[@2,2:2='b',<1>,1:2]\n", found); - assertEquals("line 1:1 extraneous input 'a' expecting {'b', 'c'}\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSingleTokenInsertion() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' 'b' 'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ac", false); - assertEquals("", found); - assertEquals("line 1:1 missing 'b' at 'c'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testConjuringUpToken() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' x='b' {System.out.println(\"conjured=\" + $x);} 'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ac", false); - 
assertEquals("conjured=[@-1,-1:-1='',<2>,1:1]\n", found); - assertEquals("line 1:1 missing 'b' at 'c'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSingleSetInsertion() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' ('b'|'c') 'd' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ad", false); - assertEquals("", found); - assertEquals("line 1:1 missing {'b', 'c'} at 'd'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSingleSetInsertionConsumption() throws Exception { - String grammar = "grammar T;\n" + - "myset: ('b'|'c') ;\n" + - "a: 'a' myset 'd' {System.out.println(\"\" + $myset.stop);} ; "; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ad", false); - assertEquals("[@0,0:0='a',<3>,1:0]\n", found); - assertEquals("line 1:1 missing {'b', 'c'} at 'd'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testConjuringUpTokenFromSet() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' x=('b'|'c') {System.out.println(\"conjured=\" + $x);} 'd' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ad", false); - assertEquals("conjured=[@-1,-1:-1='',<2>,1:1]\n", found); - assertEquals("line 1:1 missing {'b', 'c'} at 'd'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLL2() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' 'b'\n" + - " | 'a' 'c'\n" + - ";\n" + - "q : 'e' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ae", false); - assertEquals("", found); - assertEquals("line 1:1 no viable alternative at input 'ae'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLL3() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' 'b'* 'c'\n" + - " | 'a' 'b' 'd'\n" + - ";\n" + - "q : 'e' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abe", false); - assertEquals("", found); - assertEquals("line 1:2 no viable alternative at input 'abe'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLLStar() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a'+ 'b'\n" + - " | 'a'+ 'c'\n" + - ";\n" + - "q : 'e' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "aaae", false); - assertEquals("", found); - assertEquals("line 1:3 no viable alternative at input 'aaae'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSingleTokenDeletionBeforeLoop() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' 'b'* ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "aabc", false); - assertEquals("", found); - assertEquals("line 1:1 extraneous input 'a' expecting {, 'b'}\nline 1:3 token recognition error at: 'c'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation 
*/ - @Test - public void testMultiTokenDeletionBeforeLoop() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' 'b'* 'c';"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "aacabc", false); - assertEquals("", found); - assertEquals("line 1:1 extraneous input 'a' expecting {'b', 'c'}\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSingleTokenDeletionDuringLoop() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' 'b'* 'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ababbc", false); - assertEquals("", found); - assertEquals("line 1:2 extraneous input 'a' expecting {'b', 'c'}\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultiTokenDeletionDuringLoop() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' 'b'* 'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abaaababc", false); - assertEquals("", found); - assertEquals("line 1:2 extraneous input 'a' expecting {'b', 'c'}\nline 1:6 extraneous input 'a' expecting {'b', 'c'}\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSingleTokenDeletionBeforeLoop2() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' ('b'|'z'{})*;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "aabc", false); - assertEquals("", found); - assertEquals("line 1:1 extraneous input 'a' expecting {, 'b', 'z'}\nline 1:3 token recognition error at: 'c'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultiTokenDeletionBeforeLoop2() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' ('b'|'z'{})* 'c';"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "aacabc", false); - assertEquals("", found); - assertEquals("line 1:1 extraneous input 'a' expecting {'b', 'z', 'c'}\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSingleTokenDeletionDuringLoop2() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' ('b'|'z'{})* 'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ababbc", false); - assertEquals("", found); - assertEquals("line 1:2 extraneous input 'a' expecting {'b', 'z', 'c'}\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultiTokenDeletionDuringLoop2() throws Exception { - String grammar = "grammar T;\n" + - "a : 'a' ('b'|'z'{})* 'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abaaababc", false); - assertEquals("", found); - assertEquals("line 1:2 extraneous input 'a' expecting {'b', 'z', 'c'}\nline 1:6 extraneous input 'a' expecting {'b', 'z', 'c'}\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLL1ErrorInfo() throws Exception { - String grammar = "grammar T;\n" + - "start : animal (AND acClass)? 
service EOF;\n" + - "animal : (DOG | CAT );\n" + - "service : (HARDWARE | SOFTWARE) ;\n" + - "AND : 'and';\n" + - "DOG : 'dog';\n" + - "CAT : 'cat';\n" + - "HARDWARE: 'hardware';\n" + - "SOFTWARE: 'software';\n" + - "WS : ' ' -> skip ;\n" + - "acClass\n" + - "@init\n" + - "{System.out.println(this.getExpectedTokens().toString(this.tokenNames));}\n" + - " : ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "start", "dog and software", false); - assertEquals("{'hardware', 'software'}\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testInvalidEmptyInput() throws Exception { - String grammar = "grammar T;\n" + - "start : ID+;\n" + - "ID : [a-z]+;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "start", "", false); - assertEquals("", found); - assertEquals("line 1:0 missing ID at ''\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testContextListGetters() throws Exception { - String grammar = "grammar T;\n" + - "@parser::members{\n" + - "void foo() {\n" + - " SContext s = null;\n" + - " List a = s.a();\n" + - " List b = s.b();\n" + - "}\n" + - "}\n" + - "s : (a | b)+;\n" + - "a : 'a' {System.out.print('a');};\n" + - "b : 'b' {System.out.print('b');};"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "abab", false); - assertEquals("abab\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testDuplicatedLeftRecursiveCall(String input) throws Exception { - String grammar = "grammar T;\n" + - "start : expr EOF;\n" + - "expr : 'x'\n" + - " | expr expr\n" + - " ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "start", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDuplicatedLeftRecursiveCall_1() throws Exception { - String found = testDuplicatedLeftRecursiveCall("xx"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDuplicatedLeftRecursiveCall_2() throws Exception { - String found = testDuplicatedLeftRecursiveCall("xxx"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDuplicatedLeftRecursiveCall_3() throws Exception { - String found = testDuplicatedLeftRecursiveCall("xxxx"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testInvalidATNStateRemoval() throws Exception { - String grammar = "grammar T;\n" + - "start : ID ':' expr;\n" + - "expr : primary expr? 
{} | expr '->' ID;\n" + - "primary : ID;\n" + - "ID : [a-z]+;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "start", "x:x", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNoViableAltAvoidance() throws Exception { - String grammar = "grammar T;\n" + - "s : e '!' ;\n" + - "e : 'a' 'b'\n" + - " | 'a'\n" + - " ;\n" + - "DOT : '.' ;\n" + - "WS : [ \\t\\r\\n]+ -> skip;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "a.", false); - assertEquals("", found); - assertEquals("line 1:1 mismatched input '.' expecting '!'\n", this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestParserExec.java b/tool/test/org/antlr/v4/test/rt/java/TestParserExec.java deleted file mode 100644 index 9fe6799c5..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestParserExec.java +++ /dev/null @@ -1,477 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestParserExec extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLabels() throws Exception { - String grammar = "grammar T;\n" + - "a : b1=b b2+=b* b3+=';' ;\n" + - "b : id_=ID val+=INT*;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abc 34;", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testListLabelsOnSet() throws Exception { - String grammar = "grammar T;\n" + - "a : b b* ';' ;\n" + - "b : ID val+=(INT | FLOAT)*;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "FLOAT : [0-9]+ '.' 
[0-9]+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abc 34;", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAorB() throws Exception { - String grammar = "grammar T;\n" + - "a : ID {\n" + - "System.out.println(\"alt 1\");\n" + - "} | INT {\n" + - "System.out.println(\"alt 2\");\n" + - "};\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "34", false); - assertEquals("alt 2\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testBasic() throws Exception { - String grammar = "grammar T;\n" + - "a : ID INT {\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abc 34", false); - assertEquals("abc34\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAPlus() throws Exception { - String grammar = "grammar T;\n" + - "a : ID+ {\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+;\n" + - "WS : (' '|'\\n') -> skip;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "a b c", false); - assertEquals("abc\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAorAPlus() throws Exception { - String grammar = "grammar T;\n" + - "a : (ID|ID)+ {\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+;\n" + - "WS : (' '|'\\n') -> skip;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "a b c", false); - assertEquals("abc\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testIfIfElseGreedyBinding1() throws Exception { - String grammar = "grammar T;\n" + - "start : statement+ ;\n" + - "statement : 'x' | ifStatement;\n" + - "ifStatement : 'if' 'y' statement ('else' statement)? 
{\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> channel(HIDDEN);"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "start", "if y if y x else x", false); - assertEquals("if y x else x\nif y if y x else x\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testIfIfElseGreedyBinding2() throws Exception { - String grammar = "grammar T;\n" + - "start : statement+ ;\n" + - "statement : 'x' | ifStatement;\n" + - "ifStatement : 'if' 'y' statement ('else' statement|) {\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> channel(HIDDEN);"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "start", "if y if y x else x", false); - assertEquals("if y x else x\nif y if y x else x\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testIfIfElseNonGreedyBinding1() throws Exception { - String grammar = "grammar T;\n" + - "start : statement+ ;\n" + - "statement : 'x' | ifStatement;\n" + - "ifStatement : 'if' 'y' statement ('else' statement)?? {\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> channel(HIDDEN);"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "start", "if y if y x else x", false); - assertEquals("if y x\nif y if y x else x\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testIfIfElseNonGreedyBinding2() throws Exception { - String grammar = "grammar T;\n" + - "start : statement+ ;\n" + - "statement : 'x' | ifStatement;\n" + - "ifStatement : 'if' 'y' statement (|'else' statement) {\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> channel(HIDDEN);"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "start", "if y if y x else x", false); - assertEquals("if y x\nif y if y x else x\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testAStar(String input) throws Exception { - String grammar = "grammar T;\n" + - "a : ID* {\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+;\n" + - "WS : (' '|'\\n') -> skip;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "a", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAStar_1() throws Exception { - String found = testAStar(""); - assertEquals("\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAStar_2() throws Exception { - String found = testAStar("a b c"); - assertEquals("abc\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testLL1OptionalBlock(String input) throws Exception { - String grammar = "grammar T;\n" + - "a : (ID|{}INT)? 
{\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+;\n" + - "INT : '0'..'9'+ ;\n" + - "WS : (' '|'\\n') -> skip;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "a", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLL1OptionalBlock_1() throws Exception { - String found = testLL1OptionalBlock(""); - assertEquals("\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLL1OptionalBlock_2() throws Exception { - String found = testLL1OptionalBlock("a"); - assertEquals("a\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testAorAStar(String input) throws Exception { - String grammar = "grammar T;\n" + - "a : (ID|ID)* {\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+;\n" + - "WS : (' '|'\\n') -> skip;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "a", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAorAStar_1() throws Exception { - String found = testAorAStar(""); - assertEquals("\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAorAStar_2() throws Exception { - String found = testAorAStar("a b c"); - assertEquals("abc\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAorBPlus() throws Exception { - String grammar = "grammar T;\n" + - "a : (ID|INT{\n" + - "})+ {\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "a 34 c", false); - assertEquals("a34c\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testAorBStar(String input) throws Exception { - String grammar = "grammar T;\n" + - "a : (ID|INT{\n" + - "})* {\n" + - "System.out.println($text);\n" + - "};\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "a", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAorBStar_1() throws Exception { - String found = testAorBStar(""); - assertEquals("\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAorBStar_2() throws Exception { - String found = testAorBStar("a 34 c"); - assertEquals("a34c\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testOptional(String input) throws Exception { - String grammar = "grammar T;\n" + - "stat : ifstat | 'x';\n" + - "ifstat : 'if' stat ('else' stat)?;\n" + - "WS : [ \\n\\t]+ -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "stat", input, 
false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testOptional_1() throws Exception { - String found = testOptional("x"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testOptional_2() throws Exception { - String found = testOptional("if x"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testOptional_3() throws Exception { - String found = testOptional("if x else x"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testOptional_4() throws Exception { - String found = testOptional("if if x else x"); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPredicatedIfIfElse() throws Exception { - String grammar = "grammar T;\n" + - "s : stmt EOF ;\n" + - "stmt : ifStmt | ID;\n" + - "ifStmt : 'if' ID stmt ('else' stmt | { this._input.LA(1)!=TParser.ELSE }?);\n" + - "ELSE : 'else';\n" + - "ID : [a-zA-Z]+;\n" + - "WS : [ \\n\\t]+ -> skip;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "if x if x a else b", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLabelAliasingAcrossLabeledAlternatives() throws Exception { - String grammar = "grammar T;\n" + - "start : a* EOF;\n" + - "a\n" + - " : label=subrule {System.out.println($label.text);} #One\n" + - " | label='y' {System.out.println($label.text);} #Two\n" + - " ;\n" + - "subrule : 'x';\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "start", "xy", false); - assertEquals("x\ny\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPredictionIssue334() throws Exception { - String grammar = "grammar T;\n" + - "file_ @init{\n" + - "setErrorHandler(new BailErrorStrategy());\n" + - "} \n" + - "@after {\n" + - "System.out.println($ctx.toStringTree(this));\n" + - "}\n" + - " : item (SEMICOLON item)* SEMICOLON? 
EOF ;\n" + - "item : A B?;\n" + - "SEMICOLON: ';';\n" + - "A : 'a'|'A';\n" + - "B : 'b'|'B';\n" + - "WS : [ \\r\\t\\n]+ -> skip;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "file_", "a", false); - assertEquals("(file_ (item a) )\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testListLabelForClosureContext() throws Exception { - String grammar = "grammar T;\n" + - "ifStatement\n" + - "@after {\n" + - "Object items = $ctx.elseIfStatement(); \n" + - "}\n" + - " : 'if' expression\n" + - " ( ( 'then'\n" + - " executableStatement*\n" + - " elseIfStatement* // <--- problem is here\n" + - " elseStatement?\n" + - " 'end' 'if'\n" + - " ) | executableStatement )\n" + - " ;\n" + - "\n" + - "elseIfStatement\n" + - " : 'else' 'if' expression 'then' executableStatement*\n" + - " ;\n" + - "expression : 'a' ;\n" + - "executableStatement : 'a' ;\n" + - "elseStatement : 'a' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "expression", "a", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testMultipleEOFHandling() throws Exception { - String grammar = "grammar T;\n" + - "prog : ('x' | 'x' 'y') EOF EOF;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "prog", "x", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testEOFInClosure() throws Exception { - String grammar = "grammar T;\n" + - "prog : stat EOF;\n" + - "stat : 'x' ('y' | EOF)*?;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "prog", "x", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testReferenceToATN(String input) throws Exception { - String grammar = "grammar T;\n" + - "a : (ID|ATN_)* ATN_? {System.out.println($text);} ;\n" + - "ID : 'a'..'z'+ ;\n" + - "ATN_ : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "a", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReferenceToATN_1() throws Exception { - String found = testReferenceToATN(""); - assertEquals("\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testReferenceToATN_2() throws Exception { - String found = testReferenceToATN("a 34 c"); - assertEquals("a34c\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testParserProperty() throws Exception { - String grammar = "grammar T;\n" + - "@members {\n" + - "boolean Property() {\n" + - " return true;\n" + - "}\n" + - "}\n" + - "a : {$parser.Property()}? 
ID {System.out.println(\"valid\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abc", false); - assertEquals("valid\n", found); - assertNull(this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestSemPredEvalLexer.java b/tool/test/org/antlr/v4/test/rt/java/TestSemPredEvalLexer.java deleted file mode 100644 index 3041272f3..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestSemPredEvalLexer.java +++ /dev/null @@ -1,170 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestSemPredEvalLexer extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDisableRule() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("E1 : 'enum' { false }? ;\n"); - sb.append("E2 : 'enum' { true }? ; // winner not E1 or ID\n"); - sb.append("ID : 'a'..'z'+ ;\n"); - sb.append("WS : (' '|'\\n') -> skip;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "enum abc", true); - assertEquals("[@0,0:3='enum',<2>,1:0]\n" + - "[@1,5:7='abc',<3>,1:5]\n" + - "[@2,8:7='',<-1>,1:8]\n" + - "s0-' '->:s5=>4\n" + - "s0-'a'->:s6=>3\n" + - "s0-'e'->:s1=>3\n" + - ":s1=>3-'n'->:s2=>3\n" + - ":s2=>3-'u'->:s3=>3\n" + - ":s6=>3-'b'->:s6=>3\n" + - ":s6=>3-'c'->:s6=>3\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testIDvsEnum() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("ENUM : 'enum' { false }? ;\n"); - sb.append("ID : 'a'..'z'+ ;\n"); - sb.append("WS : (' '|'\\n') -> skip;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "enum abc enum", true); - assertEquals("[@0,0:3='enum',<2>,1:0]\n" + - "[@1,5:7='abc',<2>,1:5]\n" + - "[@2,9:12='enum',<2>,1:9]\n" + - "[@3,13:12='',<-1>,1:13]\n" + - "s0-' '->:s5=>3\n" + - "s0-'a'->:s4=>2\n" + - "s0-'e'->:s1=>2\n" + - ":s1=>2-'n'->:s2=>2\n" + - ":s2=>2-'u'->:s3=>2\n" + - ":s4=>2-'b'->:s4=>2\n" + - ":s4=>2-'c'->:s4=>2\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testIDnotEnum() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("ENUM : [a-z]+ { false }? ;\n"); - sb.append("ID : [a-z]+ ;\n"); - sb.append("WS : (' '|'\\n') -> skip;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "enum abc enum", true); - assertEquals("[@0,0:3='enum',<2>,1:0]\n" + - "[@1,5:7='abc',<2>,1:5]\n" + - "[@2,9:12='enum',<2>,1:9]\n" + - "[@3,13:12='',<-1>,1:13]\n" + - "s0-' '->:s2=>3\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testEnumNotID() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("ENUM : [a-z]+ { this.getText().equals(\"enum\") }? 
;\n"); - sb.append("ID : [a-z]+ ;\n"); - sb.append("WS : (' '|'\\n') -> skip;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "enum abc enum", true); - assertEquals("[@0,0:3='enum',<1>,1:0]\n" + - "[@1,5:7='abc',<2>,1:5]\n" + - "[@2,9:12='enum',<1>,1:9]\n" + - "[@3,13:12='',<-1>,1:13]\n" + - "s0-' '->:s3=>3\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testIndent() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("ID : [a-z]+ ;\n"); - sb.append("INDENT : [ \\t]+ { this._tokenStartCharPositionInLine==0 }?\n"); - sb.append(" { System.out.println(\"INDENT\"); } ;\n"); - sb.append("NL : '\\n';\n"); - sb.append("WS : [ \\t]+ ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "abc\n def \n", true); - assertEquals("INDENT\n" + - "[@0,0:2='abc',<1>,1:0]\n" + - "[@1,3:3='\\n',<3>,1:3]\n" + - "[@2,4:5=' ',<2>,2:0]\n" + - "[@3,6:8='def',<1>,2:2]\n" + - "[@4,9:10=' ',<4>,2:5]\n" + - "[@5,11:11='\\n',<3>,2:7]\n" + - "[@6,12:11='',<-1>,3:0]\n" + - "s0-'\n" + - "'->:s2=>3\n" + - "s0-'a'->:s1=>1\n" + - "s0-'d'->:s1=>1\n" + - ":s1=>1-'b'->:s1=>1\n" + - ":s1=>1-'c'->:s1=>1\n" + - ":s1=>1-'e'->:s1=>1\n" + - ":s1=>1-'f'->:s1=>1\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLexerInputPositionSensitivePredicates() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("WORD1 : ID1+ { System.out.println(this.getText()); } ;\n"); - sb.append("WORD2 : ID2+ { System.out.println(this.getText()); } ;\n"); - sb.append("fragment ID1 : { this.getCharPositionInLine() < 2 }? [a-zA-Z];\n"); - sb.append("fragment ID2 : { this.getCharPositionInLine() >= 2 }? [a-zA-Z];\n"); - sb.append("WS : (' '|'\\n') -> skip;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "a cde\nabcde\n", true); - assertEquals("a\n" + - "cde\n" + - "ab\n" + - "cde\n" + - "[@0,0:0='a',<1>,1:0]\n" + - "[@1,2:4='cde',<2>,1:2]\n" + - "[@2,6:7='ab',<1>,2:0]\n" + - "[@3,8:10='cde',<2>,2:2]\n" + - "[@4,12:11='',<-1>,3:0]\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPredicatedKeywords() throws Exception { - StringBuilder sb = new StringBuilder(); - sb.append("lexer grammar L;\n"); - sb.append("ENUM : [a-z]+ { this.getText().equals(\"enum\") }? 
{ System.out.println(\"enum!\"); } ;\n"); - sb.append("ID : [a-z]+ { System.out.println(\"ID \" + this.getText()); } ;\n"); - sb.append("WS : [ \\n] -> skip ;\n"); - String grammar = sb.toString(); - String found = execLexer("L.g4", grammar, "L", "enum enu a", false); - assertEquals("enum!\n" + - "ID enu\n" + - "ID a\n" + - "[@0,0:3='enum',<1>,1:0]\n" + - "[@1,5:7='enu',<2>,1:5]\n" + - "[@2,9:9='a',<2>,1:9]\n" + - "[@3,10:9='',<-1>,1:10]\n", found); - assertNull(this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestSemPredEvalParser.java b/tool/test/org/antlr/v4/test/rt/java/TestSemPredEvalParser.java deleted file mode 100644 index 0d00c36b7..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestSemPredEvalParser.java +++ /dev/null @@ -1,456 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestSemPredEvalParser extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSimpleValidate() throws Exception { - String grammar = "grammar T;\n" + - "s : a ;\n" + - "a : {false}? ID {System.out.println(\"alt 1\");}\n" + - " | {true}? INT {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x", true); - assertEquals("", found); - assertEquals("line 1:0 no viable alternative at input 'x'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSimpleValidate2() throws Exception { - String grammar = "grammar T;\n" + - "s : a a a;\n" + - "a : {false}? ID {System.out.println(\"alt 1\");}\n" + - " | {true}? INT {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "3 4 x", true); - assertEquals("alt 2\nalt 2\n", found); - assertEquals("line 1:4 no viable alternative at input 'x'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testAtomWithClosureInTranslatedLRRule() throws Exception { - String grammar = "grammar T;\n" + - "start : e[0] EOF;\n" + - "e[int _p]\n" + - " : ( 'a' | 'b'+ ) ( {3 >= $_p}? '+' e[4] )*\n" + - " ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "start", "a+b+a", false); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testValidateInDFA() throws Exception { - String grammar = "grammar T;\n" + - "s : a ';' a;\n" + - "// ';' helps us to resynchronize without consuming\n" + - "// 2nd 'a' reference. We our testing that the DFA also\n" + - "// throws an exception if the validating predicate fails\n" + - "a : {false}? ID {System.out.println(\"alt 1\");}\n" + - " | {true}? 
INT {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x ; y", true); - assertEquals("", found); - assertEquals("line 1:0 no viable alternative at input 'x'\nline 1:4 no viable alternative at input 'y'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSimple() throws Exception { - String grammar = "grammar T;\n" + - "s : a a a; // do 3x: once in ATN, next in DFA then INT in ATN\n" + - "a : {false}? ID {System.out.println(\"alt 1\");}\n" + - " | {true}? ID {System.out.println(\"alt 2\");}\n" + - " | INT {System.out.println(\"alt 3\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x y 3", true); - assertEquals("alt 2\nalt 2\nalt 3\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testOrder() throws Exception { - String grammar = "grammar T;\n" + - "s : a {} a; // do 2x: once in ATN, next in DFA;\n" + - "// action blocks lookahead from falling off of 'a'\n" + - "// and looking into 2nd 'a' ref. !ctx dependent pred\n" + - "a : ID {System.out.println(\"alt 1\");}\n" + - " | {true}? ID {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x y", false); - assertEquals("alt 1\nalt 1\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void test2UnpredicatedAlts() throws Exception { - String grammar = "grammar T;\n" + - "s : {_interp.setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);} a ';' a; // do 2x: once in ATN, next in DFA\n" + - "a : ID {System.out.println(\"alt 1\");}\n" + - " | ID {System.out.println(\"alt 2\");}\n" + - " | {false}? ID {System.out.println(\"alt 3\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x; y", true); - assertEquals("alt 1\nalt 1\n", found); - assertEquals("line 1:0 reportAttemptingFullContext d=0 (a), input='x'\nline 1:0 reportAmbiguity d=0 (a): ambigAlts={1, 2}, input='x'\nline 1:3 reportAttemptingFullContext d=0 (a), input='y'\nline 1:3 reportAmbiguity d=0 (a): ambigAlts={1, 2}, input='y'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void test2UnpredicatedAltsAndOneOrthogonalAlt() throws Exception { - String grammar = "grammar T;\n" + - "s : {_interp.setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);} a ';' a ';' a;\n" + - "a : INT {System.out.println(\"alt 1\");}\n" + - " | ID {System.out.println(\"alt 2\");} // must pick this one for ID since pred is false\n" + - " | ID {System.out.println(\"alt 3\");}\n" + - " | {false}? 
ID {System.out.println(\"alt 4\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "34; x; y", true); - assertEquals("alt 1\nalt 2\nalt 2\n", found); - assertEquals("line 1:4 reportAttemptingFullContext d=0 (a), input='x'\nline 1:4 reportAmbiguity d=0 (a): ambigAlts={2, 3}, input='x'\nline 1:7 reportAttemptingFullContext d=0 (a), input='y'\nline 1:7 reportAmbiguity d=0 (a): ambigAlts={2, 3}, input='y'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testRewindBeforePredEval() throws Exception { - String grammar = "grammar T;\n" + - "s : a a;\n" + - "a : {this._input.LT(1).getText().equals(\"x\")}? ID INT {System.out.println(\"alt 1\");}\n" + - " | {this._input.LT(1).getText().equals(\"y\")}? ID INT {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "y 3 x 4", true); - assertEquals("alt 2\nalt 1\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNoTruePredsThrowsNoViableAlt() throws Exception { - String grammar = "grammar T;\n" + - "s : a a;\n" + - "a : {false}? ID INT {System.out.println(\"alt 1\");}\n" + - " | {false}? ID INT {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "y 3 x 4", false); - assertEquals("", found); - assertEquals("line 1:0 no viable alternative at input 'y'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testToLeft() throws Exception { - String grammar = "grammar T;\n" + - " s : a+ ;\n" + - "a : {false}? ID {System.out.println(\"alt 1\");}\n" + - " | {true}? ID {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x x y", true); - assertEquals("alt 2\nalt 2\nalt 2\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testUnpredicatedPathsInAlt() throws Exception { - String grammar = "grammar T;\n" + - "s : a {System.out.println(\"alt 1\");}\n" + - " | b {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "a : {false}? ID INT\n" + - " | ID INT\n" + - " ;\n" + - "b : ID ID\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x 4", true); - assertEquals("alt 1\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testActionHidesPreds() throws Exception { - String grammar = "grammar T;\n" + - "@members {int i = 0;}\n" + - "s : a+ ;\n" + - "a : {this.i = 1;} ID {this.i == 1}? {System.out.println(\"alt 1\");}\n" + - " | {this.i = 2;} ID {this.i == 2}? 
{System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x x y", false); - assertEquals("alt 1\nalt 1\nalt 1\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testToLeftWithVaryingPredicate() throws Exception { - String grammar = "grammar T;\n" + - "@members {int i = 0;}\n" + - "s : ({this.i += 1;\n" + - "System.out.println(\"i=\" + this.i);} a)+ ;\n" + - "a : {this.i % 2 == 0}? ID {System.out.println(\"alt 1\");}\n" + - " | {this.i % 2 != 0}? ID {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "x x y", true); - assertEquals("i=1\nalt 2\ni=2\nalt 1\ni=3\nalt 2\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPredicateDependentOnArg() throws Exception { - String grammar = "grammar T;\n" + - "@members {int i = 0;}\n" + - "s : a[2] a[1];\n" + - "a[int i]\n" + - " : {$i==1}? ID {System.out.println(\"alt 1\");}\n" + - " | {$i==2}? ID {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "a b", true); - assertEquals("alt 2\nalt 1\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPredicateDependentOnArg2() throws Exception { - String grammar = "grammar T;\n" + - "@members {int i = 0;}\n" + - "s : a[2] a[1];\n" + - "a[int i]\n" + - " : {$i==1}? ID \n" + - " | {$i==2}? ID \n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "a b", true); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDependentPredNotInOuterCtxShouldBeIgnored() throws Exception { - String grammar = "grammar T;\n" + - "s : b[2] ';' | b[2] '.' ; // decision in s drills down to ctx-dependent pred in a;\n" + - "b[int i] : a[i] ;\n" + - "a[int i]\n" + - " : {$i==1}? ID {System.out.println(\"alt 1\");}\n" + - " | {$i==2}? ID {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "a;", true); - assertEquals("alt 2\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testIndependentPredNotPassedOuterCtxToAvoidCastException() throws Exception { - String grammar = "grammar T;\n" + - "s : b ';' | b '.' ;\n" + - "b : a ;\n" + - "a\n" + - " : {false}? ID {System.out.println(\"alt 1\");}\n" + - " | {true}? 
ID {System.out.println(\"alt 2\");}\n" + - " ;\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "a;", true); - assertEquals("alt 2\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPredsInGlobalFOLLOW() throws Exception { - String grammar = "grammar T;\n" + - "@members {\n" + - "boolean pred(boolean v) {\n" + - " System.out.println(\"eval=\"+v);\n" + - " return v;\n" + - "}\n" + - "}\n" + - "s : e {this.pred(true)}? {System.out.println(\"parse\");} '!' ;\n" + - "t : e {this.pred(false)}? ID ;\n" + - "e : ID | ; // non-LL(1) so we use ATN\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "a!", true); - assertEquals("eval=true\nparse\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDepedentPredsInGlobalFOLLOW() throws Exception { - String grammar = "grammar T;\n" + - "@members {\n" + - "boolean pred(boolean v) {\n" + - " System.out.println(\"eval=\"+v);\n" + - " return v;\n" + - "}\n" + - "}\n" + - "s : a[99] ;\n" + - "a[int i] : e {this.pred($i==99)}? {System.out.println(\"parse\");} '!' ;\n" + - "b[int i] : e {this.pred($i==99)}? ID ;\n" + - "e : ID | ; // non-LL(1) so we use ATN\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "a!", true); - assertEquals("eval=true\nparse\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testActionsHidePredsInGlobalFOLLOW() throws Exception { - String grammar = "grammar T;\n" + - "@members {\n" + - "boolean pred(boolean v) {\n" + - " System.out.println(\"eval=\"+v);\n" + - " return v;\n" + - "}\n" + - "}\n" + - "s : e {} {this.pred(true)}? {System.out.println(\"parse\");} '!' ;\n" + - "t : e {} {this.pred(false)}? ID ;\n" + - "e : ID | ; // non-LL(1) so we use ATN\n" + - "ID : 'a'..'z'+ ;\n" + - "INT : '0'..'9'+;\n" + - "WS : (' '|'\\n') -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "s", "a!", true); - assertEquals("eval=true\nparse\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testPredTestedEvenWhenUnAmbig(String input) throws Exception { - String grammar = "grammar T;\n" + - "@members {boolean enumKeyword = true;}\n" + - "primary\n" + - " : ID {System.out.println(\"ID \"+$ID.text);}\n" + - " | {!this.enumKeyword}? 
'enum' {System.out.println(\"enum\");}\n" + - " ;\n" + - "ID : [a-z]+ ;\n" + - "WS : [ \\t\\n\\r]+ -> skip ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "primary", input, true); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPredTestedEvenWhenUnAmbig_1() throws Exception { - String found = testPredTestedEvenWhenUnAmbig("abc"); - assertEquals("ID abc\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPredTestedEvenWhenUnAmbig_2() throws Exception { - String found = testPredTestedEvenWhenUnAmbig("enum"); - assertEquals("", found); - assertEquals("line 1:0 no viable alternative at input 'enum'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testDisabledAlternative() throws Exception { - String grammar = "grammar T;\n" + - "cppCompilationUnit : content+ EOF;\n" + - "content: anything | {false}? .;\n" + - "anything: ANY_CHAR;\n" + - "ANY_CHAR: [_a-zA-Z0-9];"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "cppCompilationUnit", "hello", true); - assertEquals("", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testPredFromAltTestedInLoopBack(String input) throws Exception { - String grammar = "grammar T;\n" + - "file_\n" + - "@after {System.out.println($ctx.toStringTree(this));}\n" + - " : para para EOF ;\n" + - "para: paraContent NL NL ;\n" + - "paraContent : ('s'|'x'|{this._input.LA(2)!=TParser.NL}? NL)+ ;\n" + - "NL : '\\n' ;\n" + - "s : 's' ;\n" + - "X : 'x' ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "file_", input, true); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPredFromAltTestedInLoopBack_1() throws Exception { - String found = testPredFromAltTestedInLoopBack("s\n\n\nx\n"); - assertEquals("(file_ (para (paraContent s) \\n \\n) (para (paraContent \\n x \\n)) )\n", found); - assertEquals("line 5:0 mismatched input '' expecting '\n'\n", this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPredFromAltTestedInLoopBack_2() throws Exception { - String found = testPredFromAltTestedInLoopBack("s\n\n\nx\n\n"); - assertEquals("(file_ (para (paraContent s) \\n \\n) (para (paraContent \\n x) \\n \\n) )\n", found); - assertNull(this.stderrDuringParse); - } - - -} diff --git a/tool/test/org/antlr/v4/test/rt/java/TestSets.java b/tool/test/org/antlr/v4/test/rt/java/TestSets.java deleted file mode 100644 index 0e1bcda4f..000000000 --- a/tool/test/org/antlr/v4/test/rt/java/TestSets.java +++ /dev/null @@ -1,263 +0,0 @@ -package org.antlr.v4.test.rt.java; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestSets extends BaseTest { - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testSeqDoesNotBecomeSet() throws Exception { - String grammar = "grammar T;\n" + - "a : C {System.out.println(this._input.getText());} ;\n" + - "fragment A : '1' | '2';\n" + - "fragment B : '3' '4';\n" + - "C : A | B;"; - String 
found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "34", false); - assertEquals("34\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testParserSet() throws Exception { - String grammar = "grammar T;\n" + - "a : t=('x'|'y') {System.out.println($t.text);} ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "x", false); - assertEquals("x\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testParserNotSet() throws Exception { - String grammar = "grammar T;\n" + - "a : t=~('x'|'y') 'z' {System.out.println($t.text);} ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "zz", false); - assertEquals("z\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testParserNotToken() throws Exception { - String grammar = "grammar T;\n" + - "a : ~'x' 'z' {System.out.println(this._input.getText());} ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "zz", false); - assertEquals("zz\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testParserNotTokenWithLabel() throws Exception { - String grammar = "grammar T;\n" + - "a : t=~'x' 'z' {System.out.println($t.text);} ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "zz", false); - assertEquals("z\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testRuleAsSet() throws Exception { - String grammar = "grammar T;\n" + - "a @after {System.out.println(this._input.getText());} : 'a' | 'b' |'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "b", false); - assertEquals("b\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNotChar() throws Exception { - String grammar = "grammar T;\n" + - "a : A {System.out.println($A.text);} ;\n" + - "A : ~'b' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "x", false); - assertEquals("x\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testOptionalSingleElement() throws Exception { - String grammar = "grammar T;\n" + - "a : A? 'c' {System.out.println(this._input.getText());} ;\n" + - "A : 'b' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "bc", false); - assertEquals("bc\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testOptionalLexerSingleElement() throws Exception { - String grammar = "grammar T;\n" + - "a : A {System.out.println(this._input.getText());} ;\n" + - "A : 'b'? 
'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "bc", false); - assertEquals("bc\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - String testStarLexerSingleElement(String input) throws Exception { - String grammar = "grammar T;\n" + - "a : A {System.out.println(this._input.getText());} ;\n" + - "A : 'b'* 'c' ;"; - return execParser("T.g4", grammar, "TParser", "TLexer", "a", input, false); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testStarLexerSingleElement_1() throws Exception { - String found = testStarLexerSingleElement("bbbbc"); - assertEquals("bbbbc\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testStarLexerSingleElement_2() throws Exception { - String found = testStarLexerSingleElement("c"); - assertEquals("c\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPlusLexerSingleElement() throws Exception { - String grammar = "grammar T;\n" + - "a : A {System.out.println(this._input.getText());} ;\n" + - "A : 'b'+ 'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "bbbbc", false); - assertEquals("bbbbc\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testOptionalSet() throws Exception { - String grammar = "grammar T;\n" + - "a : ('a'|'b')? 'c' {System.out.println(this._input.getText());} ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ac", false); - assertEquals("ac\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testStarSet() throws Exception { - String grammar = "grammar T;\n" + - "a : ('a'|'b')* 'c' {System.out.println(this._input.getText());} ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abaac", false); - assertEquals("abaac\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testPlusSet() throws Exception { - String grammar = "grammar T;\n" + - "a : ('a'|'b')+ 'c' {System.out.println(this._input.getText());} ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abaac", false); - assertEquals("abaac\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLexerOptionalSet() throws Exception { - String grammar = "grammar T;\n" + - "a : A {System.out.println(this._input.getText());} ;\n" + - "A : ('a'|'b')? 
'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "ac", false); - assertEquals("ac\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLexerStarSet() throws Exception { - String grammar = "grammar T;\n" + - "a : A {System.out.println(this._input.getText());} ;\n" + - "A : ('a'|'b')* 'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abaac", false); - assertEquals("abaac\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testLexerPlusSet() throws Exception { - String grammar = "grammar T;\n" + - "a : A {System.out.println(this._input.getText());} ;\n" + - "A : ('a'|'b')+ 'c' ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "abaac", false); - assertEquals("abaac\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNotCharSet() throws Exception { - String grammar = "grammar T;\n" + - "a : A {System.out.println($A.text);} ;\n" + - "A : ~('b'|'c') ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "x", false); - assertEquals("x\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNotCharSetWithLabel() throws Exception { - String grammar = "grammar T;\n" + - "a : A {System.out.println($A.text);} ;\n" + - "A : h=~('b'|'c') ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "x", false); - assertEquals("x\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testNotCharSetWithRuleRef3() throws Exception { - String grammar = "grammar T;\n" + - "a : A {System.out.println($A.text);} ;\n" + - "A : ('a'|B) ; // this doesn't collapse to set but works\n" + - "fragment\n" + - "B : ~('a'|'c') ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "x", false); - assertEquals("x\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testCharSetLiteral() throws Exception { - String grammar = "grammar T;\n" + - "a : (A {System.out.println($A.text);})+ ;\n" + - "A : [AaBb] ;\n" + - "WS : (' '|'\\n')+ -> skip ;"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "a", "A a B b", false); - assertEquals("A\na\nB\nb\n", found); - assertNull(this.stderrDuringParse); - } - - /* this file and method are generated, any edit will be overwritten by the next generation */ - @Test - public void testComplementSet() throws Exception { - String grammar = "grammar T;\n" + - "parse : ~NEW_LINE;\n" + - "NEW_LINE: '\\r'? '\\n';"; - String found = execParser("T.g4", grammar, "TParser", "TLexer", "parse", "a", false); - assertEquals("", found); - assertEquals("line 1:0 token recognition error at: 'a'\nline 1:1 missing {} at ''\n", this.stderrDuringParse); - } - - -}
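
All of the generated tests removed in this patch follow the same pattern: the grammar is assembled as a Java string, handed to a BaseTest helper (execLexer for lexer grammars, execParser for parser grammars), and the captured stdout/stderr is compared against expected strings. The hand-written sketch below is illustrative only and is not part of the patch; it mirrors the deleted testDisableRule, and it assumes that the BaseTest harness (execLexer and the stderrDuringParse field) remains available after this change and that the trailing boolean argument toggles the DFA dump.

package org.antlr.v4.test.rt.java;

import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

/*
 * Hypothetical, hand-written sketch (not generated, not part of this patch).
 * It mirrors the deleted testDisableRule above and assumes the BaseTest
 * helpers used throughout the removed files (execLexer, stderrDuringParse)
 * are still present after this change.
 */
public class SemPredLexerSmokeTest extends BaseTest {

	@Test
	public void predicateDisablesLexerRule() throws Exception {
		String grammar =
			"lexer grammar L;\n" +
			"E1 : 'enum' { false }? ;\n" +   // failing predicate disables E1
			"E2 : 'enum' { true }? ;\n" +    // so 'enum' is matched by E2 (token type 2)
			"ID : 'a'..'z'+ ;\n" +           // 'abc' is matched by ID (token type 3)
			"WS : (' '|'\\n') -> skip;\n";
		// Last argument is assumed to control the DFA dump; false keeps only the token trace.
		String found = execLexer("L.g4", grammar, "L", "enum abc", false);
		assertEquals("[@0,0:3='enum',<2>,1:0]\n" +
		             "[@1,5:7='abc',<3>,1:5]\n" +
		             "[@2,8:7='',<-1>,1:8]\n", found);
		assertNull(this.stderrDuringParse);
	}
}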